Нормализованная взаимная корреляция — можно ли её использовать для тренировки? - PullRequest
0 голосов
/ 10 апреля 2020

Я пытаюсь тренировать модель керас, используя лямбда-слои. По сути, цель модели - найти максимальную нормализованную взаимную корреляцию между двумя изображениями. Для этого я хочу определить два обучаемых параметра (deltaX, deltaY), которые определяют, какие срезы я хочу извлечь из второго изображения.

Проблема в том, что когда я использую K.slice, я получаю эту ошибку (во время выполнения model.fit):

---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-17-24d6e2fd4890> in <module>
      1 epochs = 10
      2 
----> 3 history = model.fit(x=[reference_im, reference_im_sq, imageB, np.array([0])], y=np.array([0]), steps_per_epoch = 1, epochs = epochs, verbose = 1)

~/.local/lib/python3.6/site-packages/keras/engine/training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_freq, max_queue_size, workers, use_multiprocessing, **kwargs)
   1211         else:
   1212             fit_inputs = x + y + sample_weights
-> 1213         self._make_train_function()
   1214         fit_function = self.train_function
   1215 

~/.local/lib/python3.6/site-packages/keras/engine/training.py in _make_train_function(self)
    314                     training_updates = self.optimizer.get_updates(
    315                         params=self._collected_trainable_weights,
--> 316                         loss=self.total_loss)
    317                 updates = self.updates + training_updates
    318 

~/.local/lib/python3.6/site-packages/keras/legacy/interfaces.py in wrapper(*args, **kwargs)
     89                 warnings.warn('Update your `' + object_name + '` call to the ' +
     90                               'Keras 2 API: ' + signature, stacklevel=2)
---> 91             return func(*args, **kwargs)
     92         wrapper._original_function = func
     93         return wrapper

~/.local/lib/python3.6/site-packages/keras/optimizers.py in get_updates(self, loss, params)
    502     @K.symbolic
    503     def get_updates(self, loss, params):
--> 504         grads = self.get_gradients(loss, params)
    505         self.updates = [K.update_add(self.iterations, 1)]
    506 

~/.local/lib/python3.6/site-packages/keras/optimizers.py in get_gradients(self, loss, params)
     91         grads = K.gradients(loss, params)
     92         if any(x is None for x in grads):
---> 93             raise ValueError('An operation has `None` for gradient. '
     94                              'Please make sure that all of your ops have a '
     95                              'gradient defined (i.e. are differentiable). '

ValueError: An operation has `None` for gradient. Please make sure that all of your ops have a gradient defined (i.e. are differentiable). Common ops without gradient: K.argmax, K.round, K.eval.

Здесь я определяю мое эталонное изображение (imageA):

# Reference (non-translated) image: crop a fixed window from the right edge.
window = 200
reference_im = imageA[pc_25:pc_75, -(window + 1):-1, :]
# Zero-mean along the first axis, then precompute the element-wise square
# for the NCC denominator.
reference_im = reference_im - np.mean(reference_im, axis=0)
reference_im_sq = np.square(reference_im)

Вот модель, которую я определяю (слои Reshape изменяют форму, убирая размерность пакета):

from keras.layers import Input, Lambda, Reshape
from keras.models import Model

# Reference image input.
# NOTE(review): Reshape targets the same (pc_75-pc_25, window, 3) feature
# shape it already has, and Keras Reshape never touches the batch axis —
# so this layer looks like a no-op; confirm it is actually needed.
input_reference = Input(shape=(pc_75-pc_25,window,3))
reference = Reshape([pc_75-pc_25,window,3]) (input_reference)

# Reference image squared (element-wise), same shape as the reference.
input_reference_sq = Input(shape=(pc_75-pc_25,window,3))
reference_sq = Reshape([pc_75-pc_25,window,3]) (input_reference_sq)

# Second image to compare against the reference.
# NOTE(review): `batch_shape` expects a full shape tuple like (1, H, W, C);
# passing batch_shape=0 together with shape= is suspicious — verify Keras
# accepts it.
input_imageB = Input(shape=(imageB.shape[0],imageB.shape[1],imageB.shape[2]), batch_shape=0)
# K.reshape to a fixed (6720, 4480, 3) tensor, i.e. dropping the batch axis.
translated = Lambda(lambda inputs: K.reshape(inputs, (6720, 4480, 3) ) ) (input_imageB )

# A dummy input used only so the two parameter-holding Lambda layers below
# can be called as layers.
fake_input = Input(shape=(1,), batch_shape=0)

# deltaX: trainable horizontal offset. The Lambda ignores its input and
# returns its own (scalar) weight.
# NOTE(review): the weight is created with dtype='int32'; gradients cannot
# flow into integer variables, which matches the "operation has `None` for
# gradient" ValueError shown in the traceback above.
deltaX_lamb = Lambda(lambda x: deltaX_lamb.weights[0][0], name='deltaX_lamb')
deltaX_lamb.add_weight(name='y', shape=(1,1), initializer='zero', trainable=True, dtype='int32')
deltaX = deltaX_lamb(fake_input)

# deltaY: trainable vertical offset, built exactly like deltaX.
deltaY_lamb = Lambda(lambda y: deltaY_lamb.weights[0][0], name='deltaY_lamb')
deltaY_lamb.add_weight(name='y', shape=(1,1), initializer='zero', trainable=True, dtype='int32')
deltaY = deltaY_lamb(fake_input)


# Extract a (pc_75-pc_25, window, 3) crop of `translated` at an offset
# controlled by (deltaX, deltaY).
# NOTE(review): K.slice is piecewise-constant with respect to its integer
# begin indices, so no useful gradient reaches deltaX/deltaY through it —
# a differentiable warp/interpolation would be required for training.
translated = Lambda(lambda inputs: K.slice(inputs[0], [pc_25-initY+inputs[2][0], (imageA.shape[1]-initX)+inputs[1][0]-window,0], [pc_75-pc_25,window,3]), name='slice')  ([translated, deltaX, deltaY])

# Normalized cross correlation:
#   NCC = sum(ref * crop) / sqrt(sum(ref^2) * sum(crop^2)),
# negated so minimizing the loss maximizes the correlation.
translated = Lambda(lambda translated: (translated - K.mean(translated,0)), name='mean' )(translated)
translated_sq = Lambda(lambda translated: K.square(translated) )(translated)

num = Lambda(lambda inputs: K.sum(inputs[0]*inputs[1]) )([reference,translated])
den = Lambda(lambda inputs: K.sqrt(K.sum(inputs[0])*K.sum(inputs[1])) )([reference_sq,translated_sq])
ncc = Lambda(lambda to_divide: - to_divide[0] / to_divide[1])([num,den])

model = Model(inputs=[input_reference, input_reference_sq, input_imageB, fake_input], outputs=ncc)

И остальной код:

def ncc_loss(y_true, y_pred):
    """Pass-through loss: the model already outputs the (negated) NCC value,
    so the loss is simply the prediction itself; `y_true` is ignored."""
    return y_pred

# Compile the model with the pass-through NCC loss.
adam = keras.optimizers.Adam(lr=1)
model.compile(optimizer=adam, loss=ncc_loss)

# Prepend a batch dimension of size 1 (the model immediately strips it again).
reference_im = np.expand_dims(reference_im, axis=0)
reference_im_sq = np.expand_dims(reference_im_sq, axis=0)
imageB = np.expand_dims(imageB, axis=0)

# Run the training loop.
epochs = 10
history = model.fit(x=[reference_im, reference_im_sq, imageB, np.array([0])], y=np.array([0]), steps_per_epoch = 1, epochs = epochs, verbose = 1)

Заранее спасибо !!!

Raül

Добро пожаловать на сайт PullRequest, где вы можете задавать вопросы и получать ответы от других членов сообщества.
...