I am trying to run an image through a simple conv net and get x and y (coordinates) as the output. I have a pandas DataFrame built from three lists:
df = pd.DataFrame({'filename':full_path_list, 'x_coordinate': x_coordinate,'y_coordinate': y_coordinate})
full_path_list: a list of strings, the paths to the images
x_coordinate: the x coordinate I later try to predict
y_coordinate: the y coordinate I later try to predict
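For reference, this is roughly what the frame contains (the path printed below is made up; the coordinate dtypes are just what I expect after building the columns from plain Python ints):

# Quick look at the frame: one string column plus two integer columns.
print(df.dtypes)
# filename        object
# x_coordinate     int64
# y_coordinate     int64
print(df['filename'].iloc[0])   # e.g. 'C:/images/img_0001.png' (made-up path)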
Here are the data generators:
from tensorflow.keras.preprocessing.image import ImageDataGenerator

train_datagen = ImageDataGenerator(
    validation_split=0.2,
    rescale=1./255)

train_generator = train_datagen.flow_from_dataframe(
    df,
    class_mode='multi_output',
    y_col=['x_coordinate', 'y_coordinate'],
    target_size=(172, 172))
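A quick sanity check I run on one batch from this generator (my own debugging snippet, assuming the default batch_size of 32; the list-of-two-arrays structure is the same thing the traceback below complains about):

# Pull a single batch and look at its structure.
x_batch, y_batch = next(train_generator)
print(x_batch.shape)                # (32, 172, 172, 3) with the default batch_size
print(type(y_batch), len(y_batch))  # <class 'list'> 2 -- one array per y_col column
print(y_batch[0].shape)             # the x_coordinate values for this batch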
This is the model I am using:
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D, MaxPooling2D, Flatten, Dense

IMG_HEIGHT = 172
IMG_WIDTH = 172

model = Sequential([
    Conv2D(16, 3, padding='same', activation='relu', input_shape=(IMG_HEIGHT, IMG_WIDTH, 3)),
    MaxPooling2D(),
    Conv2D(32, 3, padding='same', activation='relu'),
    MaxPooling2D(),
    Conv2D(64, 3, padding='same', activation='relu'),
    MaxPooling2D(),
    Flatten(),
    Dense(512, activation='relu'),
    Dense(2, activation='relu')
])
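As far as I can tell, the model ends in a single Dense(2) head, so it has one output tensor of shape (None, 2) and should take one target array with two values per sample:

# The last layer is Dense(2), so there is a single output of shape (None, 2).
print(model.output_shape)   # (None, 2)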
model.compile(optimizer='adam',
              loss='mean_squared_error',
              metrics=['mean_absolute_error', 'mean_squared_error'])
model.fit_generator(generator=train_generator,
                    epochs=15)
The error I get goes away if I change:
y_col = ['x_coordinate', 'y_coordinate']
to
y_col = ['x_coordinate']
and only predict a single value, but I do not want to train two separate networks, one per value I want to predict. Something must be wrong with the way I feed the data in. Here is the exact error:
Epoch 1/15
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-129-9becd295e855> in <module>
4 # validation_data=valid_generator,
5 # validation_steps=STEP_SIZE_VALID,
----> 6 epochs=15
7 )
~\Anaconda3\envs\Eternium\lib\site-packages\tensorflow_core\python\keras\engine\training.py in fit_generator(self, generator, steps_per_epoch, epochs, verbose, callbacks, validation_data, validation_steps, validation_freq, class_weight, max_queue_size, workers, use_multiprocessing, shuffle, initial_epoch)
1295 shuffle=shuffle,
1296 initial_epoch=initial_epoch,
-> 1297 steps_name='steps_per_epoch')
1298
1299 def evaluate_generator(self,
~\Anaconda3\envs\Eternium\lib\site-packages\tensorflow_core\python\keras\engine\training_generator.py in model_iteration(model, data, steps_per_epoch, epochs, verbose, callbacks, validation_data, validation_steps, validation_freq, class_weight, max_queue_size, workers, use_multiprocessing, shuffle, initial_epoch, mode, batch_size, steps_name, **kwargs)
263
264 is_deferred = not model._is_compiled
--> 265 batch_outs = batch_function(*batch_data)
266 if not isinstance(batch_outs, list):
267 batch_outs = [batch_outs]
~\Anaconda3\envs\Eternium\lib\site-packages\tensorflow_core\python\keras\engine\training.py in train_on_batch(self, x, y, sample_weight, class_weight, reset_metrics)
971 outputs = training_v2_utils.train_on_batch(
972 self, x, y=y, sample_weight=sample_weight,
--> 973 class_weight=class_weight, reset_metrics=reset_metrics)
974 outputs = (outputs['total_loss'] + outputs['output_losses'] +
975 outputs['metrics'])
~\Anaconda3\envs\Eternium\lib\site-packages\tensorflow_core\python\keras\engine\training_v2_utils.py in train_on_batch(model, x, y, sample_weight, class_weight, reset_metrics)
251 x, y, sample_weights = model._standardize_user_data(
252 x, y, sample_weight=sample_weight, class_weight=class_weight,
--> 253 extract_tensors_from_dataset=True)
254 batch_size = array_ops.shape(nest.flatten(x, expand_composites=True)[0])[0]
255 # If `model._distribution_strategy` is True, then we are in a replica context
~\Anaconda3\envs\Eternium\lib\site-packages\tensorflow_core\python\keras\engine\training.py in _standardize_user_data(self, x, y, sample_weight, class_weight, batch_size, check_steps, steps_name, steps, validation_split, shuffle, extract_tensors_from_dataset)
2517 shapes=None,
2518 check_batch_axis=False, # Don't enforce the batch size.
-> 2519 exception_prefix='target')
2520
2521 # Generate sample-wise weight values given the `sample_weight` and
~\Anaconda3\envs\Eternium\lib\site-packages\tensorflow_core\python\keras\engine\training_utils.py in standardize_input_data(data, names, shapes, check_batch_axis, exception_prefix)
529 'Expected to see ' + str(len(names)) + ' array(s), '
530 'but instead got the following list of ' +
--> 531 str(len(data)) + ' arrays: ' + str(data)[:200] + '...')
532 elif len(names) > 1:
533 raise ValueError('Error when checking model ' + exception_prefix +
ValueError: Error when checking model target: the list of Numpy arrays that you are passing to your model is not the size the model expected. Expected to see 1 array(s), but instead got the following list of 2 arrays: [array([[1144],
[ 820],
[ 428],
[ 525],
[ 478],
[1365],
[ 479],
[ 260],
[ 763],
[ 519],
[ 949],
[ 626],
[ 703],
...