I am getting this error when fitting the model:
train_generator = my_data_generator(x_train['path'].values, y_train, is_training=True, batch_size=32)
model_1.fit_generator(generator=train_generator, steps_per_epoch=train_steps_per_epoch, epochs=10)
import tensorflow as tf

def my_data_generator(images, labels, is_training, batch_size=32):
    '''
    Custom data generator using tf.data.Dataset
    '''
    def parse_function(filename, label):
        # read the file at the given path
        image_string = tf.io.read_file(filename)
        # decode the JPEG image
        image = tf.image.decode_jpeg(image_string, channels=3)
        # convert to float values in [0, 1]
        image = tf.image.convert_image_dtype(image, tf.float32)
        # resize the image
        image = tf.image.resize(image, [128, 128])
        # one-hot encode the label (8 classes)
        y = tf.one_hot(tf.cast(label, tf.uint8), 8)
        return image, y

    # create a dataset from tensor slices
    dataset = tf.data.Dataset.from_tensor_slices((images, labels))
    if is_training:
        dataset = dataset.shuffle(10000)  # buffer size depends on sample size
    # map and batch the data in one step
    dataset = dataset.apply(tf.data.experimental.map_and_batch(
        parse_function, batch_size,
        num_parallel_batches=4,  # cpu cores
        drop_remainder=is_training))
    # repeat the dataset indefinitely
    dataset = dataset.repeat()
    # prefetch the data to overlap preprocessing and training
    dataset = dataset.prefetch(tf.data.experimental.AUTOTUNE)
    return dataset
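For reference, here is a minimal sketch of the call site under TF 2.x, where fit_generator is deprecated and a tf.data.Dataset can be passed to model.fit directly. model_1, x_train and y_train are assumed from the question; the way train_steps_per_epoch is computed here is an assumption, since the dataset repeats indefinitely and Keras needs an explicit step count.

# Minimal sketch, assuming model_1, x_train and y_train exist as in the question.
batch_size = 32
train_dataset = my_data_generator(x_train['path'].values, y_train,
                                  is_training=True, batch_size=batch_size)

# The dataset repeats indefinitely, so steps_per_epoch must be set explicitly
# (assumed here to be the number of full batches per epoch).
train_steps_per_epoch = len(x_train) // batch_size

# In TF 2.x a tf.data.Dataset goes straight into model.fit.
model_1.fit(train_dataset,
            steps_per_epoch=train_steps_per_epoch,
            epochs=10)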