Я пытаюсь обучить CNN, используя мои собственные данные для проблемы двоичной классификации. Но у меня возникла проблема с ожидаемым размером в качестве входных данных, который я думал (224,224,3). Я искал этот случай и обнаружил, что некоторые люди говорили, что его можно исправить, изменив размер изображения с (224,224,3) до (1,224,224,3), но это не сработало.
вот мой код:
import scipy.io
import tensorflow as tf
import cv2
# Parameters
img_height = 224
img_width = 224
img_depth = 3
classes = 2
# Load Data
db_name = 'polo'
db_path = 'D:/databases/' + db_name + '/'
db_data = scipy.io.loadmat(db_path + 'db_py.mat')
db_size = len(db_data['db']['images'][0][0][0])
faces_path = 'data/' + db_name + '/faces/'
images = []
labels = [0] * db_size
for i in range(0,db_size):
filename = 'data/' + db_name + '/faces/' + db_data['db']['images'][0][0][0][i][2][0]
image = cv2.imread(filename)
image = cv2.resize(image, (img_height, img_width))
images.append(image)
labels[i] = db_data['db']['subjects'][0][0][0][i][4][0][0][0][0][0]
inputs = tf.keras.layers.Input(shape=(img_height,img_width,img_depth))
layers = tf.keras.layers.Conv2D(32, (3, 3), padding="same")(inputs)
layers = tf.keras.layers.Activation("relu")(layers)
layers = tf.keras.layers.BatchNormalization(axis=-1)(layers)
layers = tf.keras.layers.Conv2D(32, (3, 3), padding="same")(layers)
layers = tf.keras.layers.Activation("relu")(layers)
layers = tf.keras.layers.BatchNormalization(axis=-1)(layers)
layers = tf.keras.layers.MaxPooling2D(pool_size=(2, 2))(layers)
layers = tf.keras.layers.Dropout(0.25)(layers)
layers = tf.keras.layers.Conv2D(64, (3, 3), padding="same")(layers)
layers = tf.keras.layers.Activation("relu")(layers)
layers = tf.keras.layers.BatchNormalization(axis=-1)(layers)
layers = tf.keras.layers.Conv2D(64, (3, 3), padding="same")(layers)
layers = tf.keras.layers.Activation("relu")(layers)
layers = tf.keras.layers.BatchNormalization(axis=-1)(layers)
layers = tf.keras.layers.MaxPooling2D(pool_size=(2, 2))(layers)
layers = tf.keras.layers.Dropout(0.25)(layers)
layers = tf.keras.layers.Flatten()(layers)
layers = tf.keras.layers.Dense(512)(layers)
layers = tf.keras.layers.Activation("relu")(layers)
layers = tf.keras.layers.BatchNormalization()(layers)
layers = tf.keras.layers.Dropout(0.5)(layers)
layers = tf.keras.layers.Dense(classes)(layers)
layers = tf.keras.layers.Activation("softmax")(layers)
InitialLearnRate = 0.03
MaxEpochs = 30
MiniBatchSize = 32
opt = tf.keras.optimizers.SGD(lr=InitialLearnRate, decay=InitialLearnRate / MaxEpochs)
model = tf.keras.Model(inputs, layers , name="net")
model.compile(loss="categorical_crossentropy", optimizer=opt,
metrics=["accuracy"])
model.summary()
H = model.fit(images, labels,
batch_size=MiniBatchSize, epochs=MaxEpochs, verbose=1,steps_per_epoch=10)