Fine-tuning VGG16 on a custom pre-trained model
1 vote
20 January 2020

I am trying to use transfer learning to train another dataset on top of my custom pre-trained model iimdata.h5, and this raises a ValueError.

ValueError: You are trying to load a weight file containing 3 layers into a model with 0 layers.
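A minimal sketch of what apparently triggers this message, assuming the cause is that load_weights is called on a Sequential model that has not been built yet. The file name top_weights.h5 is hypothetical, and 15x15x512 is simply the VGG16 feature-map shape for 500x500 RGB inputs:

from keras.models import Sequential
from keras.layers import Flatten, Dense

# Without an input shape the layers are never built, so the model owns no
# weight tensors and load_weights sees "a model with 0 layers".
unbuilt = Sequential()
unbuilt.add(Flatten())
unbuilt.add(Dense(20, activation='softmax'))
# unbuilt.load_weights('top_weights.h5')   # would raise the ValueError above

# Giving the first layer an input_shape builds the layers up front, so the
# weight file has real weight tensors to match against.
built = Sequential()
built.add(Flatten(input_shape=(15, 15, 512)))
built.add(Dense(20, activation='softmax'))
# built.load_weights('top_weights.h5')     # loads, if the architectures match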

import matplotlib.pyplot as plt
from keras import applications
from keras.preprocessing.image import ImageDataGenerator
from keras import optimizers
from keras.models import Sequential
from keras.layers import Dropout, Flatten, Dense

# path to the model weights files.
weights_path = 'C:/Users/444/.spyder-py3/IAM/iimdata.h5'
top_model_weights_path = 'C:/Users/444/.spyder-py3/IAM/iimdata.h5'
# dimensions of our images.
img_width, img_height = 500, 500

nb_train_samples = 20
batch_size = 16

# build the VGG16 network
model = applications.VGG16(weights='imagenet', include_top=False, input_shape=(500, 500,3))
print('Model loaded.')

# build a classifier model to put on top of the convolutional model
top_model = Sequential()
top_model.add(Flatten())
top_model.add(Dense(1024, activation='tanh'))
top_model.add(Dropout(0.5))
top_model.add(Dense(20, activation='softmax'))


# load the top classifier's weights (starting from a trained classifier
# is necessary in order to successfully do fine-tuning)
top_model.load_weights(top_model_weights_path)   # error here

# add the model on top of the convolutional base
model.add(top_model)

# freeze every layer except the last four (the last conv block)
# so their weights will not be updated during fine-tuning
for layer in model.layers[:-4]:
    layer.trainable = False

# compile the model with a SGD/momentum optimizer
# and a very slow learning rate.
model.compile(loss='categorical_crossentropy',
              optimizer=optimizers.SGD(lr=1e-4, momentum=0.9),
              metrics=['accuracy'])

# prepare data augmentation configuration
train_datagen = ImageDataGenerator(
    rescale=1. / 255)



# generator for reading traindata from folder
train_generator = train_datagen.flow_from_directory(
   "C:/Users/444/.spyder-py3/IAM/Training-test/train_patches/",
    target_size = (500,500),
    color_mode = 'rgb',
    batch_size = batch_size,
    class_mode = 'categorical')


# fine-tune the model
history= model.fit_generator(
    train_generator,
    samples_per_epoch=nb_train_samples,
    epochs=30)

model_json = model.to_json()
open('rus.json','w').write(model_json)
model.save_weights('rus.h5',overwrite=True) 

Please help me resolve this error.
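For reference, a hedged sketch of how the pre-trained base and the classifier head are usually stacked for this kind of fine-tuning, assuming the weight file was saved from a head with the same architecture. Note that the Model returned by applications.VGG16 has no .add method, so model.add(top_model) in the code above would also fail; calling the head on the base's output tensor and wrapping both in a new Model is one common pattern. Variable names such as base and full_model are illustrative, not from the original code:

from keras import applications, optimizers
from keras.models import Model, Sequential
from keras.layers import Dropout, Flatten, Dense

base = applications.VGG16(weights='imagenet', include_top=False,
                          input_shape=(500, 500, 3))

# Build the head with an explicit input shape so load_weights can match it.
top_model = Sequential()
top_model.add(Flatten(input_shape=base.output_shape[1:]))
top_model.add(Dense(1024, activation='tanh'))
top_model.add(Dropout(0.5))
top_model.add(Dense(20, activation='softmax'))
# top_model.load_weights(top_model_weights_path)  # as in the question

# Call the head on the base's output tensor and wrap both in a new Model,
# since the functional VGG16 model object has no .add() method.
full_model = Model(inputs=base.input, outputs=top_model(base.output))

# Freeze everything up to the last convolutional block, then compile.
for layer in base.layers[:-4]:
    layer.trainable = False

full_model.compile(loss='categorical_crossentropy',
                   optimizer=optimizers.SGD(lr=1e-4, momentum=0.9),
                   metrics=['accuracy'])

After that, flow_from_directory and the training call can be used on full_model exactly as in the code above.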

...