OSError: Unable to create file (unable to open file: name = '../working/best.hdf5', errno = 2, error message = 'No such file or directory')

I am trying to run this code, but I keep hitting this error: OSError: Unable to create file (unable to open file: name = '../working/best.hdf5', errno = 2, error message = 'No such file or directory', flags = 13, o_flags = 302)

If anyone has run into this error and knows how to solve it, how can I fix it? Thanks.
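For context, errno = 2 ('No such file or directory') raised while creating a file almost always means that the parent directory, '../working' here, does not exist: Keras's ModelCheckpoint (via h5py) will not create missing directories for you. A minimal sketch of the likely fix, assuming the relative path '../working' really is where the checkpoint should go:

import os
from keras.callbacks import ModelCheckpoint

# Create the checkpoint directory up front if it is missing.
checkpoint_dir = '../working'  # assumption: keep the original relative location
os.makedirs(checkpoint_dir, exist_ok=True)

cb_checkpointer = ModelCheckpoint(filepath=os.path.join(checkpoint_dir, 'best.hdf5'),
    monitor='val_loss', save_best_only=True, mode='auto')

With the directory in place, the original filepath='../working/best.hdf5' works unchanged.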

NUM_CLASSES = 2
CHANNELS = 3
IMAGE_RESIZE = 224
RESNET50_POOLING_AVERAGE = 'avg'
DENSE_LAYER_ACTIVATION = 'softmax'
OBJECTIVE_FUNCTION = 'binary_crossentropy'
LOSS_METRICS = ['accuracy']
NUM_EPOCHS = 10
EARLY_STOP_PATIENCE = 3
STEPS_PER_EPOCH_TRAINING = 10
STEPS_PER_EPOCH_VALIDATION = 10
BATCH_SIZE_TRAINING = 100
BATCH_SIZE_VALIDATION = 100
BATCH_SIZE_TESTING = 1
resnet_weights_path = '../input/resnet50/resnet50_weights_tf_dim_ordering_tf_kernels_notop.h5'
from keras.applications.resnet50 import ResNet50
from keras.layers import Dense
from keras.models import Model
from keras import optimizers

train_data_dir = "C:\\Users\\Desktop\\RESNET"
# include_top=False drops ImageNet's output layer and pooling='avg' appends a
# GlobalAveragePooling2D layer, so the base model outputs one feature vector per image.
model = ResNet50(include_top=False, pooling='avg', weights='imagenet')
x = model.output
predictions = Dense(1, activation='sigmoid')(x)  # single sigmoid unit for the binary task
model = Model(inputs=model.input, outputs=predictions)
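# Optional sanity check: the new head is a single sigmoid unit, so the
# model's output shape should be (None, 1).
print(model.output_shape)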
model.summary()  # summary() prints the architecture itself and returns None
sgd = optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
model.compile(loss='binary_crossentropy', optimizer=sgd, metrics=['binary_accuracy'])
data_dir = "C:\\Users\\Desktop\\RESNET"
batch_size = 32
from keras.applications.resnet50 import preprocess_input
from keras.preprocessing.image import ImageDataGenerator
image_size = IMAGE_RESIZE
data_generator = ImageDataGenerator(preprocessing_function=preprocess_input)
def append_ext(fn):
    return fn+".jpg"
import os
from os import listdir
from os.path import isfile, join

dir_path = os.path.dirname(os.path.realpath(__file__))
train_dir_path = os.path.join(dir_path, 'data')  # join avoids raw backslash escapes
onlyfiles = [f for f in listdir(dir_path) if isfile(join(dir_path, f))]
data_labels = [0, 1]
t = []
maxi = 25145
LieOffset = 15799
# Label the first LieOffset+1 samples as 'Lie' (0) and the rest as 'Truth' (1)
# (assumption: the undefined `label` dict in the original mapped these names to 0/1).
i = 0
while i < maxi:
    if i <= LieOffset:
        t.append(data_labels[0])  # 'Lie'
    else:
        t.append(data_labels[1])  # 'Truth'
    i = i + 1
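# Sanity check: exactly one label per sample.
assert len(t) == maxi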
train_datagenerator = ImageDataGenerator(rescale=1./255,
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True,
    validation_split=0.2)
train_generator = train_datagenerator.flow_from_directory(
    train_data_dir,
    target_size=(image_size, image_size),
    batch_size=BATCH_SIZE_TRAINING,
    class_mode='binary', subset='training')  # training split; was missing but fit_generator needs it
validation_generator = train_datagenerator.flow_from_directory(
    train_data_dir,  # same directory as the training data
    target_size=(image_size, image_size),
    batch_size=BATCH_SIZE_VALIDATION,
    class_mode='binary', shuffle=False, subset='validation')  # validation split

print(BATCH_SIZE_TRAINING, len(train_generator), BATCH_SIZE_VALIDATION, len(validation_generator))
from keras.callbacks import EarlyStopping, ModelCheckpoint  # keep one Keras namespace; mixing tensorflow.python.keras with keras objects can fail

cb_early_stopper = EarlyStopping(monitor='val_loss', patience=EARLY_STOP_PATIENCE)
cb_checkpointer = ModelCheckpoint(filepath='../working/best.hdf5', monitor='val_loss',
    save_best_only=True, mode='auto')
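# Note: ModelCheckpoint will not create '../working' if it does not exist; h5py
# then raises the OSError (errno = 2) quoted above. See the makedirs sketch earlier.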
fit_history = model.fit_generator(
    train_generator,
    steps_per_epoch=STEPS_PER_EPOCH_TRAINING,
    epochs=NUM_EPOCHS,
    validation_data=validation_generator,
    validation_steps=STEPS_PER_EPOCH_VALIDATION,
    callbacks=[cb_checkpointer, cb_early_stopper]
)
model.load_weights("../working/best.hdf5")
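For completeness, a small sketch of inspecting the history object returned by fit_generator; the key names follow the compiled metrics, so 'binary_accuracy' here (these are standard Keras history keys, not taken from the original post):

print(fit_history.history.keys())  # loss, binary_accuracy, val_loss, val_binary_accuracy
print(min(fit_history.history['val_loss']))  # best val_loss, i.e. the epoch best.hdf5 was saved at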

...