Why do my training and validation accuracy curves look like this?

When I run my code on the BreakHist dataset, the training/validation accuracy curves and the training/validation loss curves do not look normal. I interrupt training when the validation loss stops decreasing. My question: does anyone know why the curves look like this?

Training and validation accuracy / Training and validation loss: see the plot at [1]
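For context, by "interrupting training" I mean the standard Keras EarlyStopping callback monitoring val_loss. A minimal sketch (the patience value and the restore_best_weights option here are only illustrative; the actual settings are in the full script below):

from keras.callbacks import EarlyStopping

# Stop once val_loss has not improved for `patience` epochs;
# restore_best_weights (available in newer Keras versions) rolls the model
# back to the weights of the best epoch instead of keeping the last ones.
early_stopping = EarlyStopping(monitor='val_loss',
                               patience=10,
                               restore_best_weights=True,
                               verbose=1)
# then: MyModel.fit_generator(..., callbacks=[early_stopping])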

import os
import cv2

import numpy as np
from tqdm import tqdm
from PIL import Image
from keras.utils.np_utils import to_categorical
from sklearn.model_selection import train_test_split

from keras.optimizers import Adam
from keras.preprocessing.image import ImageDataGenerator
from keras.callbacks import EarlyStopping
from keras import backend as k
#Our CNN Model
from keras.models import Sequential
from keras.layers import Conv2D,MaxPooling2D,Activation,Flatten,Dropout
from keras.layers import Dense

from UsefulFunctions import PlotFigure


NBepochs = 300
batch_size = 32
# image dimensions
ImageW, ImageH = 224, 224

if k.image_data_format() == 'channels_first':  # (3, 224, 224)
    input_shape = (3, ImageW, ImageH)
else:
    input_shape = (ImageW, ImageH, 3)  # (224, 224, 3)

print("[INFO] loading dataset...")
def Dataset_loader(DIR, RESIZE, sigmaX=10):
    IMG = []
    read = lambda imname: np.asarray(Image.open(imname).convert("RGB"))
    for IMAGE_NAME in tqdm(os.listdir(DIR)):
        PATH = os.path.join(DIR,IMAGE_NAME)
        _, ftype = os.path.splitext(PATH)
        if ftype == ".png":
            img = read(PATH)

            img = cv2.resize(img, (RESIZE,RESIZE))
            IMG.append(img)
    return IMG

benign_images = np.array(Dataset_loader('../BinaryData/40X/Benign/train',ImageW))
malign_images = np.array(Dataset_loader('../BinaryData/40X/Malignant/train',ImageW))

# Benign: 0, Malignant: 1
benign_label = np.zeros(len(benign_images))
malign_label = np.ones(len(malign_images))



X = np.concatenate((benign_images, malign_images), axis = 0)
y = np.concatenate((benign_label, malign_label), axis = 0)

y = to_categorical(y, num_classes = 2)
print(y.shape)
# partition the data into training and validation splits: test_size=0.2 (80% training, 20% validation)
X_train, X_val, y_train, y_val = train_test_split(X, y, test_size=0.2,random_state=11)





def MyCNN(input_shape):
    NewModel=Sequential()
    NewModel.add(Conv2D(16,(3,3), padding="same", input_shape=input_shape))

    NewModel.add(Activation('relu'))
    NewModel.add(MaxPooling2D(pool_size=(2,2)))

    NewModel.add(Conv2D(32,(3,3), padding="same", input_shape=input_shape))
    NewModel.add(Activation('relu'))
    NewModel.add(MaxPooling2D(pool_size=(2,2)))

    NewModel.add(Conv2D(64,(3,3), input_shape=input_shape))
    NewModel.add(Activation('relu'))
    NewModel.add(MaxPooling2D(pool_size=(2,2)))
    # optional extra convolution block
    NewModel.add(Conv2D(128,(3,3), input_shape=input_shape))
    NewModel.add(Activation('relu'))
    NewModel.add(MaxPooling2D(pool_size=(2,2)))

    NewModel.add(Conv2D(256,(3,3), input_shape=input_shape))
    NewModel.add(Activation('relu'))
    NewModel.add(MaxPooling2D(pool_size=(2,2)))
    NewModel.add(Conv2D(512,(3,3), input_shape=input_shape))
    NewModel.add(Activation('relu'))
    NewModel.add(MaxPooling2D(pool_size=(2,2)))


    NewModel.add(Flatten())
    NewModel.add(Dense(512))
    NewModel.add(Activation('relu'))
    NewModel.add(Dropout(0.5))
    NewModel.add(Dense(2,activation='sigmoid'))

    return NewModel



optimizer = Adam(lr=0.001,beta_1=0.6, beta_2=0.999,  decay=0.0)
#optimizer = SGD(lr=0.0001, momentum= 0.9)
print("[INFO] CNN...")
MyModel=MyCNN(input_shape)
# compile the model
MyModel.compile(loss='binary_crossentropy', optimizer=optimizer, metrics=['accuracy'])

MyModel.summary()

#Data Augmentation
train_datagen = ImageDataGenerator(rescale=1 / 255.0,
                                   rotation_range=10,
                                   width_shift_range=0.2,
                                   height_shift_range=0.2,
                                   shear_range=0.2,
                                   zoom_range=0.2,
                                   horizontal_flip=True,
                                   fill_mode='nearest')

train_datagen.fit(X_train)

val_datagen = ImageDataGenerator()  # no augmentation for the validation data
val_datagen.fit(X_val)
#val_datagen.fit(100)

#learning_rate_reduction = ReduceLROnPlateau(monitor='val_acc', patience=2, verbose=1, factor=0.2, min_lr=0.00001)



#training
print("[INFO] Training...")
early_stopping = EarlyStopping(monitor='val_loss', patience=100)#or monitor='val_acc' 
history = MyModel.fit_generator(
                              train_datagen.flow(X_train,y_train, batch_size=batch_size),
                              steps_per_epoch=(X_train.shape[0] // batch_size),  # number of training images // batch_size
                              epochs = NBepochs,
                              verbose = 1,
                              validation_data = val_datagen.flow(X_val, y_val),
                              validation_steps=(X_val.shape[0] // batch_size),
                              callbacks=[early_stopping]#interrupt training when the validation loss isn't decreasing anymore
                              )


#history = MyModel.fit_generator(train_datagen.flow(X_train,y_train, batch_size=batch_size),
#                              epochs = epochs, validation_data = val_datagen.flow(X_val, y_val),
#                              verbose = 1, steps_per_epoch=(X_train.shape[0] // batch_size), callbacks=[learning_rate_reduction])

# Test
print("[INFO] TEST...")
benign_images_test = np.array(Dataset_loader('../BinaryData/40X/Benign/validation',ImageW))
malign_images_test = np.array(Dataset_loader('../BinaryData/40X/Malignant/validation',ImageW))

benign_label_test = np.zeros(len(benign_images_test))
malign_label_test = np.ones(len(malign_images_test))

X_test = np.concatenate((benign_images_test, malign_images_test), axis = 0)
y_test = np.concatenate((benign_label_test, malign_label_test), axis = 0)

y_test = to_categorical(y_test, num_classes = 2)

loss_test, acc_test = MyModel.evaluate(X_test, y_test, verbose=1)
loss_val, acc_val = MyModel.evaluate(X_val, y_val, verbose=1)

print("Validation: accuracy = %f  ;  loss_v = %f" % (acc_val, loss_val))
print("Test: accuracy = %f  ;  loss = %f" % (acc_test, loss_test))

print("[INFO] SAVE MODEL...")
MyModel.save("Model/B_CNN_40XB.h5")

# Retrieve a list of accuracy results on training and test data
# sets for each training epoch

print("[INFO] EVALUATE  MODEL...")


#SaveHistory(history,'40XB')
# list all data in history
print(history.history.keys())


PlotFigure(history,MyModel,X_test,y_test,'40XB')


  [1]: https://i.stack.imgur.com/CyX5P.png


PlotFigure is defined in UsefulFunctions.py:

import matplotlib
#matplotlib.use('Agg')
import matplotlib.pyplot as p
p.switch_backend('agg')



from sklearn.metrics import classification_report


#############################################################################     

def PlotFigure(history,MyModel,X_test,y_test,Val):
   acc = history.history['accuracy']
   val_acc = history.history['val_accuracy']

# Retrieve a list of list results on training and test data
# sets for each training epoch
   loss = history.history['loss']
   val_loss = history.history['val_loss']

# Get number of epochs
   epochs = range(len(acc))

# Plot training and validation accuracy per epoch
   p.ioff()
   fig1 = p.figure()
   p.plot(epochs, acc,label='acc')
   p.plot(epochs, val_acc,label='val_acc')
   p.title('Training and validation accuracy')
   p.xlabel("Epoch #")
   p.ylabel("Validation Accuracy")
   p.legend()
#   fig1 = p.gcf()
#   
#   p.draw()
   fig1.savefig('Model/Training_Validation_Accuracy_'+Val+'.png',dpi=300)
   p.show()
   p.close(fig1)
   #p.figure()

   # Plot training and validation loss per epoch
   fig2 = p.figure()
   p.plot(epochs, loss,label='Loss')
   p.plot(epochs, val_loss,label='val_Loss')
   p.title('Training and validation loss')
   p.xlabel("Epoch #")
   p.ylabel("Validation Loss")
   p.legend()
#   fig2 = p.gcf()
#   
#   p.draw()
   fig2.savefig('Model/Training_Validation_Loss_'+Val+'.png',dpi=300)
   p.show()
   p.close(fig2)

   print('Model/Training_Validation_Loss_'+Val+'.png')

   print("[INFO] CLASSIFICATION REPORT...")

   from numpy import argmax
   y_pred_prob=MyModel.predict(X_test)
   y_pred=argmax(y_pred_prob,axis=1)
   y_test=argmax(y_test,axis=1)
   label_test_test_testNames=["Benign","Malignant"]
   print (classification_report(y_test,y_pred,target_names=label_test_test_testNames))

   print("[INFO] ROC CURVES...")

   import scikitplot as skp
   skp.metrics.plot_roc(y_test, y_pred_prob)
   fig3 = p.gcf()
   p.show()
   p.draw()
   fig3.savefig('History/ROC_'+Val+'.png',dpi=300)