Weighted binary UNET segmentation only predicts background on the test set, despite 0.8 Dice on the validation set
0 votes
/ 24 March 2020

I am running a binary segmentation task using a UNET model on 3000 images. My foreground pixels (a particular cell type) are rarer than my background pixels (a tissue type), at a ratio of roughly 3:1 (three times more background than foreground). To try to correct this imbalance I use a weighted binary cross-entropy loss. To measure how well each model performs I use the Dice coefficient on the validation set.

What I find (as shown below) is that my training loss decreases and my training Dice score increases quite nicely. However, although my validation loss initially decreases, it then jumps around quite erratically, and likewise the validation Dice coefficient can drop from around 0.7/0.8 down to the 0.4 level quite late in training. Nevertheless it seems to stabilise at around 0.8, which suggests a good ability to segment the images, but when I take my model and apply it to the test data it predicts only background pixels. I am confused as to how I can get a Dice coefficient of 0.8 on the validation set and yet fail to classify any of the foreground correctly on the test set. My next step is augmentation, but I would still have thought my model should manage without it. Any ideas on what I could change, tune or look at would be greatly appreciated. Code and output below.
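For reference, dice_coef is imported from evaluation but not shown in the post; a typical soft-Dice implementation for Keras (an assumed stand-in for evaluation.dice_coef, not the poster's actual code) looks like this:

from tensorflow.keras import backend as K

def dice_coef(y_true, y_pred, smooth=1.0):
    # overlap measure: 2*|A ∩ B| / (|A| + |B|), smoothed to avoid 0/0
    y_true_f = K.flatten(y_true)
    y_pred_f = K.flatten(y_pred)
    intersection = K.sum(y_true_f * y_pred_f)
    return (2.0 * intersection + smooth) / (K.sum(y_true_f) + K.sum(y_pred_f) + smooth)

Note that as a training metric this is evaluated on the raw sigmoid probabilities, not on thresholded masks, so a val_dice_coef of 0.8 does not by itself guarantee that predictions thresholded at 0.5 contain any foreground.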

import os
import sys
import csv
import cv2
import glob
import numpy as np
import pickle
import random
import argparse
import json
import tensorflow as tf
from sklearn.utils import class_weight
from tensorflow import keras
from skimage.transform import resize
from skimage import img_as_bool
from tensorflow.keras import backend as K
from tensorflow.keras.losses import binary_crossentropy

from resunet_multi import Unet
from fcn8 import FCN
from utilities import saveModel, saveHistory
from evaluation import dice_coef_loss, dice_coef
from custom_datagenerator_three import DataGenerator
from custom_loss_functions import weightedCatXEntropy, weighted_cross_entropy
import tfrecord_read



# note: this local class shadows the Unet imported from resunet_multi above
class Unet():
    def __init__(self, imgDims, nOutput=1, finalActivation='sigmoid', activation='relu', padding='same'):
        self.imgDims = imgDims
        self.activation = activation
        self.finalActivation = finalActivation
        self.padding = padding
        self.nOutput = nOutput


    def convBlocks(self, x, filters, kernelSize=(3,3), padding='same', strides=1):

        x = keras.layers.BatchNormalization()(x)
        x = keras.layers.Activation(self.activation)(x)
        x = keras.layers.Conv2D(filters, kernelSize, padding=padding, strides=strides)(x)

        return x


    def identity(self, x, xInput, f, padding='same', strides=1):

        skip = keras.layers.Conv2D(f, kernel_size=(1, 1), padding=padding, strides=strides)(xInput)
        skip = keras.layers.BatchNormalization()(skip)
        output = keras.layers.Add()([skip, x])

        return output


    def residualBlock(self, xIn, f, stride):

        res = self.convBlocks(xIn, f, strides=stride)
        res = self.convBlocks(res, f, strides=1)
        output = self.identity(res, xIn, f, strides=stride)

        return output


    def upSampling(self, x, xInput):

        x = keras.layers.UpSampling2D((2,2))(x)
        x = keras.layers.Concatenate()([x, xInput])

        return x


    def encoder(self, x, filters, kernelSize=(3,3), padding='same', strides=1):

        e1 = keras.layers.Conv2D(filters[0], kernelSize, padding=padding, strides=strides)(x)
        e1 = self.convBlocks(e1, filters[0])

        shortcut = keras.layers.Conv2D(filters[0], kernel_size=(1, 1), padding=padding, strides=strides)(x)
        shortcut = keras.layers.BatchNormalization()(shortcut)
        e1Output = keras.layers.Add()([e1, shortcut])

        e2 = self.residualBlock(e1Output, filters[1], stride=2)
        e3 = self.residualBlock(e2, filters[2], stride=2)
        e4 = self.residualBlock(e3, filters[3], stride=2)
        e5 = self.residualBlock(e4, filters[4], stride=2)

        return e1Output, e2, e3, e4, e5


    def bridge(self, x, filters):

        b1 = self.convBlocks(x, filters, strides=1)
        b2 = self.convBlocks(b1, filters, strides=1)

        return b2


    def decoder(self, b2, e1, e2, e3, e4, filters, kernelSize=(3,3), padding='same', strides=1):

        x = self.upSampling(b2, e4)
        d1 = self.convBlocks(x, filters[4])
        d1 = self.convBlocks(d1, filters[4])
        d1 = self.identity(d1, x, filters[4])

        x = self.upSampling(d1, e3)
        d2 = self.convBlocks(x, filters[3])
        d2 = self.convBlocks(d2, filters[3])
        d2 = self.identity(d2, x, filters[3])

        x = self.upSampling(d2, e2)
        d3 = self.convBlocks(x, filters[2])
        d3 = self.convBlocks(d3, filters[2])
        d3 = self.identity(d3, x, filters[2])

        x = self.upSampling(d3, e1)
        d4 = self.convBlocks(x, filters[1])
        d4 = self.convBlocks(d4, filters[1])
        d4 = self.identity(d4, x, filters[1])

        return d4


    def ResUnet(self, filters = [16, 32, 64, 128, 256]):

        inputs = keras.layers.Input((self.imgDims, self.imgDims, 3))

        e1, e2, e3, e4, e5 = self.encoder(inputs, filters)
        b2 = self.bridge(e5, filters[4])
        d4 = self.decoder(b2, e1, e2, e3, e4, filters)

        x = keras.layers.Conv2D(self.nOutput, (1, 1), padding='same', activation=self.finalActivation)(d4)
        model = keras.models.Model(inputs, x)

        return model
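# Example usage (a sketch; 256 stands in for params['imageDims']):
#   unet = Unet(256, nOutput=1, finalActivation='sigmoid')
#   model = unet.ResUnet()   # sigmoid head -> per-pixel foreground probability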




# from Lars' blog
def weighted_cross_entropy(beta):

    def convert_to_logits(y_pred):
        # undo the sigmoid so the numerically stable logits form can be used
        y_pred = tf.clip_by_value(y_pred, tf.keras.backend.epsilon(), 1 - tf.keras.backend.epsilon())
        return tf.math.log(y_pred / (1 - y_pred))

    def loss(y_true, y_pred):
        y_pred = convert_to_logits(y_pred)
        loss = tf.nn.weighted_cross_entropy_with_logits(logits=y_pred, labels=y_true, pos_weight=beta)
        return tf.reduce_mean(loss)

    return loss
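# A quick way to see what pos_weight does (illustrative only, not run here):
#   y_true = tf.constant([[1.0], [0.0]])
#   y_pred = tf.constant([[0.5], [0.5]])
#   weighted_cross_entropy(3.0)(y_true, y_pred)
# With beta=3.0 the positive (foreground) term is weighted three times as
# heavily, matching the ~3:1 background:foreground imbalance described above.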


def trainSegmentationModel(args):

    if args['weightfile'] is not None:
        with open(args['weightfile'], 'r') as txtFile:
            weights = list(csv.reader(txtFile, delimiter=','))
        weights= [int(float(w)) for w in weights[0]]

    with open(args['paramfile']) as jsonFile:
        params = json.load(jsonFile)

    batchSize = int(params['batchSize'])
    epoch = int(params['epoch'])
    ratio = float(params['ratio'])
    augment = params['augment']

    recordsPath = args['recordpath']
    tfFiles = glob.glob(os.path.join(recordsPath,'*.tfrecords'))
    trainFiles=tfFiles[:-1]
    validFiles=tfFiles[-1:]

    trainNum = tfrecord_read.getRecordNumber(trainFiles)
    validNum = tfrecord_read.getRecordNumber(validFiles)
    trainSteps = np.floor(trainNum/batchSize)
    validSteps = np.floor(validNum/batchSize)

    if args['model'] == 'unet':
        unet =  Unet(int(params['imageDims']), nOutput = int(params['nClasses']), finalActivation=params['final'])
        model = unet.ResUnet()
    elif args['model'] == 'fcn8':
        fcn = FCN(int(params['imageDims']), nClasses = int(params['nClasses']), finalActivation=params['final'])
        model = fcn.getFCN8()
    else:
        print('No model requested, please update parameter file')
        sys.exit()

    #if args['weightfile'] is None:
        # calculateWeights

    if int(params['nClasses']) == 1:
        if args['weightfile'] is None:
            loss = binary_crossentropy
        else:
            print('using weighted binary cross entropy')
            loss = weighted_cross_entropy(3.0)
    else:
        loss = weightedCatXEntropy

    adam = keras.optimizers.Adam()
    model.compile(optimizer=adam, loss=loss, metrics=[dice_coef])

    history = model.fit(tfrecord_read.getShards(trainFiles, batchSize=batchSize, dataSize=trainNum, augment=augment),
                        steps_per_epoch=trainSteps, epochs=epoch,
                        validation_data=tfrecord_read.getShards(validFiles),
                        validation_steps=validSteps)


    model.save(args['name']+'tf.h5')

    saveModel(model, args['name'])
    saveHistory(history, args['name']+'_hist')

    #getPrediction(model, validGenerator, validIds)

if __name__ == '__main__':

    ap = argparse.ArgumentParser()
    ap.add_argument('-rp', '--recordpath', required=True, help='path to tfrecords')
    ap.add_argument('-m', '--model', required=True, help='neural network model to use')
    ap.add_argument('-n', '--name', required=True, help='name to save the model with')
    ap.add_argument('-wf', '--weightfile', help='file containing list of class weights for unbalanced datasets')
    ap.add_argument('-pf', '--paramfile', help='file containing parameters')

    args = vars(ap.parse_args())

    trainSegmentationModel(args)
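Since the question is about test-time behaviour, here is a minimal inference sketch (the file name, tile path and /255 scaling are assumptions; the preprocessing must mirror whatever tfrecord_read.getShards does to the training data) showing where an all-background result can come from:

import cv2
import numpy as np
from tensorflow import keras

# Load without compiling so the custom loss/metric don't need re-registering.
model = keras.models.load_model('mymodeltf.h5', compile=False)  # file written by model.save above (name hypothetical)

img = cv2.imread('test_tile.png')               # hypothetical test tile
img = img.astype(np.float32) / 255.0            # must match the scaling used when building the tfrecords
probs = model.predict(img[np.newaxis, ...])[0]  # (H, W, 1) sigmoid foreground probabilities
mask = (probs > 0.5).astype(np.uint8)           # explicit threshold into a binary mask

# If probs never crosses 0.5, the all-background output is a threshold /
# preprocessing mismatch rather than a model that learned nothing.
print(probs.min(), probs.max())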

[two images omitted: training output plots]

2020-03-23 23:42:53.482400: I tensorflow/stream_executor/platform/default/dso_loader.cc:44] Successfully opened dynamic library libcudnn.so.7
Epoch 1/120
19/19 [==============================] - 18s 967ms/step - loss: 0.8603 - dice_coef: 0.3880 - val_loss: 16.9239 - val_dice_coef: 0.4256
Epoch 2/120
19/19 [==============================] - 7s 373ms/step - loss: 0.6674 - dice_coef: 0.4310 - val_loss: 17.5369 - val_dice_coef: 0.4141
Epoch 3/120
19/19 [==============================] - 7s 378ms/step - loss: 0.5580 - dice_coef: 0.4642 - val_loss: 17.8778 - val_dice_coef: 0.3959
Epoch 4/120
19/19 [==============================] - 7s 377ms/step - loss: 0.4772 - dice_coef: 0.4728 - val_loss: 16.7880 - val_dice_coef: 0.4323
Epoch 5/120
19/19 [==============================] - 7s 377ms/step - loss: 0.5147 - dice_coef: 0.4678 - val_loss: 17.3139 - val_dice_coef: 0.4245
Epoch 6/120
19/19 [==============================] - 7s 380ms/step - loss: 0.4725 - dice_coef: 0.4724 - val_loss: 17.8016 - val_dice_coef: 0.4016
Epoch 7/120
19/19 [==============================] - 7s 373ms/step - loss: 0.4360 - dice_coef: 0.5010 - val_loss: 17.5032 - val_dice_coef: 0.4097
Epoch 8/120
19/19 [==============================] - 7s 375ms/step - loss: 0.3695 - dice_coef: 0.5175 - val_loss: 16.6216 - val_dice_coef: 0.4257
Epoch 9/120
19/19 [==============================] - 7s 379ms/step - loss: 0.3246 - dice_coef: 0.5339 - val_loss: 16.9754 - val_dice_coef: 0.3950
Epoch 10/120
19/19 [==============================] - 7s 375ms/step - loss: 0.3257 - dice_coef: 0.5950 - val_loss: 15.0118 - val_dice_coef: 0.3619
Epoch 11/120
19/19 [==============================] - 7s 377ms/step - loss: 0.3922 - dice_coef: 0.5141 - val_loss: 4.4225 - val_dice_coef: 0.2791
Epoch 12/120
19/19 [==============================] - 7s 376ms/step - loss: 0.2509 - dice_coef: 0.6073 - val_loss: 2.5347 - val_dice_coef: 0.4786
Epoch 13/120
19/19 [==============================] - 7s 374ms/step - loss: 0.2464 - dice_coef: 0.6163 - val_loss: 4.7887 - val_dice_coef: 0.4575
Epoch 14/120
19/19 [==============================] - 7s 375ms/step - loss: 0.2024 - dice_coef: 0.6494 - val_loss: 3.1056 - val_dice_coef: 0.3725
Epoch 15/120
19/19 [==============================] - 7s 377ms/step - loss: 0.1785 - dice_coef: 0.6711 - val_loss: 4.0611 - val_dice_coef: 0.3884
Epoch 16/120
19/19 [==============================] - 7s 376ms/step - loss: 0.2344 - dice_coef: 0.6385 - val_loss: 1.2271 - val_dice_coef: 0.4691
Epoch 17/120
19/19 [==============================] - 7s 379ms/step - loss: 0.1834 - dice_coef: 0.6582 - val_loss: 4.9993 - val_dice_coef: 0.3952
Epoch 18/120
19/19 [==============================] - 7s 376ms/step - loss: 0.1947 - dice_coef: 0.6657 - val_loss: 5.6049 - val_dice_coef: 0.3756
Epoch 19/120
19/19 [==============================] - 7s 375ms/step - loss: 0.1872 - dice_coef: 0.6697 - val_loss: 0.7881 - val_dice_coef: 0.4664
Epoch 20/120
19/19 [==============================] - 7s 374ms/step - loss: 0.1691 - dice_coef: 0.6948 - val_loss: 0.3525 - val_dice_coef: 0.6092
Epoch 21/120
19/19 [==============================] - 7s 375ms/step - loss: 0.1486 - dice_coef: 0.6958 - val_loss: 1.7557 - val_dice_coef: 0.4684
Epoch 22/120
19/19 [==============================] - 7s 377ms/step - loss: 0.1428 - dice_coef: 0.7319 - val_loss: 4.9089 - val_dice_coef: 0.2439
Epoch 23/120
19/19 [==============================] - 7s 376ms/step - loss: 0.1223 - dice_coef: 0.7662 - val_loss: 5.0603 - val_dice_coef: 0.2721
Epoch 24/120
19/19 [==============================] - 7s 376ms/step - loss: 0.1146 - dice_coef: 0.7854 - val_loss: 4.1543 - val_dice_coef: 0.3586
Epoch 25/120
19/19 [==============================] - 7s 375ms/step - loss: 0.1036 - dice_coef: 0.8073 - val_loss: 0.3904 - val_dice_coef: 0.7103
Epoch 26/120
19/19 [==============================] - 7s 374ms/step - loss: 0.1095 - dice_coef: 0.7864 - val_loss: 3.9916 - val_dice_coef: 0.4583
Epoch 27/120
19/19 [==============================] - 7s 377ms/step - loss: 0.0971 - dice_coef: 0.8199 - val_loss: 0.3883 - val_dice_coef: 0.6763
Epoch 28/120
19/19 [==============================] - 7s 374ms/step - loss: 0.0905 - dice_coef: 0.8206 - val_loss: 0.9827 - val_dice_coef: 0.5653
Epoch 29/120
19/19 [==============================] - 7s 374ms/step - loss: 0.1017 - dice_coef: 0.8071 - val_loss: 0.3407 - val_dice_coef: 0.6436
Epoch 30/120
19/19 [==============================] - 7s 378ms/step - loss: 0.1185 - dice_coef: 0.7693 - val_loss: 2.5590 - val_dice_coef: 0.5977
Epoch 31/120
19/19 [==============================] - 7s 374ms/step - loss: 0.1093 - dice_coef: 0.7797 - val_loss: 3.4920 - val_dice_coef: 0.5041
Epoch 32/120
19/19 [==============================] - 7s 376ms/step - loss: 0.0889 - dice_coef: 0.8258 - val_loss: 0.6517 - val_dice_coef: 0.7779
Epoch 33/120
19/19 [==============================] - 7s 375ms/step - loss: 0.1996 - dice_coef: 0.7409 - val_loss: 17.6829 - val_dice_coef: 0.4130
Epoch 34/120
19/19 [==============================] - 7s 376ms/step - loss: 0.1839 - dice_coef: 0.6399 - val_loss: 18.2748 - val_dice_coef: 0.4018
Epoch 35/120
19/19 [==============================] - 7s 373ms/step - loss: 0.1608 - dice_coef: 0.6845 - val_loss: 17.2831 - val_dice_coef: 0.4154
Epoch 36/120
19/19 [==============================] - 7s 374ms/step - loss: 0.1627 - dice_coef: 0.7264 - val_loss: 16.0020 - val_dice_coef: 0.3925
Epoch 37/120
19/19 [==============================] - 7s 375ms/step - loss: 0.1358 - dice_coef: 0.7272 - val_loss: 11.9950 - val_dice_coef: 0.4062
Epoch 38/120
19/19 [==============================] - 7s 371ms/step - loss: 0.1085 - dice_coef: 0.7595 - val_loss: 3.8868 - val_dice_coef: 0.4795
Epoch 39/120
19/19 [==============================] - 7s 372ms/step - loss: 0.0900 - dice_coef: 0.7894 - val_loss: 0.4407 - val_dice_coef: 0.6699
Epoch 40/120
19/19 [==============================] - 7s 373ms/step - loss: 0.0849 - dice_coef: 0.8231 - val_loss: 0.8745 - val_dice_coef: 0.7012
Epoch 41/120
19/19 [==============================] - 7s 375ms/step - loss: 0.0773 - dice_coef: 0.8268 - val_loss: 0.6437 - val_dice_coef: 0.6693
Epoch 42/120
19/19 [==============================] - 7s 377ms/step - loss: 0.0798 - dice_coef: 0.8382 - val_loss: 6.8845 - val_dice_coef: 0.4050
Epoch 43/120
19/19 [==============================] - 7s 378ms/step - loss: 0.0767 - dice_coef: 0.8474 - val_loss: 5.3519 - val_dice_coef: 0.3924
Epoch 44/120
19/19 [==============================] - 7s 375ms/step - loss: 0.0729 - dice_coef: 0.8471 - val_loss: 7.9764 - val_dice_coef: 0.3954
Epoch 45/120
19/19 [==============================] - 7s 375ms/step - loss: 0.0622 - dice_coef: 0.8717 - val_loss: 0.9704 - val_dice_coef: 0.7238
Epoch 46/120
19/19 [==============================] - 7s 374ms/step - loss: 0.0559 - dice_coef: 0.8816 - val_loss: 5.9124 - val_dice_coef: 0.4447
Epoch 47/120
19/19 [==============================] - 7s 375ms/step - loss: 0.0548 - dice_coef: 0.8928 - val_loss: 4.9769 - val_dice_coef: 0.5308
Epoch 48/120
19/19 [==============================] - 7s 377ms/step - loss: 0.0488 - dice_coef: 0.8929 - val_loss: 4.1530 - val_dice_coef: 0.5803
Epoch 49/120
19/19 [==============================] - 7s 374ms/step - loss: 0.0556 - dice_coef: 0.8994 - val_loss: 3.5601 - val_dice_coef: 0.6592
Epoch 50/120
19/19 [==============================] - 7s 375ms/step - loss: 0.0643 - dice_coef: 0.8629 - val_loss: 6.6086 - val_dice_coef: 0.5040
Epoch 51/120
19/19 [==============================] - 7s 375ms/step - loss: 0.0528 - dice_coef: 0.8898 - val_loss: 2.1360 - val_dice_coef: 0.7140
Epoch 52/120
19/19 [==============================] - 7s 375ms/step - loss: 0.0449 - dice_coef: 0.9081 - val_loss: 0.9738 - val_dice_coef: 0.7868
Epoch 53/120
19/19 [==============================] - 7s 374ms/step - loss: 0.0439 - dice_coef: 0.9159 - val_loss: 0.6328 - val_dice_coef: 0.8270
Epoch 54/120
19/19 [==============================] - 7s 376ms/step - loss: 0.0415 - dice_coef: 0.9069 - val_loss: 0.4223 - val_dice_coef: 0.8094
Epoch 55/120
19/19 [==============================] - 7s 375ms/step - loss: 0.0401 - dice_coef: 0.9188 - val_loss: 1.0328 - val_dice_coef: 0.7935
Epoch 56/120
19/19 [==============================] - 7s 372ms/step - loss: 0.0384 - dice_coef: 0.9227 - val_loss: 0.6739 - val_dice_coef: 0.8347
Epoch 57/120
19/19 [==============================] - 7s 372ms/step - loss: 0.0383 - dice_coef: 0.9164 - val_loss: 1.4145 - val_dice_coef: 0.7669
Epoch 58/120
19/19 [==============================] - 7s 374ms/step - loss: 0.0373 - dice_coef: 0.9233 - val_loss: 0.8797 - val_dice_coef: 0.8024
Epoch 59/120
19/19 [==============================] - 7s 373ms/step - loss: 0.0355 - dice_coef: 0.9147 - val_loss: 1.3352 - val_dice_coef: 0.7707
Epoch 60/120
19/19 [==============================] - 7s 374ms/step - loss: 0.0346 - dice_coef: 0.9253 - val_loss: 1.1247 - val_dice_coef: 0.7980
Epoch 61/120
19/19 [==============================] - 7s 377ms/step - loss: 0.0328 - dice_coef: 0.9332 - val_loss: 0.5741 - val_dice_coef: 0.8103
Epoch 62/120
19/19 [==============================] - 7s 375ms/step - loss: 0.0347 - dice_coef: 0.9295 - val_loss: 0.4940 - val_dice_coef: 0.8456
Epoch 63/120
19/19 [==============================] - 7s 374ms/step - loss: 0.0341 - dice_coef: 0.9242 - val_loss: 1.2802 - val_dice_coef: 0.7613
Epoch 64/120
19/19 [==============================] - 7s 377ms/step - loss: 0.0365 - dice_coef: 0.9346 - val_loss: 8.1260 - val_dice_coef: 0.5383
Epoch 65/120
19/19 [==============================] - 7s 376ms/step - loss: 0.0415 - dice_coef: 0.9065 - val_loss: 0.8418 - val_dice_coef: 0.7737
Epoch 66/120
19/19 [==============================] - 7s 374ms/step - loss: 0.0415 - dice_coef: 0.9229 - val_loss: 0.6137 - val_dice_coef: 0.8029
Epoch 67/120
19/19 [==============================] - 7s 373ms/step - loss: 0.0409 - dice_coef: 0.9133 - val_loss: 0.7414 - val_dice_coef: 0.8153
Epoch 68/120
19/19 [==============================] - 7s 376ms/step - loss: 0.0326 - dice_coef: 0.9235 - val_loss: 0.4159 - val_dice_coef: 0.8404
Epoch 69/120
19/19 [==============================] - 7s 373ms/step - loss: 0.0300 - dice_coef: 0.9414 - val_loss: 0.6555 - val_dice_coef: 0.8303
Epoch 70/120
19/19 [==============================] - 7s 374ms/step - loss: 0.0303 - dice_coef: 0.9345 - val_loss: 0.6321 - val_dice_coef: 0.8413
Epoch 71/120
19/19 [==============================] - 7s 375ms/step - loss: 0.0274 - dice_coef: 0.9432 - val_loss: 1.3845 - val_dice_coef: 0.8003
Epoch 72/120
19/19 [==============================] - 7s 374ms/step - loss: 0.0337 - dice_coef: 0.9374 - val_loss: 1.8413 - val_dice_coef: 0.7675
Epoch 73/120
19/19 [==============================] - 7s 377ms/step - loss: 0.0257 - dice_coef: 0.9402 - val_loss: 0.5884 - val_dice_coef: 0.8508
Epoch 74/120
19/19 [==============================] - 7s 375ms/step - loss: 0.0248 - dice_coef: 0.9534 - val_loss: 0.4530 - val_dice_coef: 0.8600
Epoch 75/120
19/19 [==============================] - 7s 376ms/step - loss: 0.0251 - dice_coef: 0.9457 - val_loss: 0.6024 - val_dice_coef: 0.8688
Epoch 76/120
19/19 [==============================] - 7s 376ms/step - loss: 0.0242 - dice_coef: 0.9555 - val_loss: 0.5259 - val_dice_coef: 0.8601
Epoch 77/120
19/19 [==============================] - 7s 374ms/step - loss: 0.0246 - dice_coef: 0.9511 - val_loss: 0.4742 - val_dice_coef: 0.8663
Epoch 78/120
19/19 [==============================] - 7s 373ms/step - loss: 0.0221 - dice_coef: 0.9538 - val_loss: 0.7453 - val_dice_coef: 0.8572
Epoch 79/120
19/19 [==============================] - 7s 372ms/step - loss: 0.0565 - dice_coef: 0.8985 - val_loss: 9.8433 - val_dice_coef: 0.5403
Epoch 80/120
19/19 [==============================] - 7s 374ms/step - loss: 0.0503 - dice_coef: 0.8865 - val_loss: 3.9542 - val_dice_coef: 0.7076
Epoch 81/120
19/19 [==============================] - 7s 374ms/step - loss: 0.0384 - dice_coef: 0.9231 - val_loss: 0.7376 - val_dice_coef: 0.7746
Epoch 82/120
19/19 [==============================] - 7s 377ms/step - loss: 0.0361 - dice_coef: 0.9250 - val_loss: 0.5281 - val_dice_coef: 0.8637
Epoch 83/120
19/19 [==============================] - 7s 375ms/step - loss: 0.0294 - dice_coef: 0.9278 - val_loss: 0.8857 - val_dice_coef: 0.8243
Epoch 84/120
19/19 [==============================] - 7s 377ms/step - loss: 0.0252 - dice_coef: 0.9467 - val_loss: 0.5765 - val_dice_coef: 0.8330
Epoch 85/120
19/19 [==============================] - 7s 377ms/step - loss: 0.0275 - dice_coef: 0.9432 - val_loss: 0.6763 - val_dice_coef: 0.8251
Epoch 86/120
19/19 [==============================] - 7s 375ms/step - loss: 0.0270 - dice_coef: 0.9383 - val_loss: 0.5945 - val_dice_coef: 0.8409
Epoch 87/120
19/19 [==============================] - 7s 374ms/step - loss: 0.0236 - dice_coef: 0.9523 - val_loss: 0.6267 - val_dice_coef: 0.8681
Epoch 88/120
19/19 [==============================] - 7s 376ms/step - loss: 0.0237 - dice_coef: 0.9554 - val_loss: 0.5275 - val_dice_coef: 0.8734
Epoch 89/120
19/19 [==============================] - 7s 375ms/step - loss: 0.0226 - dice_coef: 0.9531 - val_loss: 0.4860 - val_dice_coef: 0.8775
Epoch 90/120
19/19 [==============================] - 7s 376ms/step - loss: 0.0219 - dice_coef: 0.9543 - val_loss: 0.6794 - val_dice_coef: 0.8309
Epoch 91/120
19/19 [==============================] - 7s 376ms/step - loss: 0.0215 - dice_coef: 0.9577 - val_loss: 0.7977 - val_dice_coef: 0.8600
Epoch 92/120
19/19 [==============================] - 7s 373ms/step - loss: 0.0194 - dice_coef: 0.9608 - val_loss: 0.6134 - val_dice_coef: 0.8606
Epoch 93/120
19/19 [==============================] - 7s 374ms/step - loss: 0.0197 - dice_coef: 0.9620 - val_loss: 0.5135 - val_dice_coef: 0.8610
Epoch 94/120
19/19 [==============================] - 7s 376ms/step - loss: 0.0196 - dice_coef: 0.9629 - val_loss: 0.7956 - val_dice_coef: 0.8588
Epoch 95/120
19/19 [==============================] - 7s 378ms/step - loss: 0.0299 - dice_coef: 0.9438 - val_loss: 7.4833 - val_dice_coef: 0.5793
Epoch 96/120
19/19 [==============================] - 7s 376ms/step - loss: 0.0307 - dice_coef: 0.9262 - val_loss: 0.9232 - val_dice_coef: 0.8360
Epoch 97/120
19/19 [==============================] - 7s 378ms/step - loss: 0.0262 - dice_coef: 0.9426 - val_loss: 0.5594 - val_dice_coef: 0.8638
Epoch 98/120
19/19 [==============================] - 7s 377ms/step - loss: 0.0200 - dice_coef: 0.9564 - val_loss: 0.6581 - val_dice_coef: 0.8586
Epoch 99/120
19/19 [==============================] - 7s 375ms/step - loss: 0.0211 - dice_coef: 0.9600 - val_loss: 0.7802 - val_dice_coef: 0.8661
Epoch 100/120
19/19 [==============================] - 7s 377ms/step - loss: 0.0189 - dice_coef: 0.9610 - val_loss: 0.7265 - val_dice_coef: 0.8554
Epoch 101/120
19/19 [==============================] - 7s 377ms/step - loss: 0.0302 - dice_coef: 0.9543 - val_loss: 1.3809 - val_dice_coef: 0.7834
Epoch 102/120
19/19 [==============================] - 7s 374ms/step - loss: 0.0557 - dice_coef: 0.8781 - val_loss: 0.8438 - val_dice_coef: 0.7659
Epoch 103/120
19/19 [==============================] - 7s 374ms/step - loss: 0.0467 - dice_coef: 0.8987 - val_loss: 7.1204 - val_dice_coef: 0.4327
Epoch 104/120
19/19 [==============================] - 7s 379ms/step - loss: 0.0332 - dice_coef: 0.9198 - val_loss: 2.7852 - val_dice_coef: 0.6586
Epoch 105/120
19/19 [==============================] - 7s 375ms/step - loss: 0.0247 - dice_coef: 0.9505 - val_loss: 2.4379 - val_dice_coef: 0.7220
Epoch 106/120
19/19 [==============================] - 7s 372ms/step - loss: 0.0459 - dice_coef: 0.9279 - val_loss: 4.4471 - val_dice_coef: 0.4627
Epoch 107/120
19/19 [==============================] - 7s 374ms/step - loss: 0.0451 - dice_coef: 0.9048 - val_loss: 6.7964 - val_dice_coef: 0.5332
Epoch 108/120
19/19 [==============================] - 7s 373ms/step - loss: 0.0326 - dice_coef: 0.9374 - val_loss: 1.3322 - val_dice_coef: 0.7683
Epoch 109/120
19/19 [==============================] - 7s 372ms/step - loss: 0.0831 - dice_coef: 0.8437 - val_loss: 2.0922 - val_dice_coef: 0.4286
Epoch 110/120
19/19 [==============================] - 7s 376ms/step - loss: 0.0476 - dice_coef: 0.8956 - val_loss: 6.4227 - val_dice_coef: 0.4907
Epoch 111/120
19/19 [==============================] - 7s 373ms/step - loss: 0.0295 - dice_coef: 0.9319 - val_loss: 3.0062 - val_dice_coef: 0.7026
Epoch 112/120
19/19 [==============================] - 7s 375ms/step - loss: 0.0245 - dice_coef: 0.9472 - val_loss: 2.8628 - val_dice_coef: 0.6768
Epoch 113/120
19/19 [==============================] - 7s 376ms/step - loss: 0.0246 - dice_coef: 0.9525 - val_loss: 1.4482 - val_dice_coef: 0.7820
Epoch 114/120
19/19 [==============================] - 7s 371ms/step - loss: 0.0212 - dice_coef: 0.9555 - val_loss: 0.6319 - val_dice_coef: 0.8298
Epoch 115/120
19/19 [==============================] - 7s 374ms/step - loss: 0.0246 - dice_coef: 0.9467 - val_loss: 1.8630 - val_dice_coef: 0.7160
Epoch 116/120
19/19 [==============================] - 7s 374ms/step - loss: 0.0206 - dice_coef: 0.9568 - val_loss: 0.7871 - val_dice_coef: 0.8268
Epoch 117/120
19/19 [==============================] - 7s 375ms/step - loss: 0.0186 - dice_coef: 0.9673 - val_loss: 1.0556 - val_dice_coef: 0.7975
Epoch 118/120
19/19 [==============================] - 7s 373ms/step - loss: 0.0180 - dice_coef: 0.9652 - val_loss: 1.8131 - val_dice_coef: 0.7766
Epoch 119/120
19/19 [==============================] - 7s 377ms/step - loss: 0.0169 - dice_coef: 0.9655 - val_loss: 0.6609 - val_dice_coef: 0.8506
Epoch 120/120
19/19 [==============================] - 7s 374ms/step - loss: 0.0160 - dice_coef: 0.9690 - val_loss: 1.1710 - val_dice_coef: 0.8115