Floating point exception when using CNN with Keras
0 votes
/ 12 January 2020

I am trying to implement a simple CNN in Keras for regression. However, when I increase the number of layers in my network, it throws a floating point exception.

My model is defined below.


model.py


from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Dropout, Flatten, Dense
from keras.optimizers import Adadelta
from keras import regularizers

from config import get_config


def create_model(image_shape):
    config = get_config()
    kernel_size = config['kernel_size']
    out_channel = config['out_channel']
    n_layers = config['n_layers']
    padding = config['padding']
    dense_units = config['dense_units']
    dropout = config['dropout']
    pool_size = config['pool_size']
    activation = config['activation']
    lr = config['lr']
    l2 = config['l2']

    model = Sequential()

    # First conv + pooling block
    model.add(Conv2D(out_channel, kernel_size=(kernel_size, kernel_size),
                     padding=padding,
                     input_shape=image_shape, activation=activation))
    model.add(MaxPooling2D(pool_size=(pool_size, pool_size), padding=padding))

    # Additional conv blocks, with the channel count scaled per layer
    for i in range(n_layers):
        model.add(Conv2D((out_channel*(2^(i))),
                         kernel_size=(kernel_size, kernel_size),
                         activation=activation, padding=padding,
                         kernel_regularizer=regularizers.l2(l2)))
        model.add(Dropout(dropout))
        model.add(MaxPooling2D(pool_size=(pool_size, pool_size), padding=padding))

    # Final conv block before the dense head
    model.add(Conv2D(out_channel, kernel_size=(kernel_size, kernel_size),
                     activation=activation, padding=padding))
    model.add(Dropout(dropout))
    model.add(MaxPooling2D(pool_size=(pool_size, pool_size), padding=padding))

    model.add(Flatten())
    model.add(Dense(dense_units, activation=activation))

    model.add(Dense(dense_units, activation=activation))

    # Regression head: 4 continuous outputs
    model.add(Dense(4, activation='linear'))
    model.compile(loss='mean_squared_error',
                  optimizer=Adadelta(lr=lr),
                  metrics=['mse', 'mae'])

    return model

The hyperparameter configuration is defined in the following file.


config.py


config = dict()
config['n_layers'] = 3
config['out_channel'] = 8
config['kernel_size'] = 3
config['pool_size'] = 2
config['dropout'] = 0.25
config['dense_units'] = 128
config['activation'] = 'relu'
config['padding'] = 'SAME'
config['lr'] = 0.4
config['batch_size'] = 32
config['epochs'] = 10
config['l2'] = 0.02


# Accessor called from model.py (assumed: a plain getter over the dict above)
def get_config():
    return config
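
A minimal driver along these lines is enough to trigger the crash (this is a simplified sketch, not my actual TaskA/Train.py; the image shape and the random data are placeholders, not my real dataset):


train_sketch.py


import numpy as np

from config import get_config
from model import create_model

config = get_config()

# Placeholder input shape and random data, only to exercise the model
image_shape = (128, 128, 3)
X = np.random.rand(64, *image_shape)
y = np.random.rand(64, 4)

# The exception is raised while the model is being built/compiled
model = create_model(image_shape)
model.summary()

model.fit(X, y,
          batch_size=config['batch_size'],
          epochs=config['epochs'],
          validation_split=0.1)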

Stack trace:

WARNING: Logging before flag parsing goes to stderr.
W0114 13:45:37.312639 4481535424 deprecation_wrapper.py:119] From /usr/local/lib/python3.7/site-packages/keras/backend/tensorflow_backend.py:74: The name tf.get_default_graph is deprecated. Please use tf.compat.v1.get_default_graph instead.

W0114 13:45:37.326518 4481535424 deprecation_wrapper.py:119] From /usr/local/lib/python3.7/site-packages/keras/backend/tensorflow_backend.py:517: The name tf.placeholder is deprecated. Please use tf.compat.v1.placeholder instead.

W0114 13:45:37.328626 4481535424 deprecation_wrapper.py:119] From /usr/local/lib/python3.7/site-packages/keras/backend/tensorflow_backend.py:4138: The name tf.random_uniform is deprecated. Please use tf.random.uniform instead.

W0114 13:45:37.344048 4481535424 deprecation_wrapper.py:119] From /usr/local/lib/python3.7/site-packages/keras/backend/tensorflow_backend.py:3976: The name tf.nn.max_pool is deprecated. Please use tf.nn.max_pool2d instead.

W0114 13:45:37.371679 4481535424 deprecation_wrapper.py:119] From /usr/local/lib/python3.7/site-packages/keras/backend/tensorflow_backend.py:133: The name tf.placeholder_with_default is deprecated. Please use tf.compat.v1.placeholder_with_default instead.

W0114 13:45:37.378800 4481535424 deprecation.py:506] From /usr/local/lib/python3.7/site-packages/keras/backend/tensorflow_backend.py:3445: calling dropout (from tensorflow.python.ops.nn_ops) with keep_prob is deprecated and will be removed in a future version.
Instructions for updating:
Please use `rate` instead of `keep_prob`. Rate should be set to `rate = 1 - keep_prob`.
zsh: floating point exception  python3 TaskA/Train.py
...