Training loss of a regression model goes up and down within an epoch. In addition, the model's performance on the test set is relatively poor (mean error ~40%)
0 votes
/ September 29, 2019

I am trying to build a regression model to predict apartment prices using Keras. During training the loss jumps up and down a lot and barely improves. What is the problem here? I have tried dropout, regularization, making the network deeper, shallower, wider, and narrower, increasing and decreasing the learning rate, increasing and decreasing the validation split, and increasing and decreasing the batch size.

import pandas as pd
from keras import Sequential
from keras import regularizers
from keras.callbacks import EarlyStopping
from keras.wrappers.scikit_learn import KerasRegressor
from keras.layers import Dense, Dropout, BatchNormalization
from keras.constraints import NonNeg
from keras.optimizers import Adam, RMSprop, SGD
import numpy as np
from sklearn.preprocessing import StandardScaler, MinMaxScaler

df = pd.read_excel('data.xlsx', sheet_name='Лист1')
df = df.sample(frac=1, random_state=1).reset_index(drop=True)

# .loc slicing is inclusive on both ends, so rows 0..145008 (145009 rows) go to training
train_data = df.loc[0:145008, 'id_poligon_level_3':'kitchen_area']
y_label = df.loc[0:145008, 'price_usd':'price_usd']

# the hold-out set starts at the next row so it does not overlap with the training rows
pred_test = df.loc[145009:, 'id_poligon_level_3':'kitchen_area']
pred_y_label = df.loc[145009:, 'price_usd':'price_usd']

#maxPrice = np.max(df.loc[0:146008, 'price_1sq_meter_usd':'price_1sq_meter_usd'].values)

# fit the scalers on the training data only, then reuse them for the hold-out set
scale_train_X = StandardScaler()
train_data = scale_train_X.fit_transform(train_data.values)
scale_train_Y = StandardScaler()
y_label = scale_train_Y.fit_transform(y_label.values)
#y_label /= maxPrice
# use transform (not fit_transform) so the test features are scaled with the training statistics
pred_test = scale_train_X.transform(pred_test.values)
#pred_y_label /= maxPrice
pred_y_label = scale_train_Y.transform(pred_y_label.values)

'''
data = xl.load_workbook(filename='data.xlsx')
sheet = data['Лист1']'''

#maxPrice = np.max([val[0].value for val in sheet['H2:H146009']])

'''
def get_training_data():

    train_data = []
    for j in range(2, 146010):
        for val in sheet['B'+str(j)+':H'+str(j)]:
            train_set = []
            train_set.append([cell.value for cell in val])
            train_data.append(train_set)
    try:
        assert train_data is not None
        print('Getting training data process finished successfully')
        return train_data
    except AssertionError:
        print("Error")


def get_y_label():

    train_set = [val[0].value for val in sheet['L2:L146010']]
    train_set /= maxPrice

    try:
        assert train_set is not None
        print('Getting y label data process finished successfully')
        return train_set
    except AssertionError:
        print(AssertionError)


train_data = get_training_data()

train_data = np.asarray(train_data)
train_data = np.reshape(train_data, [1, train_data.shape[0], 7])
train_data = train_data[0]

for i in range(0, train_data.shape[0]):
    summ = 0
    for j in str(int(train_data[i][0])):
        summ += int(j)
    train_data[i][0] = summ

y_label = train_data.T[6]
y_label = np.reshape(y_label, [1, y_label.shape[0]])

test_data = train_data[87000:]
train_data = train_data[:87000]

test_data = test_data.T[:6]
train_data = train_data.T[:6]

y_label_test = y_label[0][87000:]
y_label = y_label[0][:87000]

#y_label_test /= maxPrice
#y_label /= maxPrice
y_label = np.reshape(y_label, [1, y_label.shape[0]])
y_label_test = np.reshape(y_label_test, [1, y_label_test.shape[0]])

'''
'''
def get_test_data():

    test_data = []
    for j in range(100001, 146010):
        for val in sheet['B'+str(j)+':I'+str(j)]:
            train_set = []
            train_set.append([cell.value for cell in val])
            test_data.append(train_set)

    try:
        assert test_data is not None
        print('Getting test data process finished successfully')
        return test_data
    except AssertionError:
        print("Error")


def get_test_label():

    test_set = [val[0].value for val in sheet['L100001:L146009']]
    test_set /= maxPrice

    try:
        assert test_set is not None
        print('Getting y label test data process finished successfully')
        return test_set
    except AssertionError:
        print(AssertionError)


test_data = get_test_data()
y_label_test = get_test_label()

test_data = np.asarray(test_data)
test_data = np.reshape(test_data, [1, test_data.shape[0], 8])
test_data = test_data[0]

y_label_test = np.asarray(y_label_test)
y_label_test = np.reshape(y_label_test, [1, y_label_test.shape[0]])

assert test_data.shape[0] == y_label_test.shape[1]


sc_train_X = StandardScaler().fit(train_data)
sc_train_Y = StandardScaler().fit(y_label)
sc_test_X = StandardScaler().fit(test_data)
sc_test_Y = StandardScaler().fit(y_label_test)

train_data = sc_train_X.transform(train_data)
test_data = sc_test_X.transform(test_data)
y_label_test = sc_test_Y.transform(y_label_test)
y_label = sc_train_Y.transform(y_label)

'''
def build_regression():

    model = Sequential([
        Dense(64, activation='relu', input_shape=(6,), kernel_initializer='random_normal', bias_initializer='Zeros'),
        #BatchNormalization(),
        #Dropout(0.2),
        Dense(64, activation='relu', kernel_initializer='random_normal', bias_initializer='Zeros'),
        #BatchNormalization(),
        #Dropout(0.1),
        Dense(1, activation='linear', kernel_initializer='random_normal', bias_initializer='Zeros'),
        ])

    opt = Adam(lr=1e-4, decay=1e-3 / 200) #Adam(lr=0.001, beta_1=0.9, beta_2=0.98)
    model.compile(optimizer=opt, loss='mse')
    return model

estimator = build_regression()#KerasRegressor(build_regression, batch_size=16, epochs=100, validation_split=0.3)

estimator.fit(x=train_data, y=y_label, batch_size=16, epochs=100, validation_split=0.3, shuffle=False, callbacks=[EarlyStopping(monitor='loss', patience=5)])

#estimator.fit(x=train_data, y=y_label, batch_size=96, epochs=250, validation_split=0.2, shuffle=True)

predict = estimator.predict(pred_test)
predict = scale_train_Y.inverse_transform(predict)
pred_y_label = scale_train_Y.inverse_transform(pred_y_label)

diff = predict - pred_y_label
percDiff = (diff / pred_y_label) * 100
absPercDiff = np.abs(percDiff)
mean_perc = np.mean(absPercDiff)
std_perc = np.std(absPercDiff)

print('Mean error percentage: {} \nStandard error percentage: {}'.format(mean_perc, std_perc))

'''
model_json = estimator.to_json()
with open("model.json", "w") as json_file:
    json_file.write(model_json)
# serialize weights to HDF5
regressor.save_weights("model.h5")
print("Saved model to disk")
'''

Output:

Train on 101506 samples, validate on 43503 samples
Epoch 1/100
2019-09-29 12:41:56.065057: I tensorflow/core/platform/cpu_feature_guard.cc:142] Your CPU supports instructions that this TensorFlow binary was not compiled to use: AVX2

    16/101506 [..............................] - ETA: 36:14 - loss: 0.1636
   896/101506 [..............................] - ETA: 44s - loss: 0.6881  
  1520/101506 [..............................] - ETA: 29s - loss: 0.5158
  2192/101506 [..............................] - ETA: 22s - loss: 0.6153
  3088/101506 [..............................] - ETA: 17s - loss: 0.8603
  4016/101506 [>.............................] - ETA: 14s - loss: 0.7997
  4944/101506 [>.............................] - ETA: 12s - loss: 0.9174
  5840/101506 [>.............................] - ETA: 11s - loss: 1.0359
  6688/101506 [>.............................] - ETA: 10s - loss: 0.9711
  7472/101506 [=>............................] - ETA: 10s - loss: 0.9186
  8336/101506 [=>............................] - ETA: 9s - loss: 0.8943 
  9232/101506 [=>............................] - ETA: 8s - loss: 0.8843
 10128/101506 [=>............................] - ETA: 8s - loss: 0.8377
 11056/101506 [==>...........................] - ETA: 8s - loss: 0.7994
 11984/101506 [==>...........................] - ETA: 7s - loss: 0.7640
 12912/101506 [==>...........................] - ETA: 7s - loss: 0.7327
 13808/101506 [===>..........................] - ETA: 7s - loss: 0.7647
 14736/101506 [===>..........................] - ETA: 7s - loss: 0.7474
 15664/101506 [===>..........................] - ETA: 6s - loss: 0.7393
 16576/101506 [===>..........................] - ETA: 6s - loss: 0.7204
 17504/101506 [====>.........................] - ETA: 6s - loss: 0.6941
 18448/101506 [====>.........................] - ETA: 6s - loss: 0.6858
 19392/101506 [====>.........................] - ETA: 6s - loss: 0.6624
 20320/101506 [=====>........................] - ETA: 5s - loss: 0.6431
 21248/101506 [=====>........................] - ETA: 5s - loss: 0.6349
 22192/101506 [=====>........................] - ETA: 5s - loss: 0.6385
 23120/101506 [=====>........................] - ETA: 5s - loss: 0.6409
 24064/101506 [======>.......................] - ETA: 5s - loss: 0.6310
 25008/101506 [======>.......................] - ETA: 5s - loss: 0.6638
 25904/101506 [======>.......................] - ETA: 5s - loss: 0.6495
 26752/101506 [======>.......................] - ETA: 5s - loss: 0.6451
 27616/101506 [=======>......................] - ETA: 5s - loss: 0.6402
 28448/101506 [=======>......................] - ETA: 5s - loss: 0.6285
 29360/101506 [=======>......................] - ETA: 4s - loss: 0.6177
 30208/101506 [=======>......................] - ETA: 4s - loss: 0.6194
 31056/101506 [========>.....................] - ETA: 4s - loss: 0.6105
 31936/101506 [========>.....................] - ETA: 4s - loss: 0.6181
 32800/101506 [========>.....................] - ETA: 4s - loss: 0.6073
 33632/101506 [========>.....................] - ETA: 4s - loss: 0.5983
 34496/101506 [=========>....................] - ETA: 4s - loss: 0.5917
 35360/101506 [=========>....................] - ETA: 4s - loss: 0.5834
 36208/101506 [=========>....................] - ETA: 4s - loss: 0.5833
 37104/101506 [=========>....................] - ETA: 4s - loss: 0.5940
 37952/101506 [==========>...................] - ETA: 4s - loss: 0.5937
 38848/101506 [==========>...................] - ETA: 4s - loss: 0.5950
 39744/101506 [==========>...................] - ETA: 4s - loss: 0.5859
 40656/101506 [===========>..................] - ETA: 3s - loss: 0.5809
 41568/101506 [===========>..................] - ETA: 3s - loss: 0.5721
 42464/101506 [===========>..................] - ETA: 3s - loss: 0.5673
 43376/101506 [===========>..................] - ETA: 3s - loss: 0.5745
 44272/101506 [============>.................] - ETA: 3s - loss: 0.5830
 45168/101506 [============>.................] - ETA: 3s - loss: 0.5803
 46016/101506 [============>.................] - ETA: 3s - loss: 0.5893
 46992/101506 [============>.................] - ETA: 3s - loss: 0.5844
 47920/101506 [=============>................] - ETA: 3s - loss: 0.5794
 48816/101506 [=============>................] - ETA: 3s - loss: 0.5725
 49712/101506 [=============>................] - ETA: 3s - loss: 0.5688
 50656/101506 [=============>................] - ETA: 3s - loss: 0.5686
 51568/101506 [==============>...............] - ETA: 3s - loss: 0.5622
 52496/101506 [==============>...............] - ETA: 3s - loss: 0.5594
 53392/101506 [==============>...............] - ETA: 3s - loss: 0.5529
 54320/101506 [===============>..............] - ETA: 2s - loss: 0.5494
 55248/101506 [===============>..............] - ETA: 2s - loss: 0.5438
 56176/101506 [===============>..............] - ETA: 2s - loss: 0.5374
 57104/101506 [===============>..............] - ETA: 2s - loss: 0.5347
 58032/101506 [================>.............] - ETA: 2s - loss: 0.5325
 58960/101506 [================>.............] - ETA: 2s - loss: 0.5272
 59904/101506 [================>.............] - ETA: 2s - loss: 0.5218
 60848/101506 [================>.............] - ETA: 2s - loss: 0.5171
 61776/101506 [=================>............] - ETA: 2s - loss: 0.5180
 62704/101506 [=================>............] - ETA: 2s - loss: 0.5206
 63648/101506 [=================>............] - ETA: 2s - loss: 0.5192
 64528/101506 [==================>...........] - ETA: 2s - loss: 0.5205
 65392/101506 [==================>...........] - ETA: 2s - loss: 0.5179
 66304/101506 [==================>...........] - ETA: 2s - loss: 0.5147
 67216/101506 [==================>...........] - ETA: 2s - loss: 0.5106
 68096/101506 [===================>..........] - ETA: 2s - loss: 0.5122
 68960/101506 [===================>..........] - ETA: 1s - loss: 0.5087
 69824/101506 [===================>..........] - ETA: 1s - loss: 0.5048
 70704/101506 [===================>..........] - ETA: 1s - loss: 0.5066
 71584/101506 [====================>.........] - ETA: 1s - loss: 0.5114
 72480/101506 [====================>.........] - ETA: 1s - loss: 0.5068
 73344/101506 [====================>.........] - ETA: 1s - loss: 0.5029
 74224/101506 [====================>.........] - ETA: 1s - loss: 0.5022
 75104/101506 [=====================>........] - ETA: 1s - loss: 0.5036
 75984/101506 [=====================>........] - ETA: 1s - loss: 0.5017
 76816/101506 [=====================>........] - ETA: 1s - loss: 0.4990
 77600/101506 [=====================>........] - ETA: 1s - loss: 0.4959
 78416/101506 [======================>.......] - ETA: 1s - loss: 0.5045
 79248/101506 [======================>.......] - ETA: 1s - loss: 0.5044
 80080/101506 [======================>.......] - ETA: 1s - loss: 0.5015
 80912/101506 [======================>.......] - ETA: 1s - loss: 0.5000
 81728/101506 [=======================>......] - ETA: 1s - loss: 0.4969
 82576/101506 [=======================>......] - ETA: 1s - loss: 0.4954
 83424/101506 [=======================>......] - ETA: 1s - loss: 0.4917
 84288/101506 [=======================>......] - ETA: 1s - loss: 0.4908
 85200/101506 [========================>.....] - ETA: 0s - loss: 0.4895
 86128/101506 [========================>.....] - ETA: 0s - loss: 0.4898
 87040/101506 [========================>.....] - ETA: 0s - loss: 0.4914
 87936/101506 [========================>.....] - ETA: 0s - loss: 0.5128
 88848/101506 [=========================>....] - ETA: 0s - loss: 0.5126
 89744/101506 [=========================>....] - ETA: 0s - loss: 0.5132
 90624/101506 [=========================>....] - ETA: 0s - loss: 0.5102
 91536/101506 [==========================>...] - ETA: 0s - loss: 0.5073
 92448/101506 [==========================>...] - ETA: 0s - loss: 0.5036
 93344/101506 [==========================>...] - ETA: 0s - loss: 0.5030
 94240/101506 [==========================>...] - ETA: 0s - loss: 0.5010
 95104/101506 [===========================>..] - ETA: 0s - loss: 0.4980
 95984/101506 [===========================>..] - ETA: 0s - loss: 0.4956
 96896/101506 [===========================>..] - ETA: 0s - loss: 0.4931
 97776/101506 [===========================>..] - ETA: 0s - loss: 0.4928
 98656/101506 [============================>.] - ETA: 0s - loss: 0.4901
 99552/101506 [============================>.] - ETA: 0s - loss: 0.4872
100448/101506 [============================>.] - ETA: 0s - loss: 0.4875
101328/101506 [============================>.] - ETA: 0s - loss: 0.4850
101506/101506 [==============================] - 7s 69us/step - loss: 0.4844 - val_loss: 0.3809
Epoch 2/100

    16/101506 [..............................] - ETA: 18s - loss: 0.0778
   928/101506 [..............................] - ETA: 5s - loss: 0.1726 
  1840/101506 [..............................] - ETA: 5s - loss: 0.1622
  2688/101506 [..............................] - ETA: 5s - loss: 0.2834
  3568/101506 [>.............................] - ETA: 5s - loss: 0.2775
  4448/101506 [>.............................] - ETA: 5s - loss: 0.2740
  5360/101506 [>.............................] - ETA: 5s - loss: 0.3466
  6288/101506 [>.............................] - ETA: 5s - loss: 0.3877
  7232/101506 [=>............................] - ETA: 5s - loss: 0.3647
  8144/101506 [=>............................] - ETA: 5s - loss: 0.3463
  9072/101506 [=>............................] - ETA: 5s - loss: 0.3689
  9984/101506 [=>............................] - ETA: 5s - loss: 0.3552
 10928/101506 [==>...........................] - ETA: 5s - loss: 0.3515
 11856/101506 [==>...........................] - ETA: 4s - loss: 0.3333
 12784/101506 [==>...........................] - ETA: 4s - loss: 0.3246
 13696/101506 [===>..........................] - ETA: 4s - loss: 0.3461
 14640/101506 [===>..........................] - ETA: 4s - loss: 0.3441
 15568/101506 [===>..........................] - ETA: 4s - loss: 0.3416
 16464/101506 [===>..........................] - ETA: 4s - loss: 0.3337
 17392/101506 [====>.........................] - ETA: 4s - loss: 0.3213
 18288/101506 [====>.........................] - ETA: 4s - loss: 0.3251
 19200/101506 [====>.........................] - ETA: 4s - loss: 0.3165
 20096/101506 [====>.........................] - ETA: 4s - loss: 0.3106
 21008/101506 [=====>........................] - ETA: 4s - loss: 0.3036
 21920/101506 [=====>........................] - ETA: 4s - loss: 0.3175
 22816/101506 [=====>........................] - ETA: 4s - loss: 0.3159
 23712/101506 [======>.......................] - ETA: 4s - loss: 0.3225
 24608/101506 [======>.......................] - ETA: 4s - loss: 0.3269
 25520/101506 [======>.......................] - ETA: 4s - loss: 0.3477
 26400/101506 [======>.......................] - ETA: 4s - loss: 0.3490
 27328/101506 [=======>......................] - ETA: 4s - loss: 0.3496
 28272/101506 [=======>......................] - ETA: 4s - loss: 0.3469
 29200/101506 [=======>......................] - ETA: 4s - loss: 0.3442
 30112/101506 [=======>......................] - ETA: 3s - loss: 0.3502
 31008/101506 [========>.....................] - ETA: 3s - loss: 0.3480
 31904/101506 [========>.....................] - ETA: 3s - loss: 0.3549
 32832/101506 [========>.....................] - ETA: 3s - loss: 0.3537
 33792/101506 [========>.....................] - ETA: 3s - loss: 0.3507
 34704/101506 [=========>....................] - ETA: 3s - loss: 0.3510
 35616/101506 [=========>....................] - ETA: 3s - loss: 0.3505
 36528/101506 [=========>....................] - ETA: 3s - loss: 0.3498
 37488/101506 [==========>...................] - ETA: 3s - loss: 0.3664
 38416/101506 [==========>...................] - ETA: 3s - loss: 0.3635
 39344/101506 [==========>...................] - ETA: 3s - loss: 0.3681
 40272/101506 [==========>...................] - ETA: 3s - loss: 0.3648
 41232/101506 [===========>..................] - ETA: 3s - loss: 0.3614
 42192/101506 [===========>..................] - ETA: 3s - loss: 0.3578
 43152/101506 [===========>..................] - ETA: 3s - loss: 0.3639
 44080/101506 [============>.................] - ETA: 3s - loss: 0.3728
 45008/101506 [============>.................] - ETA: 3s - loss: 0.3785
 45968/101506 [============>.................] - ETA: 3s - loss: 0.3899
 46864/101506 [============>.................] - ETA: 3s - loss: 0.3891
 47776/101506 [=============>................] - ETA: 2s - loss: 0.3859
 48672/101506 [=============>................] - ETA: 2s - loss: 0.3831
 49568/101506 [=============>................] - ETA: 2s - loss: 0.3818
 50448/101506 [=============>................] - ETA: 2s - loss: 0.3825
 51376/101506 [==============>...............] - ETA: 2s - loss: 0.3814
 52288/101506 [==============>...............] - ETA: 2s - loss: 0.3807
 53200/101506 [==============>...............] - ETA: 2s - loss: 0.3764
 54144/101506 [===============>..............] - ETA: 2s - loss: 0.3758
 55072/101506 [===============>..............] - ETA: 2s - loss: 0.3733
 55968/101506 [===============>..............] - ETA: 2s - loss: 0.3696
 56864/101506 [===============>..............] - ETA: 2s - loss: 0.3698
 57824/101506 [================>.............] - ETA: 2s - loss: 0.3687
 58768/101506 [================>.............] - ETA: 2s - loss: 0.3659
 59696/101506 [================>.............] - ETA: 2s - loss: 0.3630
 60624/101506 [================>.............] - ETA: 2s - loss: 0.3606
 61552/101506 [=================>............] - ETA: 2s - loss: 0.3633
 62464/101506 [=================>............] - ETA: 2s - loss: 0.3683
 63376/101506 [=================>............] - ETA: 2s - loss: 0.3688
 64320/101506 [==================>...........] - ETA: 2s - loss: 0.3670
 65248/101506 [==================>...........] - ETA: 1s - loss: 0.3695
 66176/101506 [==================>...........] - ETA: 1s - loss: 0.3686
 67088/101506 [==================>...........] - ETA: 1s - loss: 0.3656
 67936/101506 [===================>..........] - ETA: 1s - loss: 0.3676
 68816/101506 [===================>..........] - ETA: 1s - loss: 0.3651
 69712/101506 [===================>..........] - ETA: 1s - loss: 0.3639
 70624/101506 [===================>..........] - ETA: 1s - loss: 0.3671
 71536/101506 [====================>.........] - ETA: 1s - loss: 0.3728
 72448/101506 [====================>.........] - ETA: 1s - loss: 0.3699
 73344/101506 [====================>.........] - ETA: 1s - loss: 0.3676
 74256/101506 [====================>.........] - ETA: 1s - loss: 0.3686
 75184/101506 [=====================>........] - ETA: 1s - loss: 0.3707
 76112/101506 [=====================>........] - ETA: 1s - loss: 0.3698
 77040/101506 [=====================>........] - ETA: 1s - loss: 0.3684
 77968/101506 [======================>.......] - ETA: 1s - loss: 0.3680
 78960/101506 [======================>.......] - ETA: 1s - loss: 0.3762
 79872/101506 [======================>.......] - ETA: 1s - loss: 0.3766
 80816/101506 [======================>.......] - ETA: 1s - loss: 0.3755
 81728/101506 [=======================>......] - ETA: 1s - loss: 0.3736
 82672/101506 [=======================>......] - ETA: 1s - loss: 0.3733
 83552/101506 [=======================>......] - ETA: 0s - loss: 0.3708
 84528/101506 [=======================>......] - ETA: 0s - loss: 0.3706
 85456/101506 [========================>.....] - ETA: 0s - loss: 0.3699
 86336/101506 [========================>.....] - ETA: 0s - loss: 0.3712
 87264/101506 [========================>.....] - ETA: 0s - loss: 0.3740
 88192/101506 [=========================>....] - ETA: 0s - loss: 0.3960
 89072/101506 [=========================>....] - ETA: 0s - loss: 0.3997
 89984/101506 [=========================>....] - ETA: 0s - loss: 0.3976
 90928/101506 [=========================>....] - ETA: 0s - loss: 0.3951
 91840/101506 [==========================>...] - ETA: 0s - loss: 0.3932
 92768/101506 [==========================>...] - ETA: 0s - loss: 0.3909
 93664/101506 [==========================>...] - ETA: 0s - loss: 0.3916
 94576/101506 [==========================>...] - ETA: 0s - loss: 0.3896
 95488/101506 [===========================>..] - ETA: 0s - loss: 0.3882
 96416/101506 [===========================>..] - ETA: 0s - loss: 0.3860
 97328/101506 [===========================>..] - ETA: 0s - loss: 0.3867
 98240/101506 [============================>.] - ETA: 0s - loss: 0.3851
 99152/101506 [============================>.] - ETA: 0s - loss: 0.3825
100048/101506 [============================>.] - ETA: 0s - loss: 0.3818
100960/101506 [============================>.] - ETA: 0s - loss: 0.3817
101506/101506 [==============================] - 6s 64us/step - loss: 0.3808 - val_loss: 0.3671

After 100 epochs the loss stays around 0.36.

2 Answers

0 votes
/ September 30, 2019

Your results look decent, if not good, for the data you have: val_loss is very close to loss, which is basically the ideal situation. In absolute terms a validation loss of 0.36 is quite good (below 0.25 would be excellent), but the main indicator is that it is close to your training loss, so the data is the most likely limiting factor.

The confusion may come from the title, "mean error ~40%": loss != error. For regression, "accuracy" is not defined; you can only measure how close the predictions are to the labels. If you measure accuracy the way you would for classification, it will always be near zero, since you would have to match labels that are float32 numbers exactly. If you want a better picture of how your model is doing, plot the predictions against the labels.
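A minimal sketch of such a plot, assuming predict and pred_y_label are the inverse-transformed arrays already computed in the question's code (matplotlib is an extra dependency here):

import matplotlib.pyplot as plt
import numpy as np

# flatten to 1-D so matplotlib treats them as plain point lists
y_true = np.ravel(pred_y_label)
y_pred = np.ravel(predict)

plt.figure(figsize=(6, 6))
plt.scatter(y_true, y_pred, s=3, alpha=0.3)
# reference line y = x: points lying on it are perfect predictions
lims = [min(y_true.min(), y_pred.min()), max(y_true.max(), y_pred.max())]
plt.plot(lims, lims, color='red', linewidth=1)
plt.xlabel('actual price_usd')
plt.ylabel('predicted price_usd')
plt.title('Predictions vs. labels')
plt.show()

A tight cloud around the red line means the model is doing well; a wide or skewed cloud shows where the 40% mean error comes from.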

0 votes
/ September 29, 2019

It looks like you have just gotten stuck in a local minimum that is not the one you want. You can also try the following:

  • Change the optimizer.
  • Increase the learning rate so that you can jump over the local minimum, combined with a well-designed decay (you may need a strong decay) so you can still home in on the best loss value (see the sketch after this list).
  • Change the activation functions.
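A minimal sketch of the first two suggestions, reusing the architecture from the question's build_regression; the function name build_regression_alt and the exact learning-rate and decay values are illustrative, not tuned:

from keras import Sequential
from keras.layers import Dense
from keras.optimizers import RMSprop

def build_regression_alt():
    # same layers as in the question, only the optimizer settings change
    model = Sequential([
        Dense(64, activation='relu', input_shape=(6,)),
        Dense(64, activation='relu'),
        Dense(1, activation='linear'),
    ])
    # higher starting learning rate with a stronger per-update decay:
    # early steps can escape a shallow local minimum, later steps settle down
    opt = RMSprop(lr=1e-3, decay=1e-2)
    model.compile(optimizer=opt, loss='mse')
    return model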

OR

Another thing you should suspect is the data itself. It may be too noisy to fit. You should check whether your data is noisy, and in this case you can simply inspect it with matplotlib. Keep in mind, though, that as far as I can tell from your code this may be computationally expensive, since you have around 200k points.

If that is hard to check by plotting, you can filter out the noisy data with some filters; which ones to use depends on the situation itself.
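For example, a minimal sketch of one such filter, assuming the same data.xlsx layout as in the question: drop rows whose price_usd is far from the mean before training (the 3-sigma threshold is arbitrary and only illustrative):

import pandas as pd

df = pd.read_excel('data.xlsx', sheet_name='Лист1')

# simple z-score filter on the target column: keep rows within 3 standard deviations
price = df['price_usd']
mask = (price - price.mean()).abs() <= 3 * price.std()
df_filtered = df[mask].reset_index(drop=True)

print('kept {} of {} rows'.format(len(df_filtered), len(df)))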
