Примечание: слово «epoch» было написано с орфографической ошибкой, а функция должна возвращать метрику 'accuracy', а не 'acc'.
from tensorflow.keras.layers import Input, Dense, Add, Activation, Flatten
from tensorflow.keras.models import Model, Sequential
import tensorflow as tf
import numpy as np
import random
from tensorflow.python.keras.layers import Input, GaussianNoise, BatchNormalization
def train_mnist():
    """Train a small dense classifier on MNIST, stopping early once
    training accuracy exceeds 90%.

    Returns:
        tuple: (history.epoch, final_accuracy) where history.epoch is the
        list of epoch indices actually run and final_accuracy is the last
        recorded training 'accuracy' value.
    """

    class StopAtNinetyPercent(tf.keras.callbacks.Callback):
        """Stop training as soon as the 'accuracy' metric passes 0.9."""

        def on_epoch_end(self, epoch, logs=None):
            # NOTE: the original used a mutable default (logs={}), which is
            # shared across calls; use None and normalize instead.
            logs = logs or {}
            acc = logs.get('accuracy')
            print(acc)
            # Guard against a missing metric: None > 0.9 raises TypeError.
            if acc is not None and acc > 0.9:
                print("Reached 90% accuracy so cancelling training!")
                self.model.stop_training = True

    mnist = tf.keras.datasets.mnist
    (x_train, y_train), (x_test, y_test) = mnist.load_data()
    # Scale pixel values from [0, 255] into [0, 1].
    x_train = x_train / 255.0
    x_test = x_test / 255.0

    model = tf.keras.models.Sequential([
        tf.keras.layers.Flatten(input_shape=(28, 28)),
        tf.keras.layers.Dense(256, activation=tf.nn.relu),
        tf.keras.layers.Dense(10, activation=tf.nn.softmax),
    ])
    # Labels are integer class ids, hence sparse categorical crossentropy.
    model.compile(optimizer='adam',
                  loss='sparse_categorical_crossentropy',
                  metrics=['accuracy'])

    # Model fitting; the callback may end training before all 10 epochs.
    history = model.fit(x_train, y_train, epochs=10,
                        callbacks=[StopAtNinetyPercent()])
    return history.epoch, history.history['accuracy'][-1]
# Run training only when executed as a script, not on import.
if __name__ == "__main__":
    train_mnist()
Epoch 1/10
1859/1875 [============================>.] - ETA: 0s - loss: 0.2273 - accuracy: 0.93580.93586665391922
Reached 90% accuracy so cancelling training!
1875/1875 [==============================] - 3s 2ms/step - loss: 0.2265 - accuracy: 0.9359
([0], 0.93586665391922)