Here is the code!
I am using tensorflow to load the MNIST dataset and build a simple logistic regression graph.
import tensorflow as tf
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data(path='mnist.npz')
x_train,x_test = tf.reshape(x_train,[len(x_train),784]),tf.reshape(x_test,[len(x_test),784])
# Use tf.one_hot to get one-hot encodings of the labels
y_train,y_test = tf.one_hot(y_train,10),tf.one_hot(y_test,10)
learning_rate = 0.01
training_epochs = 25
batch_size = 100
display_step = 1
# tf graph input
x = tf.placeholder(tf.float32,[None,784])
y = tf.placeholder(tf.float32,[None,10])
W = tf.Variable(tf.zeros([784,10]))
b = tf.Variable(tf.zeros([10]))
pred = tf.nn.softmax(tf.matmul(x,W)+b)
# Cost: cross-entropy loss
cost = tf.reduce_mean(-tf.reduce_sum(y*tf.log(pred),reduction_indices = 1))
# Optimizer: gradient descent
optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)
init = tf.global_variables_initializer()
with tf.Session() as sess:
    sess.run(init)
    print(sess.run(W))
    X_train, X_test = sess.run(x_train).astype('float32'), sess.run(x_test).astype('float32')
    Y_train, Y_test = sess.run(y_train).astype('float32'), sess.run(y_test).astype('float32')
    for epoch in range(training_epochs):
        avg_cost = 0
        total_batch = int(60000/batch_size)
        for i in range(total_batch):
            batch_x, batch_y = X_train[i*batch_size:(i+1)*batch_size], Y_train[i*batch_size:(i+1)*batch_size]
            _, c = sess.run([optimizer, cost], feed_dict={x: batch_x, y: batch_y})
            avg_cost += c/total_batch
        if (epoch+1) % display_step == 0:
            print("Epoch:", (epoch+1), "cost=", avg_cost, "W = ", sess.run(W))
    print("Optimization Finished!")