Classify MNIST images with softmax and measure the prediction accuracy
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets('MNIST_data/', one_hot=True)
x_data = tf.placeholder(tf.float32, [None, 28 * 28])  # flattened 28x28 images
y_data = tf.placeholder(tf.float32, [None, 10])       # one-hot digit labels
# Create a fully connected output layer: input x_data, 10 output neurons, softmax activation
prediction = tf.layers.dense(x_data, 10, tf.nn.softmax)
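# tf.layers.dense builds a fully connected layer: a weight matrix W of shape
# [784, 10] plus a bias b of shape [10], computing softmax(x_data·W + b),
# i.e. a probability distribution over the 10 digit classes for each image.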
# How tf.reduce_sum works:
# x is [[1, 1, 1],
#       [1, 1, 1]]
# tf.reduce_sum(x)    => 6
# tf.reduce_sum(x, 0) => [2, 2, 2]
# tf.reduce_sum(x, 1) => [3, 3]
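# Quick check in a session (sketch):
#   with tf.Session() as s:
#       print(s.run(tf.reduce_sum(tf.ones([2, 3]), 1)))  # => [3. 3.]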
# Loss function: a softmax output is normally paired with a cross-entropy loss.
cross_entropy = tf.reduce_mean(-tf.reduce_sum(y_data * tf.log(prediction), axis=1))
# cross_entropy = tf.reduce_mean(-y_data * tf.log(prediction))  # also averages over the 10 classes, shrinking the loss 10x
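# Numerically safer alternative (sketch): feed raw logits into TensorFlow's
# fused op instead of taking the log of the softmax output, which can hit log(0):
#   logits = tf.layers.dense(x_data, 10)  # no activation, raw scores
#   cross_entropy = tf.reduce_mean(
#       tf.nn.softmax_cross_entropy_with_logits_v2(labels=y_data, logits=logits))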
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)
def compute_accuracy(x_input, y_input):
    '''Evaluate prediction accuracy on the given inputs.'''
    global prediction
    y_pre = sess.run(prediction, feed_dict={x_data: x_input, y_data: y_input})
    # argmax along axis 1 gives the predicted / true class index for each row
    correct_prediction = tf.equal(tf.argmax(y_pre, 1), tf.argmax(y_input, 1))
    # correct_prediction = tf.equal(tf.argmax(y_pre, axis=1), tf.argmax(y_input, axis=1))
    accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
    result = sess.run(accuracy, feed_dict={x_data: x_input, y_data: y_input})
    return result
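# Note: every compute_accuracy call above adds fresh equal/cast/reduce_mean ops
# to the graph. A leaner variant (sketch) defines the accuracy op once,
# alongside the model, and only runs it inside the function:
#   correct = tf.equal(tf.argmax(prediction, 1), tf.argmax(y_data, 1))
#   accuracy_op = tf.reduce_mean(tf.cast(correct, tf.float32))
#   # then: sess.run(accuracy_op, feed_dict={x_data: x_input, y_data: y_input})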
init = tf.global_variables_initializer()
with tf.Session() as sess:
    sess.run(init)
    for step in range(1001):
        # train on mini-batches of 1000 images
        batch_x_data, batch_y_data = mnist.train.next_batch(1000)
        sess.run(train_step, feed_dict={x_data: batch_x_data, y_data: batch_y_data})
        if step % 40 == 0:
            accuracy = compute_accuracy(mnist.test.images, mnist.test.labels)
            print(step, accuracy)
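Note: this script targets the TensorFlow 1.x API (tf.placeholder, tf.Session, tf.layers, and the tutorials input_data module, which were removed or deprecated in 2.x). A minimal way to run it under TensorFlow 2.x, assuming the MNIST loading is replaced as well:

import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()  # restores placeholders, sessions, tf.layers, etc.
# tensorflow.examples.tutorials no longer ships with TF 2.x; load MNIST via
# Keras instead and one-hot encode the labels yourself, e.g.:
# (train_x, train_y), (test_x, test_y) = tf.keras.datasets.mnist.load_data()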
Output:
Extracting MNIST_data/train-images-idx3-ubyte.gz
Extracting MNIST_data/train-labels-idx1-ubyte.gz
Extracting MNIST_data/t10k-images-idx3-ubyte.gz
Extracting MNIST_data/t10k-labels-idx1-ubyte.gz
0 0.3994
40 0.8812
80 0.8961
120 0.9021
160 0.9066
200 0.9091
240 0.9114
280 0.9137
320 0.9133
360 0.9147
400 0.9159
440 0.9169
480 0.9179
520 0.9179
560 0.9193
600 0.9193
640 0.9181
680 0.9192
720 0.9206
760 0.9201
800 0.9209
840 0.92
880 0.9205
920 0.9211
960 0.9203
1000 0.9201
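A final test accuracy of roughly 92% is in line with what plain softmax regression is typically reported to reach on MNIST.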