import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt

# Generate 200 training points: y = 0.1*x + 0.2 plus Gaussian noise
x_data = np.linspace(-0.5, 0.5, 200)[:, np.newaxis]
noise = np.random.normal(0, 0.02, x_data.shape)
y_data = x_data * 0.1 + 0.2 + noise

# Placeholders for the inputs and targets (TensorFlow 1.x graph API)
x = tf.placeholder(tf.float32, [None, 1])
y = tf.placeholder(tf.float32, [None, 1])

# Hidden layer: 1 input -> 10 units, tanh activation
w = tf.Variable(tf.random_normal([1, 10]))
b = tf.Variable(tf.zeros([10]))
L1 = tf.nn.tanh(tf.matmul(x, w) + b)

# Output layer: 10 units -> 1 output, tanh activation
w2 = tf.Variable(tf.random_normal([10, 1]))
b2 = tf.Variable(tf.zeros([1]))
L2 = tf.nn.tanh(tf.matmul(L1, w2) + b2)

# Mean squared error loss, minimized with plain gradient descent
loss = tf.reduce_mean(tf.square(y - L2))
train_step = tf.train.GradientDescentOptimizer(0.2).minimize(loss)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # Train for 1000 steps on the full data set
    for _ in range(1000):
        sess.run(train_step, feed_dict={x: x_data, y: y_data})
    # Evaluate the trained network on the inputs and plot the fit
    prediction_value = sess.run(L2, feed_dict={x: x_data})
    plt.figure()
    plt.scatter(x_data, y_data)
    plt.plot(x_data, prediction_value, 'r-', lw=5)
    plt.show()
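Note: the listing above uses the TensorFlow 1.x graph API (tf.placeholder, tf.Session, tf.train.GradientDescentOptimizer). If you are running TensorFlow 2.x, one common way to keep 1.x-style code working is the compat shim, e.g. replacing the first import with:

import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()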
Question: why does the result change when the line L2 = tf.nn.tanh(tf.matmul(L1,w2)+b2) is changed to L2 = tf.nn.softmax(tf.matmul(L1,w2)+b2)?
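For context: tf.nn.softmax normalizes over the last axis, and the output layer here has only a single unit, so each row's softmax is exp(z)/exp(z) = 1 no matter what the network computes, which collapses the fitted curve to a constant. A minimal NumPy sketch, separate from the script above and using an illustrative helper name softmax_last_axis, showing this effect:

import numpy as np

def softmax_last_axis(z):
    # Numerically stable softmax over the last axis
    e = np.exp(z - z.max(axis=-1, keepdims=True))
    return e / e.sum(axis=-1, keepdims=True)

logits = np.array([[-2.0], [0.3], [5.0]])   # three samples, one output unit each
print(softmax_last_axis(logits))            # every row becomes [1.], regardless of the logit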