# -*- coding: utf-8 -*-
import tensorflow as tf
# TensorBoard is a powerful visualization tool that ships with TensorFlow.
tf.set_random_seed(1234)
'''
Step 1: Name variables and use scopes to organize your graph
Step 2: Place summaries on the values you want to keep track of
Step 3: Let TF manage all summaries with tf.merge_all_summaries()
Step 4: Create a writer pointing to the desired directory
Step 5: Add the merged summaries to the writer
Step 6: Launch TensorBoard
Use the command ➙ $ tensorboard --logdir=/tmp/regression/run1
Then browse to ➙ http://localhost:6006/ or http://127.0.0.1:6006/
(If using Docker you may need to add the parameter -p 6006:6006.)
'''
with tf.name_scope('data'):
    with tf.name_scope('x'):
        x = tf.random_normal([100], mean=0.0, stddev=0.9, name='rand_x')  # shape [100], mean, standard deviation
    with tf.name_scope('y'):
        # Ground truth: y = 0.1 * x + 0.3 plus a small amount of Gaussian noise.
        y_true = x * tf.constant(0.1, name='real_slope') + tf.constant(0.3, name='real_bias') + tf.random_normal([100], mean=0.0, stddev=0.05, name='rand_y')
'''
random_normal: normally distributed random values with the given mean and standard deviation (stddev)
truncated_normal: truncated normal distribution with the given mean and stddev; values outside
    [mean - 2*stddev, mean + 2*stddev] are discarded and re-drawn
random_uniform: uniformly distributed random values in the range [minval, maxval)
'''
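# A quick illustration of the three sampling ops described above. This is only a
# sketch: the tensors below are never fetched or used in training; they are
# grouped under their own name scope so they show up as a single collapsed node
# in TensorBoard's Graph tab.
with tf.name_scope('sampling_examples'):
    samples_normal = tf.random_normal([5], mean=0.0, stddev=1.0, name='normal')           # unbounded normal
    samples_truncated = tf.truncated_normal([5], mean=0.0, stddev=1.0, name='truncated')  # values beyond 2*stddev are re-drawn
    samples_uniform = tf.random_uniform([5], minval=-1.0, maxval=1.0, name='uniform')     # uniform over [-1.0, 1.0)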
with tf.name_scope('W'):
    W = tf.Variable(tf.random_uniform([], minval=-1.0, maxval=1.0))
    # Step 2: place summaries on the parameters you want to track
    tf.scalar_summary('function/W', W)
with tf.name_scope('b'):
    b = tf.Variable(tf.zeros([]))
    tf.scalar_summary('function/b', b)
with tf.name_scope('function'):
    y_pred = W * x + b
with tf.name_scope('error'):
    loss = tf.reduce_mean(tf.square(y_pred - y_true))
    tf.scalar_summary('error', loss)
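# Optional (not one of the steps above): a histogram summary records the full
# distribution of the per-sample residuals, viewable in TensorBoard's
# Histograms tab. Included here only as an illustration of a second summary type.
tf.histogram_summary('error/residuals', y_pred - y_true)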
train = tf.train.GradientDescentOptimizer(0.05).minimize(loss)
init = tf.initialize_all_variables()
sess = tf.Session()
# Step 3: let TF manage all summaries
merged = tf.merge_all_summaries()
# tf.train.SummaryWriter writes event files that contain both the graph itself
# and the concrete summary values recorded during training.
# Step 4: create a writer pointing at the desired directory
writer = tf.train.SummaryWriter('/tmp/regression/run1', sess.graph)
sess.run(init)
# slope and intercept of the fitted line
for step in range(1, 101):
    # Fetch the merged summaries in the same run call as the training op, so the
    # logged values come from the same random batch as the printed ones.
    _, slope, intercept, error, summary_str = sess.run([train, W, b, loss, merged])
    # Step 5: add the merged summaries to the writer
    if step % 10 == 0:
        writer.add_summary(summary_str, step)
        print('Step %.3d; W = %.5f; b = %.5f; loss = %.5f' % (step, slope, intercept, error))
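# Close the writer so all pending events are flushed to disk before exit.
writer.close()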
# Graph example: see the Graphs tab in TensorBoard for the resulting graph.
# Events example: see the Events tab for the 'function/W', 'function/b' and 'error' curves.