TensorFlow Learning 5-2: Monitoring Network Runtime with TensorBoard

Use TensorBoard to watch how various statistics change while the network trains.

Example

import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
# STEP 1: define a summary helper that records statistics for a given tensor
def variable_summary(var):
    with tf.name_scope("summary"):
        mean = tf.reduce_mean(var)
        tf.summary.scalar("mean", mean)               # mean
        with tf.name_scope("stddev"):
            stddev = tf.sqrt(tf.reduce_mean(tf.square(var - mean)))
        tf.summary.scalar("stddev", stddev)           # standard deviation
        tf.summary.scalar("max", tf.reduce_max(var))  # maximum
        tf.summary.scalar("min", tf.reduce_min(var))  # minimum
        tf.summary.histogram("histogram", var)        # histogram
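Before wiring the helper into a full model, it can be sanity-checked on its own. The snippet below is a hypothetical standalone sketch (the variable name toy_var and the log directory ./toy_logs are made up for illustration, not part of the MNIST example); it attaches the statistics to a single variable, merges all summaries, and writes one data point:

# Hypothetical standalone check of variable_summary (not part of the MNIST example)
v = tf.Variable(tf.truncated_normal([3, 3]), name="toy_var")
variable_summary(v)                       # record mean/stddev/max/min/histogram for v
merged = tf.summary.merge_all()           # collect every summary defined so far

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    writer = tf.summary.FileWriter("./toy_logs", sess.graph)  # made-up log directory
    writer.add_summary(sess.run(merged), 0)                   # write one point at step 0
    writer.close()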
# Load the MNIST dataset
mnist = input_data.read_data_sets("MNIST_data", one_hot=True)

# Size of each batch
batch_size = 100
# Number of batches per epoch
batch_num = mnist.train.num_examples // batch_size

with tf.name_scope("input"):
    input_x = tf.placeholder(tf.float32, [None, 784], name="input_x")
    input_y = tf.placeholder(tf.float32, [None, 10], name="input_y")

# Build the neural network model
with tf.name_scope("layers"):
    W1 = tf.Variable(tf.truncated_normal([784, 128], 0., 0.5), name="W1")
    variable_summary(W1)  # STEP 2.1
    b1 = tf.Variable(tf.zeros([128]) + 0.1, name="b1")
    variable_summary(b1)  # STEP 2.1
    L1 = tf.nn.relu(tf.matmul(input_x, W1) + b1, name="L1")

    W2 = tf.Variable(tf.truncated_normal([128, 10], 0., 0.5), name="W2")
    b2 = tf.Variable(tf.zeros([10]) + 0.1, name="b2")
    L2 = tf.add(tf.matmul(L1, W2), b2, name="L2")

with tf.name_scope("loss"):
    loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=L2, labels=input_y))
    tf.summary.scalar("loss", loss)  # STEP 2.2

with tf.name_scope("train_and_optimizer"):
    train = tf.train.GradientDescentOptimizer(0.2).minimize(loss)

# Accuracy, used to track how well the optimization is doing
with tf.name_scope("accuracy"):
    correct_indices = tf.equal(tf.argmax(input_y, 1), tf.argmax(L2, 1))
    accuracy = tf.reduce_mean(tf.cast(correct_indices, tf.float32))
    tf.summary.scalar("accuracy", accuracy)  # STEP 2.2

# STEP 3: merge all summaries
merged = tf.summary.merge_all()

init = tf.global_variables_initializer()
with tf.Session() as sess:
    sess.run(init)
    # STEP 4: define the writer and the log directory
    writer = tf.summary.FileWriter("D:/Tensorflow/logs", sess.graph)
    for epoch in range(5):
        for batch in range(batch_num):
            batch_xs, batch_ys = mnist.train.next_batch(batch_size)
            # STEP 5: evaluate the merged summary together with the training step
            summary, _ = sess.run([merged, train], feed_dict={input_x: batch_xs, input_y: batch_ys})
            # STEP 6: choose how often to write the summary (here every batch,
            # indexed by a global step so each point gets a unique index)
            writer.add_summary(summary, epoch * batch_num + batch)
        _accuracy = sess.run(accuracy, feed_dict={input_x: mnist.test.images, input_y: mnist.test.labels})
        print("epoch:" + str(epoch) + ", accuracy:" + str(_accuracy))

Extracting MNIST_data\train-images-idx3-ubyte.gz
Extracting MNIST_data\train-labels-idx1-ubyte.gz
Extracting MNIST_data\t10k-images-idx3-ubyte.gz
Extracting MNIST_data\t10k-labels-idx1-ubyte.gz
epoch:0, accuracy:0.9063
epoch:1, accuracy:0.9252
epoch:2, accuracy:0.9332
epoch:3, accuracy:0.9426
epoch:4, accuracy:0.9456


Summary

Steps for displaying the statistics of chosen tensors in TensorBoard:
STEP 1: Write a custom function that computes the statistics.
STEP 2: Attach a summary right after each tensor you want to monitor.
    2.1: For weights, biases, and other tensors whose full set of statistics you want, use the custom variable_summary function.
    2.2: For tensors whose value alone should be shown, use tf.summary.scalar.
STEP 3: Before the Session(), merge all summaries.
STEP 4: Inside the Session(), define the writer and set the log directory.
STEP 5: After each training step, evaluate merged once.
STEP 6: Choose how often to write the summary.
STEP 7: After running the code, start TensorBoard:

tensorboard --logdir=D:/Tensorflow/logs
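When TensorBoard starts it prints the address it is serving on (http://localhost:6006 by default). Open it in a browser: values recorded with tf.summary.scalar appear under the SCALARS tab, the variable_summary histograms under DISTRIBUTIONS and HISTOGRAMS, and the graph written by FileWriter under GRAPHS. Note that --logdir must point to the same directory that was passed to tf.summary.FileWriter.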

꧁༺The༒End༻꧂