TensorBoard Visualization for MNIST in TensorFlow


Running mnist_with_summaries.py: visualizing the training process with TensorBoard

1. Run mnist_with_summaries.py and let the training run to completion. (The screenshot of the run shown here in the original post is omitted.)
2. When the run completes, open a terminal and launch TensorBoard with tensorboard --logdir=/tmp/mnist_logs (the directory must match the path passed to writer = tf.train.SummaryWriter('/tmp/mnist_logs', sess.graph_def)). The terminal then prints: Starting TensorBoard on port 6006 (You can navigate to http://0.0.0.0:6006)
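If the browser page later shows no data, a quick sanity check is to confirm that event files were actually written: tf.train.SummaryWriter stores its records as events.out.tfevents.* files inside the log directory. A minimal check, assuming the default path used in this post:

import glob

# TensorBoard reads these event files; an empty list means nothing was logged.
print(glob.glob('/tmp/mnist_logs/events.out.tfevents.*'))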


3. Then open a browser and go to http://0.0.0.0:6006; after pressing Enter, the TensorBoard page with the results of the run appears.

From there you can view the corresponding graphs. For more information, see: https://github.com/yhlleo/mnist
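The mechanism behind this is small: summary ops serialize tensor values, a merged op evaluates all of them at once, and a SummaryWriter appends the results to the event files that TensorBoard displays. A minimal sketch of that pattern, using the same TF 0.x API as the full script below (the toy counter variable is only an illustration):

import tensorflow as tf

sess = tf.InteractiveSession()
counter = tf.Variable(0.0, name='counter')   # toy value to plot
_ = tf.scalar_summary('counter', counter)    # attach a summary op to it
merged = tf.merge_all_summaries()            # evaluates every registered summary
writer = tf.train.SummaryWriter('/tmp/mnist_logs', sess.graph_def)
tf.initialize_all_variables().run()
increment = counter.assign_add(1.0)
for step in range(10):
    summary_str = sess.run(merged)           # serialized summary protobuf
    writer.add_summary(summary_str, step)    # one data point per step
    sess.run(increment)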

The complete code of mnist_with_summaries.py is as follows:

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
    
import tensorflow.python.platform
import input_data
import tensorflow as tf
    
flags = tf.app.flags
FLAGS = flags.FLAGS
flags.DEFINE_boolean('fake_data', False, 'If true, uses fake data '
    'for unit testing.')
flags.DEFINE_integer('max_steps', 1000, 'Number of steps to run trainer.')
flags.DEFINE_float('learning_rate', 0.01, 'Initial learning rate.')
    
    
def main(_):
    # Import data
    mnist = input_data.read_data_sets('/home/yuan/testMnist', one_hot=True,
                                      fake_data=FLAGS.fake_data)
    
    sess = tf.InteractiveSession()
    
    # Create the model
    x = tf.placeholder(tf.float32, [None, 784], name='x-input')
    W = tf.Variable(tf.zeros([784, 10]), name='weights')
    b = tf.Variable(tf.zeros([10]), name='bias')
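    # Zero initialization works here because the model is a single linear
    # layer; a multi-layer network would need random initial weights.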
    
    # Use a name scope to organize nodes in the graph visualizer
    with tf.name_scope('Wx_b'):
       y = tf.nn.softmax(tf.matmul(x, W) + b)
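       # y has shape [batch_size, 10]: a softmax probability per digit class.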
    
    # Add summary ops to collect data
    _ = tf.histogram_summary('weights', W)
    _ = tf.histogram_summary('biases', b)
    _ = tf.histogram_summary('y', y)
    
    # Define loss and optimizer
    y_ = tf.placeholder(tf.float32, [None, 10], name='y-input')
    # More name scopes will clean up the graph representation
    with tf.name_scope('xent'):
       cross_entropy = -tf.reduce_sum(y_ * tf.log(y))
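       # i.e. the batch total of -y'_i * log(y_i). Since tf.log(0) yields -inf,
       # a numerically safer alternative is tf.nn.softmax_cross_entropy_with_logits.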
    _ = tf.scalar_summary('cross entropy', cross_entropy)
    with tf.name_scope('train'):
       train_step = tf.train.GradientDescentOptimizer(
           FLAGS.learning_rate).minimize(cross_entropy)
    
    with tf.name_scope('test'):
       correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
       accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
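       # accuracy = fraction of examples whose predicted class (argmax of y)
       # matches the true class (argmax of y_).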
    _ = tf.scalar_summary('accuracy', accuracy)
    
    # Merge all the summaries and write them out to /tmp/mnist_logs
    merged = tf.merge_all_summaries()
    writer = tf.train.SummaryWriter('/tmp/mnist_logs', sess.graph_def)
    tf.initialize_all_variables().run()
    
    # Train the model, and feed in test data and record summaries every 10 steps
    
    for i in range(FLAGS.max_steps):
      if i % 10 == 0:  # Record summary data and the accuracy
        if FLAGS.fake_data:
          batch_xs, batch_ys = mnist.train.next_batch(
              100, fake_data=FLAGS.fake_data)
          feed = {x: batch_xs, y_: batch_ys}
        else:
          feed = {x: mnist.test.images, y_: mnist.test.labels}
        # The evaluation sits at this level so it runs for both the
        # fake-data and the real-data case.
        result = sess.run([merged, accuracy], feed_dict=feed)
        summary_str = result[0]
        acc = result[1]
        writer.add_summary(summary_str, i)
        print('Accuracy at step %s: %s' % (i, acc))
      else:
        batch_xs, batch_ys = mnist.train.next_batch(
            100, fake_data=FLAGS.fake_data)
        feed = {x: batch_xs, y_: batch_ys}
        sess.run(train_step, feed_dict=feed)
    
if __name__ == '__main__':
  tf.app.run()
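To reproduce the run, save the listing as mnist_with_summaries.py and invoke it with the flags defined at the top of the file, for example python mnist_with_summaries.py --max_steps=2000 --learning_rate=0.005, then start TensorBoard as in step 2. Note that input_data is the MNIST download helper from the TensorFlow tutorials, and the path passed to read_data_sets ('/home/yuan/testMnist') should be replaced with a directory on your own machine.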
