Tensorflow简介

来源:互联网 发布:mac怎么切换输入法 编辑:程序博客网 时间:2024/06/05 09:44

  • Tensorflow解析
    • 系统角度
    • 实现角度
      • Computational Graph Architecture
    • 运用角度

Tensorflow解析

系统角度

TF系统架构

实现角度

Computational Graph Architecture

Data Flow Graph

运用角度

#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""A one-hidden-layer-MLP MNIST-classifier.

Trains a single affine layer + softmax on MNIST with plain gradient
descent, printing loss/accuracy every 100 steps. Uses the TensorFlow 1.x
graph/session API.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

# Import the training data (MNIST)
from tensorflow.examples.tutorials.mnist import input_data
import tensorflow as tf

# Possibly download and extract the MNIST data set.
# Retrieve the labels as one-hot-encoded vectors.
mnist = input_data.read_data_sets("/tmp/mnist", one_hot=True)

# Create a new graph
graph = tf.Graph()

# Set our graph as the one to add nodes to
with graph.as_default():
    # Placeholder for input examples (None = variable batch dimension);
    # 784 = 28x28 flattened MNIST pixels.
    examples = tf.placeholder(shape=[None, 784], dtype=tf.float32)
    # Placeholder for one-hot labels over the 10 digit classes
    labels = tf.placeholder(shape=[None, 10], dtype=tf.float32)

    weights = tf.Variable(tf.truncated_normal(shape=[784, 10], stddev=0.1))
    bias = tf.Variable(tf.constant(0.1, shape=[10]))

    # Apply an affine transformation to the input features
    logits = tf.matmul(examples, weights) + bias
    estimates = tf.nn.softmax(logits)

    # Compute the per-example cross-entropy (summed over the class axis)
    cross_entropy = -tf.reduce_sum(labels * tf.log(estimates),
                                   reduction_indices=[1])
    # And finally the loss: mean cross-entropy over the batch
    loss = tf.reduce_mean(cross_entropy)

    # Create a gradient-descent optimizer that minimizes the loss.
    # NOTE(review): the original comment claimed a learning rate of 0.01,
    # but the code actually passes 0.5 — kept at 0.5 to preserve behavior.
    optimizer = tf.train.GradientDescentOptimizer(0.5).minimize(loss)

    # Find the indices where the predictions were correct
    correct_predictions = tf.equal(tf.argmax(estimates, dimension=1),
                                   tf.argmax(labels, dimension=1))
    accuracy = tf.reduce_mean(tf.cast(correct_predictions, tf.float32))

with tf.Session(graph=graph) as session:
    # TF 1.x-era initializer (deprecated alias of global_variables_initializer)
    tf.initialize_all_variables().run()
    for step in range(1001):
        example_batch, label_batch = mnist.train.next_batch(100)
        feed_dict = {examples: example_batch, labels: label_batch}
        if step % 100 == 0:
            # Every 100 steps, run the extra loss/accuracy fetches and report
            _, loss_value, accuracy_value = session.run(
                [optimizer, loss, accuracy],
                feed_dict=feed_dict)
            print("Loss at time {0}: {1}".format(step, loss_value))
            print("Accuracy at time {0}: {1}".format(step, accuracy_value))
        else:
            # Plain training step without fetching metrics
            optimizer.run(feed_dict)
原创粉丝点击