# 机器学习的基础讲解：神经网络

AiTechYun

http://imgcdn.atyun.com/2017/12/one-But-what-is-a-Neural-Network.mp4

http://imgcdn.atyun.com/2017/12/two-how-neural-networks-learn.mp4

http://imgcdn.atyun.com/2017/12/three-What-is-backpropagation-really-doing.mp4

http://imgcdn.atyun.com/2017/12/Backpropagation-calculus-Appendix-to-deep-learning-chapter-3.mp4

http://www.atyun.com/12400_通过简单的线性回归理解机器学习的基本原理.html

# Three-layer fully-connected network for MNIST digit classification (TensorFlow 1.x graph API).
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
import begin

# Layer sizes: two hidden ReLU layers feeding a 10-class softmax output.
l1_nodes = 200
l2_nodes = 100
final_layer_nodes = 10

# Define placeholder for data;
# also considered as the "visible layer, the layer that we see".
# Each MNIST image is flattened to a 784-dimensional float vector.
X = tf.placeholder(dtype=tf.float32, shape=[None, 784])
# Placeholder for correct labels (assumes one-hot encoded, fed at session run time).
Y_ = tf.placeholder(dtype=tf.float32)

# Define weights / layers here.
# Needs weights and bias for each layer in the network. Input to one layer is the
# output from the previous layer.
w1 = tf.Variable(initial_value=tf.truncated_normal([784, l1_nodes], stddev=0.1))
b1 = tf.Variable(initial_value=tf.zeros([l1_nodes]))
Y1 = tf.nn.relu(tf.matmul(X, w1) + b1)

w2 = tf.Variable(initial_value=tf.truncated_normal([l1_nodes, l2_nodes], stddev=0.1))
b2 = tf.Variable(tf.zeros([l2_nodes]))
Y2 = tf.nn.relu(tf.matmul(Y1, w2) + b2)

w3 = tf.Variable(initial_value=tf.truncated_normal([l2_nodes, final_layer_nodes], stddev=0.1))
b3 = tf.Variable(tf.zeros([final_layer_nodes]))
Y = tf.nn.softmax(tf.matmul(Y2, w3) + b3)

# Define cost function and evaluation metric.
# NOTE(review): -sum(Y_ * log(Y)) on raw softmax output is numerically unstable
# when Y approaches 0; tf.nn.softmax_cross_entropy_with_logits on the pre-softmax
# logits is the stable equivalent — kept as-is to preserve the tutorial's behavior.
cross_entropy = -tf.reduce_sum(Y_ * tf.log(Y))
is_correct = tf.equal(tf.argmax(Y, 1), tf.argmax(Y_, 1))
accuracy = tf.reduce_mean(tf.cast(is_correct, tf.float32))

# Gradient descent: minimize cross-entropy with a fixed learning rate.
optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.003)
train_step = optimizer.minimize(loss=cross_entropy)

Tensorflow：https://gist.github.com/conormm/1c82b093c9c6002e7ca6ff6e9fb34f05

Keras：https://gist.github.com/conormm/e1dd2ee37733f4817e09a41d625d9e7f

• 发表于:
• 原文链接：http://kuaibao.qq.com/s/20171228B0IKRJ00?refer=cp_1026

2019-02-23

2019-02-23

2019-02-23

2019-02-23

2018-06-01

2019-02-23

2019-02-23

2019-02-23

2019-02-23

2019-02-23