# 深度学习|tensorflow张量运算

• x和权重矩阵相乘
• 加上偏差值
• 激活函数

### 矩阵相乘和相加

##### 相乘

```python
import tensorflow as tf

# Row vector X (1x3) times weight matrix W (3x2) -> 1x2 result.
X = tf.Variable([[1., 1., 1.]])

W = tf.Variable([[-0.5, -0.2],
                 [-0.3,  0.4],
                 [-0.5,  0.2]])

XW = tf.matmul(X, W)

with tf.Session() as sess:
    # Variables must be initialized before they can be read.
    init = tf.global_variables_initializer()
    sess.run(init)
    print(sess.run(XW))
# [[-1.29999995  0.40000001]]
```
##### 相加

```python
# Element-wise addition of the matmul result and the bias (both 1x2).
b = tf.Variable([[0.1, 0.2]])
XW = tf.Variable([[-1.3, 0.4]])

Sum = XW + b

with tf.Session() as sess:
    init = tf.global_variables_initializer()
    sess.run(init)
    print('Sum:')
    print(sess.run(Sum))
# Sum:
# [[-1.19999993  0.60000002]]
```

### 神经网络实现

```python
# One dense layer: y = relu(X @ W + b).
X = tf.Variable([[0.4, 0.2, 0.4]])

W = tf.Variable([[-0.5, -0.2],
                 [-0.3,  0.4],
                 [-0.5,  0.2]])

b = tf.Variable([[0.1, 0.2]])

# Pre-activation output.
XWb = tf.matmul(X, W) + b

# ReLU activation clamps negative values to 0.
y = tf.nn.relu(tf.matmul(X, W) + b)

with tf.Session() as sess:
    init = tf.global_variables_initializer()
    sess.run(init)
    print('XWb:')
    print(sess.run(XWb))
    print('y:')
    print(sess.run(y))
```

##### 随机初始值

```python
# Initialize W and b from a normal distribution instead of constants.
W = tf.Variable(tf.random_normal([3, 2]))
b = tf.Variable(tf.random_normal([1, 2]))
X = tf.Variable([[0.4, 0.2, 0.4]])
y = tf.nn.relu(tf.matmul(X, W) + b)

with tf.Session() as sess:
    init = tf.global_variables_initializer()
    sess.run(init)
    print('b:')
    print(sess.run(b))
    print('W:')
    print(sess.run(W))
    print('y:')
    print(sess.run(y))
```
##### 输入用placeholder

```python
import numpy as np

W = tf.Variable(tf.random_normal([3, 2]))
b = tf.Variable(tf.random_normal([1, 2]))
# Placeholder: data is supplied at run time; None allows any batch size.
X = tf.placeholder("float", [None, 3])
y = tf.nn.relu(tf.matmul(X, W) + b)

with tf.Session() as sess:
    init = tf.global_variables_initializer()
    sess.run(init)
    X_array = np.array([[0.4, 0.2, 0.4]])
    # Feed the placeholder and fetch all four tensors in one run.
    (_b, _W, _X, _y) = sess.run((b, W, X, y), feed_dict={X: X_array})
    print('b:')
    print(_b)
    print('W:')
    print(_W)
    print('X:')
    print(_X)
    print('y:')
    print(_y)
```

### 总结

```python
def layer(output_dim, input_dim, inputs, activation=None):
    """Build one fully-connected layer: activation(inputs @ W + b).

    Args:
        output_dim: number of output units.
        input_dim: number of input features (must match inputs' last dim).
        inputs: a tensor of shape [batch, input_dim].
        activation: optional activation function (e.g. tf.nn.relu);
            None returns the linear output unchanged.

    Returns:
        A tensor of shape [batch, output_dim].
    """
    W = tf.Variable(tf.random_normal([input_dim, output_dim]))
    b = tf.Variable(tf.random_normal([1, output_dim]))
    XWb = tf.matmul(inputs, W) + b
    if activation is None:
        outputs = XWb
    else:
        outputs = activation(XWb)
    return outputs
```

158 篇文章 · 49 人订阅

0 条评论