# TensorFlow - XOR实现

| A | B | A XOR B |
|---|---|---------|
| 0 | 0 |    0    |
| 0 | 1 |    1    |
| 1 | 0 |    1    |
| 1 | 1 |    0    |

XOR 密码是一种基本的加密方法：将明文与随机生成的密钥逐位做 XOR 运算得到密文；解密时用同一密钥对密文再做一次 XOR 即可还原明文。

```#声明必要的模块
import tensorflow as tf
import numpy as np
"""
A simple numpy implementation of a XOR gate to understand the backpropagation
algorithm
"""

x = tf.placeholder(tf.float64,shape = [4,2],name = "x")
#声明输入 x 的占位符
y = tf.placeholder(tf.float64,shape = [4,1],name = "y")
#为所需的输出声明一个占位符 y

m = np.shape(x)[0]#number of training examples
n = np.shape(x)[1]#number of features
hidden_s = 2 #隐藏层中的节点数
l_r = 1#l收益率初始化

theta1 = tf.cast(tf.Variable(tf.random_normal([3,hidden_s]),name = "theta1"),tf.float64)
theta2 = tf.cast(tf.Variable(tf.random_normal([hidden_s+1,1]),name = "theta2"),tf.float64)

#进行前向传播
a1 = tf.concat([np.c_[np.ones(x.shape[0])],x],1)
#第一层的权重乘以第一层的输入

z1 = tf.matmul(a1,theta1)
#第二层的输入是第一层的输出，通过添加了激活函数和偏差列

a2 = tf.concat([np.c_[np.ones(x.shape[0])],tf.sigmoid(z1)],1)
#第二层的输入乘以权重

z3 = tf.matmul(a2,theta2)
#输出通过激活函数得到最终概率

h3 = tf.sigmoid(z3)
cost_func = -tf.reduce_sum(y*tf.log(h3)+(1-y)*tf.log(1-h3),axis = 1)

#内置于Tensorflow Optimizer，使用指定进行梯度下降
learning rate to obtain theta values

#设置所需的X和Y值以执行XOR操作
X = [[0,0],[0,1],[1,0],[1,1]]
Y = [[0],[1],[1],[0]]

#初始化所有变量，创建会话并运行TensorFlow会话
init = tf.global_variables_initializer()
sess = tf.Session()
sess.run(init)

#运行梯度下降，用于每次迭代并打印假设
obtained using the updated theta values
for i in range(100000):
sess.run(optimiser, feed_dict = {x:X,y:Y})#setting place holder values using feed_dict
if i%100==0:
print("Epoch:",i)
print("Hyp:",sess.run(h3,feed_dict = {x:X,y:Y}))```

## 视频推荐

TensorFlow - 125 - lstm-1