import numpy as np
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()  # needed so the TF1-style placeholder/Session API runs under TF2
# Step 1: generate the dataset
BATCH_SIZE=8
seed=23455
rng=np.random.RandomState(seed)  # seeded random-number generator
X=rng.rand(32,2)  # 32x2 matrix: 32 samples with 2 features each
Y=[[int(x0+x1<1)] for (x0,x1) in X]  # label is 1 when x0+x1<1, else 0
print("X:n",X)
print("Y:n",Y)
# Step 2: define the network's input, parameters and output, and the forward-propagation graph
x=tf.placeholder(tf.float32,(None,2))
y_=tf.placeholder(tf.float32,(None,1))
w1=tf.Variable(tf.random_normal([2,3],stddev=1,seed=1))
w2=tf.Variable(tf.random_normal([3,1],stddev=1,seed=1))
a=tf.matmul(x,w1)  # hidden layer (note: no activation function)
y=tf.matmul(a,w2)  # output layer
# Step 3: define the loss function and the backpropagation method
loss=tf.reduce_mean(tf.square(y-y_))  # mean squared error
train_step=tf.train.GradientDescentOptimizer(0.001).minimize(loss)  # gradient descent, learning rate 0.001
# Step 4: create a session and train for 3000 steps
with tf.Session() as sess:
    init_op=tf.global_variables_initializer()
    sess.run(init_op)
    print("w1:\n",sess.run(w1))
    print("w2:\n",sess.run(w2))
    print("\n")
    STEPS=3000
    for i in range(STEPS):
        start=(i*BATCH_SIZE)%32  # cycle through the 32 samples in batches of 8
        end=start+BATCH_SIZE
        sess.run(train_step,feed_dict={x:X[start:end],y_:Y[start:end]})
        if i%500==0:  # report the loss on the full dataset every 500 steps
            total_loss=sess.run(loss,feed_dict={x:X,y_:Y})
            print("After %d training steps, loss on all data is %g"%(i,total_loss))
    print("\n")
    print("w1:\n",sess.run(w1))
    print("w2:\n",sess.run(w2))
Output:
X:
 [[0.83494319 0.11482951]
 [0.66899751 0.46594987]
 [0.60181666 0.58838408]
 [0.31836656 0.20502072]
 [0.87043944 0.02679395]
 [0.41539811 0.43938369]
 [0.68635684 0.24833404]
 [0.97315228 0.68541849]
 [0.03081617 0.89479913]
 [0.24665715 0.28584862]
 [0.31375667 0.47718349]
 [0.56689254 0.77079148]
 [0.7321604  0.35828963]
 [0.15724842 0.94294584]
 [0.34933722 0.84634483]
 [0.50304053 0.81299619]
 [0.23869886 0.9895604 ]
 [0.4636501  0.32531094]
 [0.36510487 0.97365522]
 [0.73350238 0.83833013]
 [0.61810158 0.12580353]
 [0.59274817 0.18779828]
 [0.87150299 0.34679501]
 [0.25883219 0.50002932]
 [0.75690948 0.83429824]
 [0.29316649 0.05646578]
 [0.10409134 0.88235166]
 [0.06727785 0.57784761]
 [0.38492705 0.48384792]
 [0.69234428 0.19687348]
 [0.42783492 0.73416985]
 [0.09696069 0.04883936]]
Y:
 [[1], [0], [0], [1], [1], [1], [1], [0], [1], [1], [1], [0], [0], [0], [0], [0], [0], [1], [0], [0], [1], [1], [0], [1], [0], [1], [1], [1], [1], [1], [0], [1]]
w1:
 [[-0.8113182   1.4845988   0.06532937]
 [-2.4427042   0.0992484   0.5912243 ]]
w2:
 [[-0.8113182 ]
 [ 1.4845988 ]
 [ 0.06532937]]

After 0 training steps, loss on all data is 5.13118
After 500 training steps, loss on all data is 0.429111
After 1000 training steps, loss on all data is 0.409789
After 1500 training steps, loss on all data is 0.399923
After 2000 training steps, loss on all data is 0.394146
After 2500 training steps, loss on all data is 0.390597

w1:
 [[-0.7000663   0.9136318   0.08953571]
 [-2.3402493  -0.14641273  0.58823055]]
w2:
 [[-0.06024271]
 [ 0.9195618 ]
 [-0.06820712]]
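Note that the loss stalls near 0.39: the network y = x·w1·w2 is purely linear, while the target int(x0+x1<1) is a step function, so an exact fit under MSE is impossible. As a sanity check (my own addition, not part of the original post), one can plug the final printed weights back in and threshold the output at 0.5:

# Hypothetical sanity check using the final weights printed above
# (not in the original script).
import numpy as np

rng = np.random.RandomState(23455)
X = rng.rand(32, 2)                                   # same dataset as the script
Y = np.array([[int(x0 + x1 < 1)] for (x0, x1) in X])

w1 = np.array([[-0.7000663,  0.9136318,  0.08953571],   # final w1 from the output
               [-2.3402493, -0.14641273, 0.58823055]])
w2 = np.array([[-0.06024271], [0.9195618], [-0.06820712]])  # final w2 from the output

pred = ((X @ w1 @ w2) > 0.5).astype(int)              # threshold the regression output
print("train accuracy:", (pred == Y).mean())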
Algorithm: the forward/backward-propagation workflow is to prepare the dataset, define the forward pass, define the loss and the backpropagation step, and then run everything in a session.
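To make the "backpropagation" step concrete, here is a minimal NumPy sketch of what GradientDescentOptimizer(0.001).minimize(loss) effectively computes for this particular two-layer linear network. This is my own restatement for illustration, not code from the original post, which relies on TensorFlow's automatic differentiation:

import numpy as np

rng = np.random.RandomState(23455)
X = rng.rand(32, 2)
Y = np.array([[float(x0 + x1 < 1)] for (x0, x1) in X])

w1 = rng.randn(2, 3)   # illustrative initialisation (the script uses tf.random_normal)
w2 = rng.randn(3, 1)
lr = 0.001             # same learning rate as GradientDescentOptimizer(0.001)

a = X @ w1                        # forward propagation, layer 1
y = a @ w2                        # forward propagation, layer 2
print("loss before:", np.mean((y - Y) ** 2))

grad_y  = 2.0 * (y - Y) / len(X)  # dL/dy for L = mean((y - y_)^2)
grad_w2 = a.T @ grad_y            # dL/dw2
grad_w1 = X.T @ (grad_y @ w2.T)   # dL/dw1 (chain rule through a = X @ w1)

w1 -= lr * grad_w1                # gradient-descent update
w2 -= lr * grad_w2

y = X @ w1 @ w2
print("loss after: ", np.mean((y - Y) ** 2))

Repeating the forward pass, gradient computation, and update in a loop reproduces the training procedure above.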
Source: https://blog.csdn.net/bitcarmanlee/article/details/78819025