
A Simple Implementation of a Backpropagation Neural Network

2018-01-28 17:08
Putting the code here first; a proper write-up will come later.
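Until then, here is a compact summary of what the code implements, read straight off the code itself: a fully connected network with one hidden layer, sigmoid activations on both layers, a bias unit appended to the input, and per-sample gradient descent on the squared error. With * elementwise, · a matrix product, lr the learning rate, and [a_i, 1] the input row with a bias column of ones appended, the updates are the standard delta rule:

    delta_o = (y - a_o) * a_o * (1 - a_o)
    delta_h = (delta_o · W_o^T) * a_h * (1 - a_h)
    W_o += lr * (a_h^T · delta_o)
    W_i += lr * ([a_i, 1]^T · delta_h)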

# coding=utf-8
# Backpropagation neural network
import numpy as np

np.random.seed(0)

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

def dsigmoid(x):
    return x * (1 - x)
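# Note: dsigmoid takes the already-activated value a = sigmoid(z); by the
# identity sigmoid'(z) = sigmoid(z) * (1 - sigmoid(z)) the derivative can be
# computed as a * (1 - a) without re-evaluating sigmoid.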

class NN:

    def __init__(self, ni, nh, no):
        """
        ni: number of input neurons
        nh: number of hidden neurons
        no: number of output neurons
        """
        # wi has ni + 1 rows because run_nn appends a constant-1 bias
        # column to the input; the output layer carries no bias term
        self.wi = np.random.uniform(-0.2, 0.2, size=(ni + 1, nh))
        self.wo = np.random.uniform(-2.0, 2.0, size=(nh, no))

    def run_nn(self, inputs):
        self.ai = inputs.copy()
        input_with_b = np.column_stack((self.ai, np.ones(self.ai.shape[0])))

        # Hidden layer output (sigmoid is already vectorized, no map needed)
        self.ah = sigmoid(np.dot(input_with_b, self.wi))

        # Output layer output
        self.ao = sigmoid(np.dot(self.ah, self.wo))

        return self.ao
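    # run_nn caches self.ai / self.ah / self.ao; back_propagate reads these,
    # so it has to be called right after a forward pass on the same sample.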

    def back_propagate(self, y, learn_rate):

        # Output layer delta
        err = y - self.ao
        output_delta = err * dsigmoid(self.ao)

        # Hidden layer delta -- computed before wo is updated, so the error
        # is propagated through the weights that produced this forward pass
        hidden_err = np.dot(output_delta, self.wo.T)
        hidden_delta = hidden_err * dsigmoid(self.ah)

        # Update hidden-to-output weights
        change = np.dot(self.ah.T, output_delta)
        self.wo += change * learn_rate

        # Update input-to-hidden weights
        input_with_b = np.column_stack((self.ai, np.ones(self.ai.shape[0])))
        change = np.dot(input_with_b.T, hidden_delta)
        self.wi += change * learn_rate

        # Sum of squared error for this sample
        err = np.sum(0.5 * (y - self.ao) ** 2)
        return err

    def test(self, inputs, outputs):
        for i in range(inputs.shape[0]):
            print(self.run_nn(inputs[i:i+1]), outputs[i:i+1])

    def train(self, inputs, outputs, max_iterations=1000, learn_rate=0.5):
        for n in range(max_iterations):
            for i in range(inputs.shape[0]):
                self.run_nn(inputs[i:i+1])
                err = self.back_propagate(outputs[i:i+1], learn_rate)
            if n % 50 == 0:
                print('err: ', err)
        self.test(inputs, outputs)
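    # Note: train does per-sample (online) updates -- the weights change
    # after every row rather than once per epoch.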

def main():
    # Truth table for NAND: the output is 0 only when both inputs are 1
    x = np.array([
        [0, 0],
        [0, 1],
        [1, 0],
        [1, 1],
    ])

    y = np.array([
        [1],
        [1],
        [1],
        [0],
    ])

    nn = NN(2, 2, 1)
    nn.train(x, y)

if __name__ == '__main__':
    main()
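A quick way to check a hand-written backward pass like the one above is a numerical gradient check: nudge one weight up and down by a small eps and compare the finite-difference slope of the loss against the analytic gradient that back_propagate uses. The sketch below is only an illustration built on this post's NN class; the loss helper, eps, and the sample values are assumptions of this example, not part of the original script.

def loss(nn, x, y):
    # The same objective the script logs: 0.5 * sum of squared errors
    return np.sum(0.5 * (y - nn.run_nn(x)) ** 2)

nn = NN(2, 2, 1)
x = np.array([[0.0, 1.0]])
y = np.array([[1.0]])
eps = 1e-5

# Finite-difference gradient of the loss w.r.t. the output weights wo
num_grad = np.zeros_like(nn.wo)
for idx in np.ndindex(*nn.wo.shape):
    old = nn.wo[idx]
    nn.wo[idx] = old + eps
    l_plus = loss(nn, x, y)
    nn.wo[idx] = old - eps
    l_minus = loss(nn, x, y)
    nn.wo[idx] = old
    num_grad[idx] = (l_plus - l_minus) / (2 * eps)

# Analytic gradient: back_propagate moves wo along +ah.T · output_delta,
# which is the negative loss gradient, so dL/dwo = -ah.T · output_delta
nn.run_nn(x)
ana_grad = -np.dot(nn.ah.T, (y - nn.ao) * dsigmoid(nn.ao))
print(np.max(np.abs(num_grad - ana_grad)))  # should be vanishingly small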