
Networks with a Linear Activation Function
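Two small demos of a single neuron with a linear (identity) activation, trained with the delta rule: first on a linearly separable problem, then on XOR after lifting the inputs into a quadratic feature space.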

import numpy as np
import matplotlib.pyplot as plt
%matplotlib inline


For a Linearly Separable Problem (OR)

# inputs: first column is the bias term x0 = 1, then (x1, x2)
X = np.array([[1, 0, 0],
              [1, 0, 1],
              [1, 1, 0],
              [1, 1, 1]])
# labels (logical OR of x1, x2, encoded as -1/+1)
Y = np.array([-1, 1, 1, 1])
# weight vector, initialized at random
W = np.random.random(3)
# learning rate
lr = 0.01

# single neuron with a linear (identity) activation, trained with the
# Widrow-Hoff (delta) rule: W <- W + lr * (Y - XW) X,
# a batch gradient step on the squared error
def update():
    global X, Y, W, lr
    A = np.dot(X, W)            # linear activations
    W += lr * np.dot(Y - A, X)  # step along the negative error gradient

# run 1000 batch updates
for _ in range(1000):
    update()
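As a quick sanity check (added here, not part of the original post), thresholding the linear output at zero should reproduce the labels once training has converged:

# added sketch: the signs of the activations should match Y = [-1, 1, 1, 1]
print(np.sign(np.dot(X, W)))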

# coordinates of the positive samples (0,1), (1,0), (1,1)
x1 = [0, 1, 1]
y1 = [1, 0, 1]
# coordinates of the negative sample (0,0)
x2 = [0]
y2 = [0]

# decision boundary W0 + W1*x1 + W2*x2 = 0, solved for x2:
#   x2 = -(W1/W2)*x1 - W0/W2
xdata = np.linspace(-0.5, 1.5)
slope = -W[1] / W[2]
intercept = -W[0] / W[2]

plt.figure()
plt.plot(xdata, slope * xdata + intercept, 'k-')  # learned boundary
plt.plot(x1, y1, 'yo')  # positive samples
plt.plot(x2, y2, 'go')  # negative sample
plt.show()




For a Linearly Inseparable Problem (XOR)

# inputs: each row is the feature map phi(x1, x2) = (1, x1, x2, x1^2, x1*x2, x2^2)
# applied to the four XOR points (0,0), (0,1), (1,0), (1,1); the quadratic
# terms make XOR linearly separable in the lifted space
X = np.array([[1, 0, 0, 0, 0, 0],
              [1, 0, 1, 0, 0, 1],
              [1, 1, 0, 1, 0, 0],
              [1, 1, 1, 1, 1, 1]])
# labels (XOR of x1, x2, encoded as -1/+1)
Y = np.array([-1, 1, 1, -1])
# weight vector, one weight per feature
W = np.random.random(6)
# learning rate
lr = 0.01
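To make the feature construction explicit, here is a small helper (added for illustration; the name phi is mine, not the original author's):

# hypothetical helper: builds one row of X from a raw point (x1, x2)
def phi(x1, x2):
    return np.array([1, x1, x2, x1 * x1, x1 * x2, x2 * x2])

# e.g. phi(1, 1) gives [1 1 1 1 1 1], the last row of X above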

# the same single neuron / delta rule as above, now in the 6-d feature space
def update():
    global X, Y, W, lr
    A = np.dot(X, W)
    W += lr * np.dot(Y - A, X)

# run 1000 batch updates
for _ in range(1000):
    update()
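Another quick check (added): in the quadratic feature space the four XOR points are linearly separable, so the thresholded outputs should recover the labels:

# added sketch: the signs should approach Y = [-1, 1, 1, -1]
print(np.sign(np.dot(X, W)))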

# coordinates of the positive samples (0,1), (1,0)
x1 = [0, 1]
y1 = [1, 0]
# coordinates of the negative samples (0,0), (1,1)
x2 = [0, 1]
y2 = [0, 1]

xdata = np.linspace(-0.5, 1.5)

# the boundary W . phi(x1, x2) = 0 is a quadratic in x2:
#   W5*x2^2 + (W2 + W4*x1)*x2 + (W0 + W1*x1 + W3*x1^2) = 0
# get_root returns both solutions of the quadratic formula
def get_root(W, x):
    a = W[5]
    b = W[2] + W[4] * x
    c = W[0] + W[1] * x + W[3] * x * x
    d = np.sqrt(b * b - 4 * a * c)
    return ((-b + d) / (2 * a), (-b - d) / (2 * a))

plt.figure()
upper, lower = get_root(W, xdata)  # the two branches of the quadratic boundary
plt.plot(xdata, upper, 'k-')
plt.plot(xdata, lower, 'k-')
plt.plot(x1, y1, 'yo')  # positive samples
plt.plot(x2, y2, 'go')  # negative samples
plt.show()

