我意识到这并不是我要实现的神经网络,而是更多的一层感知器。我以为这很容易(我想是这样,但我只是想不通。我编写的代码如下:
import numpy as np
import pylab
import matplotlib.pyplot as plt
from sklearn.utils import shuffle
def shuffling(X,t):
if X.shape[0] != t.shape[0]:
X = np.transpose(X)
X,t = shuffle(X,t)
X = np.transpose(X)
t = np.transpose(t)
else:
X,t = shuffle(X,t)
X = np.transpose(X)
t = np.transpose(t)
return X,t
def generating_data():
n = 100
mA = [-1.0, 2.5]
sigmaA = 0.5
mB = [1.5, 0.9]
sigmaB = 0.5
A_1 = np.random.normal(mA[0], sigmaA, n)
A_2 = np.random.normal(mA[1], sigmaA, n)
classA = np.stack((A_1, A_2), axis=1)
B_1 = np.random.normal(mB[0], sigmaB, n)
B_2 = np.random.normal(mB[1], sigmaB, n)
classB = np.stack((B_1,B_2), axis=1)
tot_class = np.vstack((classA, classB))
target_values_A = np.ones(n)
target_values_B = np.ones(n)*(-1)
tot_target = np.concatenate((target_values_A, target_values_B), axis=0)
X = np.insert(tot_class, 2, values = 1, axis =1)
X, t = shuffling(X, tot_target)
return X,t, classA, classB, tot_class
X,t, classA, classB, tot_class = generating_data()
def perceptron2(X, t, max_iter):
n = 200
lr = 0.5
w = np.random.normal(0, 1/n, (1,3))
for k in range(max_iter):
y = np.dot(w, X)
t = np.reshape(t,(1,200))
w = w - lr*np.dot((t-y),np.transpose(X))
#X,t = shuffling(X,t)
return w
w = perceptron2(X,t,10)
print(w)
#print(w)
def plotting(classA, classB, tot_class):
w = perceptron2(X,t,100)
area = np.pi*3
plt.scatter(classA[:,0], classA[:,1], s = area, c='b', alpha=0.5)
plt.scatter(classB[:,0], classB[:,1],s=area, c= 'r', alpha = 0.5)
grid = np.linspace(np.amin(tot_class),np.amax(tot_class))
y = []
for x in grid:[enter image description here][1]
slope = -(w[0][0] / w[0][2]) / (w[0][0] / w[0][1])
intercept = -w[0][0] / w[0][2]
print(slope)
print(intercept)
y.append((slope*x) + intercept)
plt.plot(grid,y, color = 'black')
plt.show()
plotting(classA,classB,tot_class)
这是一个二进制分类问题,正如您从代码中看到的那样,我仅使用了两个功能。对于缺少图像感到抱歉,但是如果您自行运行,您将看到超平面完全关闭。从这一点上可以使我前进的任何事情都受到赞赏,谢谢