最简单的二进制神经网络

来源:互联网 发布:淘宝助理教程 编辑:程序博客网 时间:2024/06/13 08:40
import numpy as np


def hard_sigmoid(x):
    """Piecewise-linear sigmoid approximation: (x + 1) / 2 clipped to [0, 1].

    Expects a NumPy array (relies on ndarray.clip).
    """
    return ((x + 1.) / 2).clip(0, 1)


def binary_tanh_unit(x):
    """Hard tanh: rescale hard_sigmoid from [0, 1] to [-1, 1]."""
    return 2. * hard_sigmoid(x) - 1.


# Input dataset: 4 samples x 3 features (last column is a constant 1,
# acting as a bias input).
X = np.array([[0, 0, 1],
              [0, 1, 1],
              [1, 0, 1],
              [1, 1, 1]])

# Output dataset: one target per sample, as a (4, 1) column vector.
y = np.array([[0, 0, 1, 1]]).T

# Seed random numbers to make the calculation deterministic
# (just a good practice).
np.random.seed(1)

# Initialize weights randomly with mean 0, shape (3, 1).
syn0 = 2 * np.random.random((3, 1)) - 1

# NOTE: 'epoch' replaces the original loop name 'iter', which shadowed
# the Python builtin. xrange/print-statement (Python 2) ported to Python 3.
for epoch in range(10):
    # Forward propagation.
    l0 = X
    l1 = hard_sigmoid(np.dot(l0, syn0))

    # How much did we miss?
    l1_error = y - l1

    # Push the error through the hard-tanh nonlinearity to get the
    # update direction (a crude binarized surrogate for the sigmoid
    # slope used in the classic 11-line network this is based on).
    l1_delta = binary_tanh_unit(l1_error)

    # Update weights.
    syn0 += np.dot(l0.T, l1_delta)

print("Output After Training:")
print(l1)
0 0
原创粉丝点击