Simple Neural Network Implementation 02



The error function used is the mean squared error (MSE).
[Figure: mean squared error formula]
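The original figure did not survive, but the loss the code below actually computes (np.mean((out - targets) ** 2)) corresponds to the following, where the notation (m records indexed by μ, features indexed by i) is mine and the usual 1/2 factor is omitted to match the code:

    E = \frac{1}{m} \sum_{\mu=1}^{m} \left( y^{\mu} - \hat{y}^{\mu} \right)^{2},
    \qquad \hat{y}^{\mu} = \sigma\!\Big( \sum_{i} w_i x_i^{\mu} \Big)

Here y^μ is the admit label of record μ and ŷ^μ is the sigmoid output of the network for that record.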
Gradient descent steps:
[Figure: gradient descent weight-update steps]
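The figure with the update steps is also missing; written out in the same notation (η is the learning rate), the rule the training loop below implements is

    \delta^{\mu} = \left( y^{\mu} - \hat{y}^{\mu} \right)\, \hat{y}^{\mu} \left( 1 - \hat{y}^{\mu} \right),
    \qquad \Delta w_i = \frac{\eta}{m} \sum_{\mu} \delta^{\mu} x_i^{\mu},
    \qquad w_i \leftarrow w_i + \Delta w_i

that is, the squared-error gradient scaled by the sigmoid derivative, averaged over all records before each weight step.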
The dataset is graduate school admissions data (source).
Data format:
[Figure: sample rows of the data (columns: admit, gre, gpa, rank)]
The admit column is the label; the remaining columns are the features.
The network has no hidden layer.
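With no hidden layer, the model is a single sigmoid unit applied to a weighted sum of the input features, essentially logistic regression trained with a squared-error loss. For reference, the activation and the derivative that appear in the update above are

    \sigma(h) = \frac{1}{1 + e^{-h}}, \qquad \sigma'(h) = \sigma(h)\left( 1 - \sigma(h) \right)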
Code:

import pandas as pd
import numpy as np

admissions = pd.read_csv('binary.csv')

# Convert the rank feature into one-hot columns
data = pd.concat([admissions, pd.get_dummies(admissions['rank'], prefix='rank')], axis=1)
data = data.drop('rank', axis=1)

# Standardize gre and gpa (zero mean, unit standard deviation)
for field in ['gre', 'gpa']:
    mean, std = data[field].mean(), data[field].std()
    data.loc[:, field] = (data[field] - mean) / std

# Split off a test set (90% train / 10% test)
np.random.seed(1)
sample = np.random.choice(data.index, size=int(len(data) * 0.9), replace=False)
data, test_data = data.loc[sample], data.drop(sample)
features, targets = data.drop('admit', axis=1), data['admit']
features_test, targets_test = test_data.drop('admit', axis=1), test_data['admit']

def sigmoid(x):
    return 1 / (1 + np.exp(-x))

n_records, n_features = features.shape
last_loss = None
weights = np.random.normal(scale=1 / n_features ** .5, size=n_features)
epochs = 5000
learn_rate = 0.5

for e in range(epochs):
    del_w = np.zeros(weights.shape)
    # Accumulate the gradient over every record, then take one averaged step
    for x, y in zip(features.values, targets):
        output = sigmoid(np.dot(x, weights))
        error = y - output
        del_w += error * output * (1 - output) * x
    weights += learn_rate * del_w / n_records

    # Print the training loss ten times over the run
    if e % (epochs // 10) == 0:
        out = sigmoid(np.dot(features, weights))
        loss = np.mean((out - targets) ** 2)
        if last_loss and last_loss < loss:
            print("Train loss: ", loss, "   loss increasing")
        else:
            print("Train loss: ", loss)
        last_loss = loss

# Evaluate on the held-out test set
test_out = sigmoid(np.dot(features_test, weights))
predictions = test_out > 0.5
accuracy = np.mean(predictions == targets_test)
print('accuracy: %.3f' % accuracy)
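As a sanity check, the hand-rolled loop can be compared against an off-the-shelf classifier. The sketch below is not part of the original post: it assumes the same binary.csv file, repeats the preprocessing, and fits scikit-learn's LogisticRegression as a rough baseline (the accuracy will differ somewhat because of the different loss, optimizer, and regularization).

import pandas as pd
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split

# Same preprocessing as above: one-hot encode rank, standardize gre and gpa
admissions = pd.read_csv('binary.csv')
data = pd.concat([admissions, pd.get_dummies(admissions['rank'], prefix='rank')], axis=1)
data = data.drop('rank', axis=1)
for field in ['gre', 'gpa']:
    data[field] = (data[field] - data[field].mean()) / data[field].std()

X = data.drop('admit', axis=1).values.astype(float)
y = data['admit'].values
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.1, random_state=1)

# L2-regularized logistic regression as a reference point for the hand-rolled loop above
clf = LogisticRegression(max_iter=1000).fit(X_train, y_train)
print('sklearn baseline accuracy: %.3f' % clf.score(X_test, y_test))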