梯度下降与逻辑回归
来源:互联网 发布:中国沿海湿地保护网络 编辑:程序博客网 时间:2024/05/18 01:29
梯度下降:
代码:
http://blog.csdn.net/marvin521/article/details/9263483
公式:
http://blog.csdn.net/jj12345jj198999/article/details/8969393
# coding: utf-8
"""Logistic regression fitted by batch gradient ascent.

Expects a whitespace-separated data file named 'testSet' in the working
directory, one sample per line: 'x1 x2 label' with label 0 or 1.
"""
import numpy as np


def loadDataSet():
    """Read 'testSet' and return (dataMat, labelMat).

    Each data row is [1.0, x1, x2]; the leading 1.0 is the intercept
    (bias) feature.  Labels are parsed as ints (expected 0/1).
    """
    dataMat = []
    labelMat = []
    # 'with' guarantees the handle is closed (the original leaked it);
    # iterating the file directly avoids readlines()'s extra list.
    with open('testSet') as fp:
        for line in fp:
            lineArr = line.strip().split()
            dataMat.append([1.0, float(lineArr[0]), float(lineArr[1])])
            labelMat.append(int(lineArr[2]))
    return dataMat, labelMat


def sigmoid(inX):
    """Logistic function 1 / (1 + e**-x); maps a real (or array) into (0, 1)."""
    return 1.0 / (1 + np.exp(-inX))


# Plain batch gradient method (translated from the original Chinese comment).
def gradAscent(dataMatIn, classLabels):
    """Fit logistic-regression weights by full-batch gradient ascent.

    Each step adds alpha * X.T @ (y - h), the exact gradient of the
    log-likelihood — so despite the article's title this is gradient
    *ascent* on the likelihood, not descent.

    Args:
        dataMatIn:   m x n nested sequence of feature rows (first column 1.0).
        classLabels: length-m sequence of 0/1 labels.

    Returns:
        (n, 1) ndarray of fitted weights.
    """
    X = np.asarray(dataMatIn, dtype=float)                   # (m, n)
    y = np.asarray(classLabels, dtype=float).reshape(-1, 1)  # (m, 1)
    m, n = X.shape
    alpha = 0.001      # learning rate
    maxCycles = 500    # fixed iteration budget, as in the original
    weights = np.ones((n, 1))
    for _ in range(maxCycles):
        h = sigmoid(X @ weights)   # predicted probabilities, (m, 1)
        error = y - h              # residuals drive the update
        weights += alpha * (X.T @ error)
    return weights


def plotBestFit(weights):
    """Scatter both classes and draw the line w0 + w1*x1 + w2*x2 = 0."""
    # Imported lazily so the training code runs without a display backend.
    import matplotlib.pyplot as plt

    dataMat, labelMat = loadDataSet()
    dataArr = np.array(dataMat)
    n = np.shape(dataArr)[0]
    xcord1 = []; ycord1 = []
    xcord2 = []; ycord2 = []
    for i in range(n):
        if int(labelMat[i]) == 1:
            xcord1.append(dataArr[i, 1]); ycord1.append(dataArr[i, 2])
        else:
            xcord2.append(dataArr[i, 1]); ycord2.append(dataArr[i, 2])
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.scatter(xcord1, ycord1, s=30, c='red', marker='s')
    ax.scatter(xcord2, ycord2, s=30, c='green')
    x = np.arange(-3.0, 3.0, 0.1)
    # On the decision boundary sigmoid(w.x) = 0.5, i.e. w0 + w1*x1 + w2*x2 = 0,
    # so x2 = (-w0 - w1*x1) / w2.
    y = (-weights[0] - weights[1] * x) / weights[2]
    ax.plot(x, y)
    plt.xlabel('X1'); plt.ylabel('x2')
    plt.show()


if __name__ == "__main__":
    dataMat, labelMat = loadDataSet()
    print(dataMat, labelMat)
    weights = gradAscent(dataMat, labelMat)
    plotBestFit(weights)
-0.01761214.0530640-1.3956344.6625411-0.7521576.5386200-1.3223717.15285300.42336311.05467700.4067047.06733510.66739412.7414520-2.4601506.86680510.5694119.5487550-0.02663210.42774300.8504336.92033411.34718313.17550001.1768133.1670201-1.7818719.0979530-0.5666065.74900310.9316351.5895051-0.0242056.1518231-0.0364532.6909881-0.1969490.44416511.0144595.75439911.9852983.2306191-1.693453-0.5575401-0.57652511.7789220-0.346811-1.6787301-2.1244842.67247111.2179169.5970150-0.7339289.0986870-3.642001-1.61808710.3159853.52395311.4166149.6192320-0.3863233.98928610.5569218.29498411.22486311.5873600-1.347803-2.40605111.1966044.95185110.2752219.54364700.4705759.3324880-1.8895679.5426620-1.52789312.1505790-1.18524711.3093180-0.4456783.29730311.0422226.1051551-0.61878710.32098601.1520830.54846710.8285342.6760451-1.23772810.5490330-0.683565-2.16612510.2294565.9219381-0.95988511.55533600.49291110.99332400.1849928.7214880-0.35571510.3259760-0.3978228.05839700.82483913.73034301.5072785.02786610.0996716.8358391-0.34400810.71748501.7859287.7186451-0.91880111.5602170-0.3640094.7473001-0.8417224.11908310.4904261.9605391-0.0071949.07579200.35610712.44786300.34257812.2811620-0.810823-1.46601812.5307776.47680111.29668311.60755900.47548712.0400350-0.78327711.00972500.07479811.0236500-1.3374720.4683391-0.10278113.7636510-0.1473242.87484610.5183899.88703501.0153997.5718820-1.658086-0.02725511.3199442.17122812.0562165.0199811-0.8516334.3756911-1.5100476.0619920-1.076637-3.18188811.82109610.28399003.0101508.4017661-1.0994581.6882741-0.834872-1.7338691-0.8466373.84907511.40010212.62878101.7528425.46816610.0785570.05973610.089392-0.71530011.82566212.69380800.1974459.74463800.1261170.9223111-0.6797971.22053010.6779832.55666610.76134910.6938620-2.1687910.14363211.3886109.34199700.31702914.7390250
0 0
- 梯度下降与逻辑回归
- 逻辑回归与梯度下降
- 逻辑回归与梯度下降
- 逻辑回归与梯度下降法
- 逻辑回归:损失函数与梯度下降
- 逻辑回归及梯度下降
- 梯度下降和逻辑回归
- 逻辑回归-梯度下降训练
- 梯度下降求解逻辑回归
- 回归与梯度下降
- 回归与梯度下降
- 逻辑斯蒂回归与梯度下降算法
- 逻辑斯蒂回归与梯度下降算法
- 梯度下降 && 线性回归 && 逻辑回归 && softmax
- 逻辑回归梯度下降公式详细推导
- 逻辑回归梯度下降法详解
- 逻辑回归-梯度下降法 python实现
- 逻辑回归模型推导及梯度下降
- swift 字符串,数组,字典 的那些事<三>
- Bootstrap3的栅格化样式
- Jquery分步指引给件
- Codeforces Round #292 (Div. 2) -- A. Drazil and Date
- 类和结构体一
- 梯度下降与逻辑回归
- SecureCRT连接Linux显示Mysql记录中文乱码
- LeetCode 题解(106): Permutations
- oracle用户创建及权限设置
- LDAP概念和原理
- UIStepper
- 3D_HTML5 3D元素周期表
- 九度OJ-题目1214:丑数
- java远程调.net webservice例子