A Python implementation of gradient descent


From a simulated data set to curve fitting
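The script below samples points from y = x², poses the model a·x² + b·x + c, and minimizes the squared-error loss over the training points by batch gradient descent. For reference, the derivatives that the sp.diff calls in the code evaluate symbolically work out to:

L(a,b,c) = \sum_i \left(y_i - a x_i^2 - b x_i - c\right)^2

\frac{\partial L}{\partial a} = -2 \sum_i x_i^2 \left(y_i - a x_i^2 - b x_i - c\right), \quad
\frac{\partial L}{\partial b} = -2 \sum_i x_i \left(y_i - a x_i^2 - b x_i - c\right), \quad
\frac{\partial L}{\partial c} = -2 \sum_i \left(y_i - a x_i^2 - b x_i - c\right)

and each iteration updates a \leftarrow a - \frac{\alpha}{N}\,\frac{\partial L}{\partial a} (likewise for b and c), with learning rate α = 0.0001 and N the number of training points.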

# -*- coding: utf-8 -*-
"""
Created on Tue Sep  5 21:21:58 2017
@author: wjw
Generate a simulated data set, then fit a curve to it.
"""
import sympy as sp
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt


def gradient_descent(train, A, B, C):
    # The model to fit is a*x**2 + b*x + c
    a = sp.Symbol("a")
    b = sp.Symbol("b")
    c = sp.Symbol("c")

    n = 0
    max_iter = 1000
    alpha = 0.0001    # learning rate
    epsilon = 0.001   # convergence threshold

    # Plot the training points in red
    for index, row in train.iterrows():
        plt.plot(row['x'], row['y'], 'ro')

    error1 = 0
    while True:
        n += 1
        if n > max_iter:
            break
        sum_A = sum_B = sum_C = 0
        error2 = 0
        for index, row in train.iterrows():  # iterate over the training rows
            x = row['x']
            y = row['y']
            # Differentiate the squared-error loss symbolically and evaluate
            # the derivative at the current parameter values
            sum_A += sp.diff((y - a*x**2 - b*x - c)**2, a).subs({a: A, b: B, c: C})
            sum_B += sp.diff((y - a*x**2 - b*x - c)**2, b).subs({a: A, b: B, c: C})
            sum_C += sp.diff((y - a*x**2 - b*x - c)**2, c).subs({a: A, b: B, c: C})
            error2 += (y - A*x**2 - B*x - C)**2
        print("sumA:", sum_A, '\n', 'sumB:', sum_B, '\n', 'sumC:', sum_C, '\n', abs(error2 - error1))
        # Gradient step: move each parameter against its averaged gradient
        A -= alpha * (sum_A / train.shape[0])
        B -= alpha * (sum_B / train.shape[0])
        C -= alpha * (sum_C / train.shape[0])

        # Stop when the average loss no longer changes noticeably
        if abs(error1 - error2) / train.shape[0] < epsilon:
            break
        error1 = error2
    print(n)  # number of iterations used
    return A, B, C


if __name__ == "__main__":
    x = np.arange(1, 10, 0.2)
    y = list(map(lambda x: x**2, x))  # in Python 3, map returns an iterator, so convert it to a list
    dzip = np.array(list(zip(x, y)))
    dataSet = pd.DataFrame(dzip, columns=["x", "y"])  # a DataFrame can be built from an array as well as a list
    train = dataSet.sample(axis=0, frac=0.3)  # randomly sample 30% of the rows (14 points) as the training set

    A, B, C = gradient_descent(train, 1, 1, 2)
    print(A, B, C)
    X = np.arange(1, 10, 0.01)
    Y = list(map(lambda x: A*x**2 + B*x + C, X))
    plt.plot(X, Y, color='blue')
    plt.show()
The resulting plot, with the sampled points in red and the fitted curve in blue, looks like this:
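As a side note, calling sp.diff inside the double loop re-derives the same symbolic expressions on every pass, which is slow. A minimal sketch of the same fit using the hand-derived gradients above and numpy vectorization might look like the following; the helper name fit_quadratic and its defaults are my own choices for illustration, not part of the original post:

import numpy as np

def fit_quadratic(x, y, a=1.0, b=1.0, c=2.0,
                  alpha=0.0001, epsilon=0.001, max_iter=1000):
    """Fit y ~ a*x**2 + b*x + c by batch gradient descent (illustrative helper)."""
    x = np.asarray(x, dtype=float)
    y = np.asarray(y, dtype=float)
    n = len(x)
    prev_loss = 0.0
    for _ in range(max_iter):
        residual = y - (a * x**2 + b * x + c)   # y_i - (a x_i^2 + b x_i + c)
        loss = np.sum(residual**2)
        # Hand-derived gradients of the squared-error loss
        grad_a = -2.0 * np.sum(x**2 * residual)
        grad_b = -2.0 * np.sum(x * residual)
        grad_c = -2.0 * np.sum(residual)
        # Averaged gradient step, as in the script above
        a -= alpha * grad_a / n
        b -= alpha * grad_b / n
        c -= alpha * grad_c / n
        # Same stopping rule: average change in loss below epsilon
        if abs(loss - prev_loss) / n < epsilon:
            break
        prev_loss = loss
    return a, b, c

# Example usage on the same kind of simulated data
xs = np.arange(1, 10, 0.2)
ys = xs**2
print(fit_quadratic(xs, ys))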
