A Simple Matlab Implementation of an ANN (Artificial Neural Network)

<pre name="code" class="plain"><span style="color:#993399;">function</span> ANN(m,n,o)              <span style="color:#009900;">  %m是自变量因素行数,n是因变量因素行数,o是所要预测用的自变量因素行数,一般m==o</span>warning off;format long g;[filename filepath]=uigetfile(<span style="color:#6600cc;">'*.xlsx','选择神经网络需要参考训练的样本数据'</span>);N=n;M=m;%[filename2 filepath2]=uigetfile(<span style="color:#6600cc;">'*.xlsx','请选择神经网络需要预测的样本数据'</span>);             xlsx=[filepath filename];XLSX=xlsread(xlsx);[row column]=size(XLSX);samnum=column;                     <span style="color:#6600cc;">%暂时选择数据列数作为样本数量和训练数量</span>testsamnum=column;forcastsam=o;hiddenunit=8;indim=M;outdim=N;p=XLSX(1:M,:);t=XLSX(M+1:row,:);[samin,minp,maxp,tn,mint,maxt]=premnmx(p,t);rand('state',sum(100*clock));noisevar=0.01;noise=noisevar*randn(o,samnum);samout=tn+noise;testsamin=samin;testsamout=samout;maxepochs=50000;               <span style="color:#006600;"> </span><span style="color:#009900;">%最大训练次数</span>learnrate=0.035;                <span style="color:#009900;">%学习速率</span>maxerror=0.65*10^(-3);w1=0.5*rand(hiddenunit,M)-0.1;B1=0.5*rand(hiddenunit,1)-0.1;w2=0.5*rand(N,hiddenunit)-0.1;B2=0.5*rand(N,1)-0.1;errhistory=[];for i=1:maxepochs;    hiddenout=logsig(w1*samin+repmat(B1,1,samnum));    networkout=w2*hiddenout+repmat(B2,1,samnum);    error=samout-networkout;    sse=sumsqr(error);    errhistory=[errhistory sse];    if sse<maxerror,break,end    <span style="color:#009900;">%------------------神经网络核心--------------------%</span>    delta2=error;    delta1=w2'*delta2.*hiddenout.*(1-hiddenout);    dw2=delta2*hiddenout';    dB2=delta2*ones(samnum,1);    dw1=delta1*samin';    dB1=delta1*ones(samnum,1);    <span style="color:#009900;">%-------------权值/阀值完成重新动态调整--------------%</span>    w2=w2+learnrate*dw2;    B2=B2+learnrate*dB2;    w1=w1+learnrate*dw1;    B1=B1+learnrate*dB1;enddisp([<span style="color:#6600cc;">'神经网络已经完成训练,请导入预测数据'</span>])hiddenout=logsig(w1*samin+repmat(B1,1,testsamnum));anew=w2*hiddenout+repmat(B2,1,testsamnum);ANS=postmnmx(anew,mint,maxt);%绘制x=1:column;plot(x,ANS,'o',x,t,'b--+')[filename2 filepath2]=uigetfile(<span style="color:#6600cc;">'*.xlsx','请选择神经网络需要预测的样本数据'</span>);xlsx2=[filepath2 filename2];XLSX2=xlsread(xlsx2);pnew=tramnmx(XLSX2,minp,maxp);hiddenout=logsig(w1*pnew+repmat(B1,1,forcastsam));anew=w2*hiddenout+repmat(B2,1,forcastsam);answer=postmnmx(anew,mint,maxt)  <span style="color:#009900;">%输出预测数据</span> <span style="color:#009900;">%以下函数实现最优化功能,不需要可直接删去</span>[row2 column2]=size(pnew);zero=eye(row2);zero2=eye(row2);A=pnewfor i=1:M   zero(i,i)=1.01;   zero2(i,i)=0.09;   testnum1=0;testnum2=0;   B=zero*A;   C=zero2*A;   for j=1:10000       B=zero*A;       C=zero2*A;       <span style="color:#009900;">%如果说在对目前参数值左右变化时输出结果总不是最大,则循环继续</span>       if postmnmx(w2*logsig(w1*A+repmat(B1,1,forcastsam))+repmat(B2,1,forcastsam),mint,maxt)<postmnmx(w2*logsig(w1*B...               +repmat(B1,1,forcastsam))+repmat(B2,1,forcastsam),mint,maxt)||...               postmnmx(w2*logsig(w1*A+repmat(B1,1,forcastsam))+repmat(B2,1,forcastsam),mint,maxt)<...               postmnmx(w2*logsig(w1*C+repmat(B1,1,forcastsam))+repmat(B2,1,forcastsam),mint,maxt)           if postmnmx(w2*logsig(w1*B+repmat(B1,1,forcastsam))+repmat(B2,1,forcastsam),mint,maxt)>...                   postmnmx(w2*logsig(w1*A+repmat(B1,1,forcastsam))+repmat(B2,1,forcastsam),mint,maxt)               A=B;testnum1=testnum1+1;           elseif postmnmx(w2*logsig(w1*A+repmat(B1,1,forcastsam))+repmat(B2,1,forcastsam),mint,maxt)>...                   
postmnmx(w2*logsig(w1*C+repmat(B1,1,forcastsam))+repmat(B2,1,forcastsam),mint,maxt)               A=C;testnum2=tsetnum2+1;           end       else break;       end   end   zero(i,i)=1;   zero2(i,i)=1;endpostmnmx(A,minp,maxp)testnum1testnum2<span style="color:#993399;">end</span>
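
For context, here is a minimal, hypothetical usage sketch that is not part of the original post. It assumes three input-factor rows and one target row, writes two small demo workbooks with xlswrite, and then calls ANN; the file names and data are made up, and the third argument is taken as the number of columns in the prediction workbook, which is what the repmat calls inside the function require.

% Hypothetical usage sketch: 3 input factors (rows), 1 target row,
% 30 training samples (columns) and 20 prediction samples.
% File names and data below are invented for illustration only.
Xtrain = rand(3,30);                          % 3 x 30 input factors
Ttrain = sum(Xtrain,1) + 0.05*randn(1,30);    % 1 x 30 target row
xlswrite('ann_train_demo.xlsx',  [Xtrain; Ttrain]);   % rows 1-3 inputs, row 4 target
xlswrite('ann_predict_demo.xlsx', rand(3,20));        % same 3 input rows, 20 samples
% When the two file dialogs appear, pick ann_train_demo.xlsx first and
% ann_predict_demo.xlsx second.
ANN(3,1,20)

Note that premnmx, tramnmx, postmnmx, and logsig come from the (older) Neural Network Toolbox; newer MATLAB releases replace the first three with mapminmax, so the listing above may need minor changes to run there.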

A simple attempt from a mathematical modeling competition.