神经网络与深度学习(二又二分之一)cpp-二的补充

来源:互联网 发布:域服务器软件分发 编辑:程序博客网 时间:2024/04/28 22:00

承接二

补充了DataSet的归一函数

void Normaliz(){if (this->Max)delete Max;this->Max = new double[this->InputNum + this->OutputNum];for (int i = 0; i < this->InputNum + this->OutputNum; ++i){double max = DBL_MIN;for (int j = 0; j < DataMap.size(); ++j)max = (max > DataMap[j][i] ? max : DataMap[j][i]);for (int j = 0; j < DataMap.size(); ++j){DataMap[j][i] = (DataMap[j][i] / max) * 0.8 + 0.1;}Max[i] = max;}}

为神经网络添加了一个 带还原功能的Test

// Run the network over a (normalized) data set and print each network output
// next to its expected value. When Set carries column maxima (Set->GetMax()),
// both values are de-normalized with the inverse of Normaliz:
//   v = (v' - 0.1) / 0.8 * max.
// Afterwards dumps the input->hidden and hidden->output weight matrices.
void Test_Reget(DataSet *Set)
{
    // (the original also allocated an `expect` buffer that was never used)
    double *output = new double[this->OutLayerNeuNum];
    for (int i = 0; i < Set->GetRows(); ++i)
    {
        for (int j = 0; j < Set->GetInputNum(); ++j)
            this->InputNeurons[j]->SetInput(Set->GetDataMap()[i][j]);
        this->GetOutput(output);
        for (int j = 0; j < Set->GetOutputNum(); ++j)
        {
            double o = output[j];
            double e = Set->GetDataMap()[i][Set->GetInputNum() + j];
            if (Set->GetMax())
            {
                // undo the (v / max) * 0.8 + 0.1 normalization
                o = (o - 0.1) / 0.8 * Set->GetMax()[Set->GetInputNum() + j];
                e = (e - 0.1) / 0.8 * Set->GetMax()[Set->GetInputNum() + j];
            }
            cout << "output: ";
            cout << o << "\t";
            cout << "expect: ";
            cout << e << "\t";
        }
        cout << endl;
    }
    cout << endl;
    cout << "in to hide W:" << endl;
    for (int i = 0; i < this->HideLayerNeuNum; ++i)
    {
        for (int j = 0; j < this->InLayerNeuNum; ++j)
            cout << this->HidenNeurons[i]->GetWeight()[j] << "  ";
        cout << endl;
    }
    cout << endl;
    cout << "hide to out W:" << endl;
    for (int i = 0; i < this->OutLayerNeuNum; ++i)
    {
        for (int j = 0; j < this->HideLayerNeuNum; ++j)
            cout << this->OutputNeurons[i]->GetWeight()[j] << "  ";
        cout << endl;
    }
    delete[] output;   // was leaked on every call
}



以下为新的 DataSet 以及 MultiLayerPerceptron。TransferFunc 增加了一些函数,但是还没用过。将书上预测股票的前五个样本试了一下,有一定误差;不过数据密集,没啥参考价值。


DataSet

//DateSet.h
//created by WK
#ifndef DATASET_H
#define DATASET_H

#include <vector>
#include <cfloat>

using namespace std;

// A table of training rows. Each row is one heap array holding InputNum input
// values followed by OutputNum expected-output values.
class DataSet
{
private:
    int             InputNum;   // number of input columns per row
    int             OutputNum;  // number of expected-output columns per row
    vector<double*> DataMap;    // owned rows, each of size InputNum + OutputNum
    double          *Max;       // per-column maxima recorded by Normaliz(); NULL before first call

public:
    DataSet(int inputnum, int outputnum)
    {
        this->InputNum  = inputnum;
        this->OutputNum = outputnum;
        this->Max       = NULL;
    }

    // Release every owned row and the column-maximum array (was leaked).
    // NOTE(review): the class is shallow-copyable; do not copy a DataSet now
    // that it owns its rows.
    ~DataSet()
    {
        for (size_t i = 0; i < DataMap.size(); ++i)
            delete[] DataMap[i];
        delete[] Max;
    }

    // Copy one row (inputs, then outputs) into the set; the caller keeps
    // ownership of the argument arrays.
    void AddRow(double *inputArray, double *outputArray)
    {
        double *data = new double[this->InputNum + this->OutputNum];
        for (int i = 0; i < this->InputNum; ++i)
            data[i] = inputArray[i];
        for (int i = 0; i < this->OutputNum; ++i)
            data[InputNum + i] = outputArray[i];
        this->DataMap.push_back(data);
    }

    // Normalize each column via v' = (v / columnMax) * 0.8 + 0.1 and record
    // the maxima in Max for later de-normalization.
    void Normaliz()
    {
        // BUG FIX: the old code did `if (Max) delete Max; else Max = new ...`,
        // so a second call freed Max and then wrote through the dangling
        // pointer. Always release (delete[] for new[]) and reallocate.
        delete[] this->Max;
        this->Max = new double[this->InputNum + this->OutputNum];
        for (int i = 0; i < this->InputNum + this->OutputNum; ++i)
        {
            // -DBL_MAX, not DBL_MIN: DBL_MIN is the smallest POSITIVE double
            // and would break all-negative columns.
            double max = -DBL_MAX;
            for (size_t j = 0; j < DataMap.size(); ++j)
                max = (max > DataMap[j][i] ? max : DataMap[j][i]);
            for (size_t j = 0; j < DataMap.size(); ++j)
                DataMap[j][i] = (DataMap[j][i] / max) * 0.8 + 0.1;
            Max[i] = max;
        }
    }

    double *GetMax()             { return this->Max; }
    int GetInputNum()            { return this->InputNum; }
    int GetOutputNum()           { return this->OutputNum; }
    int GetRows()                { return (int)DataMap.size(); }
    vector<double*> GetDataMap() { return DataMap; }
};
#endif // !DATASET_H

TransferFunc

//TransferFunc.h
//created by WK
#ifndef TRANSFERFUNC_H
#define TRANSFERFUNC_H

#include <cmath>

// Identifiers for the activation functions understood by Function::GetResult.
enum Functypes
{
    FUNCTYPE_TANH,
    FUNCTYPE_STEP,
    FUNCTYPE_LINEAR,
    FUNCTYPE_SIGMOID,
    FUNCTYPE_SGN,
    FUNVTYPE_RAMP
};

// Evaluates one of several scalar activation functions, selected at runtime.
class Function
{
public:
    // Apply the activation named by funcType to input.
    // Unrecognized funcType values fall back to the identity function.
    double GetResult(int funcType, double input)
    {
        switch (funcType)
        {
        case FUNCTYPE_TANH:
            return tanh(input);
        case FUNCTYPE_STEP:
            // Heaviside step: 0 for input <= 0, else 1
            return (input <= 0) ? 0.0 : 1.0;
        case FUNCTYPE_LINEAR:
            // identity
            return input;
        case FUNCTYPE_SIGMOID:
            // logistic function 1 / (1 + e^-x)
            return 1.0 / (1.0 + exp(-input));
        case FUNCTYPE_SGN:
            // sign function; note sgn(0) == 1 here
            return (input < 0) ? -1.0 : 1.0;
        case FUNVTYPE_RAMP:
            // clamp input to [0, 1]
            if (input < 0)
                return 0.0;
            return (input > 1) ? 1.0 : input;
        default:
            return input;
        }
    }
};
#endif // !TRANSFERFUNC_H

MultiLayerPerceptron


#include <vector>#include <iostream>#include "TransferFunc.h"#include "DataSet.h"#include <time.h>#include <cstdlib>using namespace std;#define WINITVALUE 0.001#define TINITVALUE 0//神经元class Neuron{private:doubleInput;doubleOutput;doubleThreshold;double*Last_weight;//神经元维护后向的权重intLastLayerNeuNum;intTransferFunctionType;FunctionTransferfunction;public:Neuron(double threshold, int lastlayerneunum, int funcType){this->Input=0;this->Output=0;this->Threshold=threshold;this->LastLayerNeuNum=lastlayerneunum;this->TransferFunctionType=funcType;this->Last_weight=new double[lastlayerneunum];//关键的初始化权值for (int i = 0; i < lastlayerneunum; ++i)this->Last_weight[i] = (2.0*(double)rand() / RAND_MAX) - 1;}void SetInput(double input){this->Input = input;}double GetOutput(){this->Output = Transferfunction.GetResult(this->TransferFunctionType, this->Input - this->Threshold);return this->Output;}double* GetThreshold(){return &this->Threshold;}double *GetWeight(){return this->Last_weight;}void SetFuncType(int functype){this->TransferFunctionType = functype;}};//多层感知机class MultiLayerPerceptron{private:intOutTransfetFunctionType;intHideTransfetFunctionType;intInTransfetFunctionType;intInLayerNeuNum;intHideLayerNeuNum;intOutLayerNeuNum;doubleSpeed;Neuron**InputNeurons;Neuron**OutputNeurons;Neuron**HidenNeurons;public:MultiLayerPerceptron(int intransferfunctiontype, int inLayerNeuNum, int hidetransferfunctiontype, int hideLayerNeuNum, int outtransferfunctiontype, int outLayerNeuNum, double speed){this->InTransfetFunctionType=intransferfunctiontype;this->HideTransfetFunctionType=hidetransferfunctiontype;this->OutTransfetFunctionType=outtransferfunctiontype;this->InLayerNeuNum=inLayerNeuNum;this->HideLayerNeuNum=hideLayerNeuNum;this->OutLayerNeuNum=outLayerNeuNum;this->Speed=speed;this->InputNeurons= (Neuron**)new void*[inLayerNeuNum];for (int i = 0; i < inLayerNeuNum; ++i)this->InputNeurons[i] = new Neuron(TINITVALUE, 0, intransferfunctiontype);this->HidenNeurons= (Neuron**)new 
void*[hideLayerNeuNum];for (int i = 0; i < hideLayerNeuNum; ++i)this->HidenNeurons[i] = new Neuron(TINITVALUE, inLayerNeuNum, hidetransferfunctiontype);this->OutputNeurons = (Neuron**)new void*[outLayerNeuNum];for (int i = 0; i < outLayerNeuNum; ++i)this->OutputNeurons[i] = new Neuron(TINITVALUE, hideLayerNeuNum, outtransferfunctiontype);}//获取正向的输出void GetOutput(double *output){double sum;for (int i = 0; i < this->HideLayerNeuNum; ++i){sum = 0;for (int j = 0; j < this->InLayerNeuNum; ++j)sum += this->HidenNeurons[i]->GetWeight()[j] * this->InputNeurons[j]->GetOutput();this->HidenNeurons[i]->SetInput(sum);}for (int i = 0; i < this->OutLayerNeuNum; ++i){sum = 0;for (int j = 0; j < this->HideLayerNeuNum; ++j)sum += this->OutputNeurons[i]->GetWeight()[j] * this->HidenNeurons[j]->GetOutput();this->OutputNeurons[i]->SetInput(sum);output[i] = this->OutputNeurons[i]->GetOutput();}}//学习所有数据一次void Learn(DataSet *trainingSet){double *expect;double *data;double *output = new double[this->OutLayerNeuNum];for (int i = 0; i < trainingSet->GetRows(); ++i){data= trainingSet->GetDataMap()[i];expect= data + trainingSet->GetInputNum();for (int j = 0; j < trainingSet->GetInputNum(); ++j)this->InputNeurons[j]->SetInput(data[j]);this->GetOutput(output);//更改隐藏层到输出层权重以及阈值//更新公式详见机器学习for (int j = 0; j < this->OutLayerNeuNum; ++j){double delta = this->Speed * output[j] * (1 - output[j]) * (expect[j] - output[j]);for (int k = 0; k < this->HideLayerNeuNum; ++k)this->OutputNeurons[j]->GetWeight()[k] += (delta * this->HidenNeurons[k]->GetOutput());*this->OutputNeurons[j]->GetThreshold() -= delta;}//更改输入层到隐藏层的权重以及阈值//更新公式详见机器学习for (int j = 0; j < this->HideLayerNeuNum; ++j){double t = 0;for (int k = 0; k < this->OutLayerNeuNum; ++k)t += (this->OutputNeurons[k]->GetWeight()[j] * output[k] * (1 - output[k])*(expect[k] - output[k]));double delta = this->HidenNeurons[j]->GetOutput() * (1 - this->HidenNeurons[j]->GetOutput()) * t;for (int k = 0; k < this->InLayerNeuNum; 
++k)this->HidenNeurons[j]->GetWeight()[k] += (this->Speed * this->InputNeurons[k]->GetOutput() * delta);*this->HidenNeurons[j]->GetThreshold() -= (this->Speed * delta);}}}void Test(DataSet *Set){double *output = new double[this->OutLayerNeuNum];double *expect = new double[this->OutLayerNeuNum];for (int i = 0; i < Set->GetRows(); ++i){for (int j = 0; j < Set->GetInputNum(); ++j)this->InputNeurons[j]->SetInput(Set->GetDataMap()[i][j]);this->GetOutput(output);for (int j = 0; j < Set->GetOutputNum(); ++j){cout << "output: ";cout << output[j] << "\t";cout << "expect: ";cout << Set->GetDataMap()[i][Set->GetInputNum()+j]<<"\t";}cout << endl;} cout << endl;cout << "in to hide W:" << endl;for (int i = 0; i < this->HideLayerNeuNum; ++i){for (int j = 0; j < this->InLayerNeuNum; ++j){cout << this->HidenNeurons[i]->GetWeight()[j] << "  ";}cout << endl;}cout << endl;cout << "hide to out W:" << endl;for (int i = 0; i < this->OutLayerNeuNum; ++i){for (int j = 0; j < this->HideLayerNeuNum; ++j){cout << this->OutputNeurons[i]->GetWeight()[j] << "  ";}cout << endl;}}void Test_Reget(DataSet *Set){double *output = new double[this->OutLayerNeuNum];double *expect = new double[this->OutLayerNeuNum];for (int i = 0; i < Set->GetRows(); ++i){for (int j = 0; j < Set->GetInputNum(); ++j)this->InputNeurons[j]->SetInput(Set->GetDataMap()[i][j]);this->GetOutput(output);for (int j = 0; j < Set->GetOutputNum(); ++j){double o = output[j], e = Set->GetDataMap()[i][Set->GetInputNum() + j];if (Set->GetMax()){o = (o - 0.1) / 0.8 * Set->GetMax()[Set->GetInputNum() + j];e = (e - 0.1) / 0.8 * Set->GetMax()[Set->GetInputNum() + j];}cout << "output: ";cout << o << "\t";cout << "expect: ";cout << e << "\t";}cout << endl;}cout << endl;cout << "in to hide W:" << endl;for (int i = 0; i < this->HideLayerNeuNum; ++i){for (int j = 0; j < this->InLayerNeuNum; ++j){cout << this->HidenNeurons[i]->GetWeight()[j] << "  ";}cout << endl;}cout << endl;cout << "hide to out W:" << endl;for (int i = 0; i < 
this->OutLayerNeuNum; ++i){for (int j = 0; j < this->HideLayerNeuNum; ++j){cout << this->OutputNeurons[i]->GetWeight()[j] << "  ";}cout << endl;}}};int main(){/*DataSet *trainingSet = new DataSet(2, 1);trainingSet->AddRow(new double[2]{ 1,1 }, new double[1]{ 0 });trainingSet->AddRow(new double[2]{ 1,0 }, new double[1]{ 1 });trainingSet->AddRow(new double[2]{ 0,1 }, new double[1]{ 1 });trainingSet->AddRow(new double[2]{ 0,0 }, new double[1]{ 0 });*/DataSet *trainingSet = new DataSet(4, 1);trainingSet->AddRow(new double[4]{ 3710,3690,3890,3695 }, new double[1]{ 3666 });trainingSet->AddRow(new double[4]{ 3690,3890,3695,3666 }, new double[1]{ 3692 });trainingSet->AddRow(new double[4]{ 3890,3695,3666,3692 }, new double[1]{ 3886 });trainingSet->AddRow(new double[4]{ 3695,3666,3692,3886 }, new double[1]{ 3914 });trainingSet->AddRow(new double[4]{ 3666,3692,3886,3914 }, new double[1]{ 3956 });trainingSet->AddRow(new double[4]{ 3692,3886,3914,3956 }, new double[1]{ 3953 });trainingSet->Normaliz();//层激励函数类型 神经元个数... 学习速率MultiLayerPerceptron *m = new MultiLayerPerceptron(FUNCTYPE_LINEAR, 4, FUNCTYPE_SIGMOID, 9, FUNCTYPE_SIGMOID, 1, 0.9);//学习1000次for (int i = 0; i < 10000; ++i)m->Learn(trainingSet);DataSet *TestSet = new DataSet(4, 1);TestSet->AddRow(new double[4]{ 3886,3914,3956,3953 }, new double[1]{4044});TestSet->Normaliz();//m->Test(TestSet);m->Test_Reget(TestSet);system("pause");return 0;}

代码一复制上来就特别丑 伤心。。。

0 0
原创粉丝点击