

Neural Networks and Deep Learning (2½), C++: a Supplement to Part 2

2019-11-14 13:02:59

This continues from Part 2.

I have added a normalization function to DataSet:

void Normaliz()
{
    if (this->Max)
        delete[] Max;
    this->Max = new double[this->InputNum + this->OutputNum];
    for (int i = 0; i < this->InputNum + this->OutputNum; ++i)
    {
        // DBL_MIN is the smallest positive double; it works as a seed here because the data is positive
        double max = DBL_MIN;
        for (int j = 0; j < (int)DataMap.size(); ++j)
            max = (max > DataMap[j][i] ? max : DataMap[j][i]);
        // scale every value of this column into (0.1, 0.9]
        for (int j = 0; j < (int)DataMap.size(); ++j)
            DataMap[j][i] = (DataMap[j][i] / max) * 0.8 + 0.1;
        // remember the column maximum so the values can be restored later
        Max[i] = max;
    }
}

I also gave the network a Test variant with de-normalization, so it reports values on the original scale:

void Test_Reget(DataSet *Set)
{
    double *output = new double[this->OutLayerNeuNum];
    for (int i = 0; i < Set->GetRows(); ++i)
    {
        // feed one row and run the forward pass
        for (int j = 0; j < Set->GetInputNum(); ++j)
            this->InputNeurons[j]->SetInput(Set->GetDataMap()[i][j]);
        this->GetOutput(output);
        for (int j = 0; j < Set->GetOutputNum(); ++j)
        {
            double o = output[j], e = Set->GetDataMap()[i][Set->GetInputNum() + j];
            if (Set->GetMax())
            {
                // undo the normalization: x = (x_norm - 0.1) / 0.8 * max
                o = (o - 0.1) / 0.8 * Set->GetMax()[Set->GetInputNum() + j];
                e = (e - 0.1) / 0.8 * Set->GetMax()[Set->GetInputNum() + j];
            }
            cout << "output: " << o << "\t";
            cout << "expect: " << e << "\t";
        }
        cout << endl;
    }
    cout << endl;
    cout << "in to hide W:" << endl;
    for (int i = 0; i < this->HideLayerNeuNum; ++i)
    {
        for (int j = 0; j < this->InLayerNeuNum; ++j)
            cout << this->HidenNeurons[i]->GetWeight()[j] << "  ";
        cout << endl;
    }
    cout << endl;
    cout << "hide to out W:" << endl;
    for (int i = 0; i < this->OutLayerNeuNum; ++i)
    {
        for (int j = 0; j < this->HideLayerNeuNum; ++j)
            cout << this->OutputNeurons[i]->GetWeight()[j] << "  ";
        cout << endl;
    }
    delete[] output;
}
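For reference, the two mappings are inverses of each other: Normaliz() sends a raw value x to x / max * 0.8 + 0.1, and Test_Reget() sends it back with (x_norm - 0.1) / 0.8 * max. A minimal stand-alone check of the round trip (the numbers are just one value from the stock data further down; the snippet itself is not part of the original code):

#include <cmath>
#include <iostream>

int main()
{
    double max  = 3956.0;                     // example column maximum
    double x    = 3914.0;                     // a raw value from that column
    double norm = (x / max) * 0.8 + 0.1;      // forward mapping used by Normaliz(), about 0.89
    double back = (norm - 0.1) / 0.8 * max;   // inverse mapping used by Test_Reget()
    std::cout << norm << " " << back << " " << std::fabs(back - x) << std::endl;   // difference is ~0
    return 0;
}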

Below are the new DataSet and MultiLayerPerceptron. TransferFunc gained a few more functions, but I haven't used them yet. I tried the first five stock-prediction samples from the book: there is some error, but the data points lie so close together that the result isn't worth much.

DataSet

//DataSet.h
//created by WK
#ifndef DATASET_H
#define DATASET_H

#include <vector>
#include <cfloat>

using namespace std;

// data set: each row holds InputNum inputs followed by OutputNum expected outputs
class DataSet
{
private:
    int             InputNum;
    int             OutputNum;
    vector<double*> DataMap;
    double          *Max;
public:
    DataSet(int inputnum, int outputnum)
    {
        this->InputNum  = inputnum;
        this->OutputNum = outputnum;
        Max             = NULL;
    }
    void AddRow(double *inputArray, double *outputArray)
    {
        double *data = new double[this->InputNum + this->OutputNum];
        for (int i = 0; i < this->InputNum; ++i)
            data[i] = inputArray[i];
        for (int i = 0; i < this->OutputNum; ++i)
            data[InputNum + i] = outputArray[i];
        this->DataMap.push_back(data);
    }
    // scale every column into (0.1, 0.9] and remember the per-column maximum
    void Normaliz()
    {
        if (this->Max)
            delete[] Max;
        this->Max = new double[this->InputNum + this->OutputNum];
        for (int i = 0; i < this->InputNum + this->OutputNum; ++i)
        {
            double max = DBL_MIN;
            for (int j = 0; j < (int)DataMap.size(); ++j)
                max = (max > DataMap[j][i] ? max : DataMap[j][i]);
            for (int j = 0; j < (int)DataMap.size(); ++j)
                DataMap[j][i] = (DataMap[j][i] / max) * 0.8 + 0.1;
            Max[i] = max;
        }
    }
    double *GetMax()
    {
        return this->Max;
    }
    int GetInputNum()
    {
        return this->InputNum;
    }
    int GetOutputNum()
    {
        return this->OutputNum;
    }
    int GetRows()
    {
        return (int)DataMap.size();
    }
    vector<double*> GetDataMap()
    {
        return DataMap;
    }
};
#endif // !DATASET_H
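A quick stand-alone usage sketch for DataSet (it assumes the header above is saved as DataSet.h; the numbers are made up for illustration and are not part of the original code):

#include <iostream>
#include "DataSet.h"

int main()
{
    DataSet set(2, 1);                                         // two inputs, one output per row
    set.AddRow(new double[2]{ 10, 20 }, new double[1]{ 30 });
    set.AddRow(new double[2]{ 40, 50 }, new double[1]{ 60 });
    set.Normaliz();                                            // every column now lies in (0.1, 0.9]
    for (int i = 0; i < set.GetRows(); ++i)
    {
        for (int j = 0; j < set.GetInputNum() + set.GetOutputNum(); ++j)
            std::cout << set.GetDataMap()[i][j] << " ";        // first row prints 0.3 0.42 0.5
        std::cout << std::endl;
    }
    return 0;
}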

TransferFunc

//TransferFunc.h
//created by WK
#ifndef TRANSFERFUNC_H
#define TRANSFERFUNC_H

#include <cmath>

enum Functypes
{
    FUNCTYPE_TANH,
    FUNCTYPE_STEP,
    FUNCTYPE_LINEAR,
    FUNCTYPE_SIGMOID,
    FUNCTYPE_SGN,
    FUNCTYPE_RAMP
};

class Function
{
private:
    double Step(double input)
    {
        if (input <= 0)
            return 0;
        else
            return 1;
    }
    double Linear(double input)
    {
        return input;
    }
    double Sigmoid(double input)
    {
        return 1.0 / (1.0 + exp(-1.0 * input));
    }
    double Sgn(double input)
    {
        if (input < 0)
            return -1;
        else
            return 1;
    }
    double Ramp(double input)
    {
        if (input < 0)
            return 0;
        else if (input >= 0 && input <= 1)
            return input;
        else
            return 1;
    }
public:
    double GetResult(int funcType, double input)
    {
        switch (funcType)
        {
        case FUNCTYPE_TANH:
            return tanh(input);
        case FUNCTYPE_STEP:
            return Step(input);
        case FUNCTYPE_LINEAR:
            return Linear(input);
        case FUNCTYPE_SIGMOID:
            return Sigmoid(input);
        case FUNCTYPE_SGN:
            return Sgn(input);
        case FUNCTYPE_RAMP:
            return Ramp(input);
        default:
            return input;
        }
    }
};
#endif // !TRANSFERFUNC_H
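One detail worth noting before the perceptron code below: the weight updates in Learn() rely on the sigmoid's convenient derivative, sigma'(x) = sigma(x) * (1 - sigma(x)), which is why the deltas there use output[j] * (1 - output[j]). A small stand-alone check against a finite difference (not part of the original code):

#include <cmath>
#include <iostream>

// same formula as Function::Sigmoid above
static double sigmoid(double x) { return 1.0 / (1.0 + exp(-x)); }

int main()
{
    double x = 0.7, h = 1e-6;
    double analytic = sigmoid(x) * (1.0 - sigmoid(x));              // sigma(x) * (1 - sigma(x))
    double numeric  = (sigmoid(x + h) - sigmoid(x - h)) / (2 * h);  // central difference approximation
    std::cout << analytic << " " << numeric << std::endl;           // both print about 0.2217
    return 0;
}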

MultiLayerPerceptron

#include <vector>
#include <iostream>
#include "TransferFunc.h"
#include "DataSet.h"
#include <time.h>
#include <cstdlib>

using namespace std;

#define WINITVALUE 0.001
#define TINITVALUE 0

// neuron
class Neuron
{
private:
    double      Input;
    double      Output;
    double      Threshold;
    double      *Last_weight;           // each neuron keeps the weights coming from the previous layer
    int         LastLayerNeuNum;
    int         TransferFunctionType;
    Function    Transferfunction;
public:
    Neuron(double threshold, int lastlayerneunum, int funcType)
    {
        this->Input                 = 0;
        this->Output                = 0;
        this->Threshold             = threshold;
        this->LastLayerNeuNum       = lastlayerneunum;
        this->TransferFunctionType  = funcType;
        this->Last_weight           = new double[lastlayerneunum];
        // the important part: initialize the weights randomly in (-1, 1)
        for (int i = 0; i < lastlayerneunum; ++i)
            this->Last_weight[i] = (2.0 * (double)rand() / RAND_MAX) - 1;
    }
    void SetInput(double input)
    {
        this->Input = input;
    }
    double GetOutput()
    {
        this->Output = Transferfunction.GetResult(this->TransferFunctionType, this->Input - this->Threshold);
        return this->Output;
    }
    double* GetThreshold()
    {
        return &this->Threshold;
    }
    double *GetWeight()
    {
        return this->Last_weight;
    }
    void SetFuncType(int functype)
    {
        this->TransferFunctionType = functype;
    }
};

// multi-layer perceptron
class MultiLayerPerceptron
{
private:
    int     OutTransfetFunctionType;
    int     HideTransfetFunctionType;
    int     InTransfetFunctionType;
    int     InLayerNeuNum;
    int     HideLayerNeuNum;
    int     OutLayerNeuNum;
    double  Speed;
    Neuron  **InputNeurons;
    Neuron  **OutputNeurons;
    Neuron  **HidenNeurons;
public:
    MultiLayerPerceptron(int intransferfunctiontype, int inLayerNeuNum, int hidetransferfunctiontype, int hideLayerNeuNum, int outtransferfunctiontype, int outLayerNeuNum, double speed)
    {
        this->InTransfetFunctionType    = intransferfunctiontype;
        this->HideTransfetFunctionType  = hidetransferfunctiontype;
        this->OutTransfetFunctionType   = outtransferfunctiontype;
        this->InLayerNeuNum             = inLayerNeuNum;
        this->HideLayerNeuNum           = hideLayerNeuNum;
        this->OutLayerNeuNum            = outLayerNeuNum;
        this->Speed                     = speed;

        this->InputNeurons = new Neuron*[inLayerNeuNum];
        for (int i = 0; i < inLayerNeuNum; ++i)
            this->InputNeurons[i] = new Neuron(TINITVALUE, 0, intransferfunctiontype);
        this->HidenNeurons = new Neuron*[hideLayerNeuNum];
        for (int i = 0; i < hideLayerNeuNum; ++i)
            this->HidenNeurons[i] = new Neuron(TINITVALUE, inLayerNeuNum, hidetransferfunctiontype);
        this->OutputNeurons = new Neuron*[outLayerNeuNum];
        for (int i = 0; i < outLayerNeuNum; ++i)
            this->OutputNeurons[i] = new Neuron(TINITVALUE, hideLayerNeuNum, outtransferfunctiontype);
    }
    // forward pass: compute the network output for the inputs currently set on the input layer
    void GetOutput(double *output)
    {
        double sum;
        for (int i = 0; i < this->HideLayerNeuNum; ++i)
        {
            sum = 0;
            for (int j = 0; j < this->InLayerNeuNum; ++j)
                sum += this->HidenNeurons[i]->GetWeight()[j] * this->InputNeurons[j]->GetOutput();
            this->HidenNeurons[i]->SetInput(sum);
        }
        for (int i = 0; i < this->OutLayerNeuNum; ++i)
        {
            sum = 0;
            for (int j = 0; j < this->HideLayerNeuNum; ++j)
                sum += this->OutputNeurons[i]->GetWeight()[j] * this->HidenNeurons[j]->GetOutput();
            this->OutputNeurons[i]->SetInput(sum);
            output[i] = this->OutputNeurons[i]->GetOutput();
        }
    }
    // one backpropagation pass over the whole training set
    void Learn(DataSet *trainingSet)
    {
        double *expect;
        double *data;
        double *output = new double[this->OutLayerNeuNum];
        for (int i = 0; i < trainingSet->GetRows(); ++i)
        {
            data    = trainingSet->GetDataMap()[i];
            expect  = data + trainingSet->GetInputNum();
            for (int j = 0; j < trainingSet->GetInputNum(); ++j)
                this->InputNeurons[j]->SetInput(data[j]);
            this->GetOutput(output);
            // update hidden-to-output weights and thresholds
            // (the update formulas follow the standard BP derivation in the machine-learning textbook)
            for (int j = 0; j < this->OutLayerNeuNum; ++j)
            {
                double delta = this->Speed * output[j] * (1 - output[j]) * (expect[j] - output[j]);
                for (int k = 0; k < this->HideLayerNeuNum; ++k)
                    this->OutputNeurons[j]->GetWeight()[k] += (delta * this->HidenNeurons[k]->GetOutput());
                *this->OutputNeurons[j]->GetThreshold() -= delta;
            }
            // update input-to-hidden weights and thresholds
            for (int j = 0; j < this->HideLayerNeuNum; ++j)
            {
                double t = 0;
                for (int k = 0; k < this->OutLayerNeuNum; ++k)
                    t += (this->OutputNeurons[k]->GetWeight()[j] * output[k] * (1 - output[k]) * (expect[k] - output[k]));
                double delta = this->HidenNeurons[j]->GetOutput() * (1 - this->HidenNeurons[j]->GetOutput()) * t;
                for (int k = 0; k < this->InLayerNeuNum; ++k)
                    this->HidenNeurons[j]->GetWeight()[k] += (this->Speed * this->InputNeurons[k]->GetOutput() * delta);
                *this->HidenNeurons[j]->GetThreshold() -= (this->Speed * delta);
            }
        }
        delete[] output;
    }
    void Test(DataSet *Set)
    {
        double *output = new double[this->OutLayerNeuNum];
        for (int i = 0; i < Set->GetRows(); ++i)
        {
            for (int j = 0; j < Set->GetInputNum(); ++j)
                this->InputNeurons[j]->SetInput(Set->GetDataMap()[i][j]);
            this->GetOutput(output);
            for (int j = 0; j < Set->GetOutputNum(); ++j)
            {
                cout << "output: " << output[j] << "\t";
                cout << "expect: " << Set->GetDataMap()[i][Set->GetInputNum() + j] << "\t";
            }
            cout << endl;
        }
        cout << endl;
        cout << "in to hide W:" << endl;
        for (int i = 0; i < this->HideLayerNeuNum; ++i)
        {
            for (int j = 0; j < this->InLayerNeuNum; ++j)
                cout << this->HidenNeurons[i]->GetWeight()[j] << "  ";
            cout << endl;
        }
        cout << endl;
        cout << "hide to out W:" << endl;
        for (int i = 0; i < this->OutLayerNeuNum; ++i)
        {
            for (int j = 0; j < this->HideLayerNeuNum; ++j)
                cout << this->OutputNeurons[i]->GetWeight()[j] << "  ";
            cout << endl;
        }
        delete[] output;
    }
    // like Test, but maps outputs and expected values back to the original scale
    void Test_Reget(DataSet *Set)
    {
        double *output = new double[this->OutLayerNeuNum];
        for (int i = 0; i < Set->GetRows(); ++i)
        {
            for (int j = 0; j < Set->GetInputNum(); ++j)
                this->InputNeurons[j]->SetInput(Set->GetDataMap()[i][j]);
            this->GetOutput(output);
            for (int j = 0; j < Set->GetOutputNum(); ++j)
            {
                double o = output[j], e = Set->GetDataMap()[i][Set->GetInputNum() + j];
                if (Set->GetMax())
                {
                    // undo the normalization: x = (x_norm - 0.1) / 0.8 * max
                    o = (o - 0.1) / 0.8 * Set->GetMax()[Set->GetInputNum() + j];
                    e = (e - 0.1) / 0.8 * Set->GetMax()[Set->GetInputNum() + j];
                }
                cout << "output: " << o << "\t";
                cout << "expect: " << e << "\t";
            }
            cout << endl;
        }
        cout << endl;
        cout << "in to hide W:" << endl;
        for (int i = 0; i < this->HideLayerNeuNum; ++i)
        {
            for (int j = 0; j < this->InLayerNeuNum; ++j)
                cout << this->HidenNeurons[i]->GetWeight()[j] << "  ";
            cout << endl;
        }
        cout << endl;
        cout << "hide to out W:" << endl;
        for (int i = 0; i < this->OutLayerNeuNum; ++i)
        {
            for (int j = 0; j < this->HideLayerNeuNum; ++j)
                cout << this->OutputNeurons[i]->GetWeight()[j] << "  ";
            cout << endl;
        }
        delete[] output;
    }
};

int main()
{
    srand((unsigned)time(NULL));    // seed the RNG used for weight initialization
    /*DataSet *trainingSet = new DataSet(2, 1);
    trainingSet->AddRow(new double[2]{ 1,1 }, new double[1]{ 0 });
    trainingSet->AddRow(new double[2]{ 1,0 }, new double[1]{ 1 });
    trainingSet->AddRow(new double[2]{ 0,1 }, new double[1]{ 1 });
    trainingSet->AddRow(new double[2]{ 0,0 }, new double[1]{ 0 });*/
    DataSet *trainingSet = new DataSet(4, 1);
    trainingSet->AddRow(new double[4]{ 3710,3690,3890,3695 }, new double[1]{ 3666 });
    trainingSet->AddRow(new double[4]{ 3690,3890,3695,3666 }, new double[1]{ 3692 });
    trainingSet->AddRow(new double[4]{ 3890,3695,3666,3692 }, new double[1]{ 3886 });
    trainingSet->AddRow(new double[4]{ 3695,3666,3692,3886 }, new double[1]{ 3914 });
    trainingSet->AddRow(new double[4]{ 3666,3692,3886,3914 }, new double[1]{ 3956 });
    trainingSet->AddRow(new double[4]{ 3692,3886,3914,3956 }, new double[1]{ 3953 });
    trainingSet->Normaliz();
    // per layer: activation type and neuron count; the last argument is the learning rate
    MultiLayerPerceptron *m = new MultiLayerPerceptron(FUNCTYPE_LINEAR, 4, FUNCTYPE_SIGMOID, 9, FUNCTYPE_SIGMOID, 1, 0.9);
    // train for 10000 passes over the training set
    for (int i = 0; i < 10000; ++i)
        m->Learn(trainingSet);
    DataSet *TestSet = new DataSet(4, 1);
    TestSet->AddRow(new double[4]{ 3886,3914,3956,3953 }, new double[1]{ 4044 });
    TestSet->Normaliz();
    //m->Test(TestSet);
    m->Test_Reget(TestSet);
    system("pause");
    return 0;
}

The code turns really ugly the moment I paste it in... sad.
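One thing that would help when tuning this (not in the original code; the method name and threshold are my own illustration): track a mean squared error per pass so the fixed 10000-pass loop can stop once training has converged. A rough sketch of a member function that could be added to MultiLayerPerceptron, reusing the existing forward pass:

// hypothetical addition to MultiLayerPerceptron: mean squared error over a data set,
// measured in normalized units, using the same forward pass as Test()
double MeanSquaredError(DataSet *Set)
{
    double *output = new double[this->OutLayerNeuNum];
    double sum = 0;
    for (int i = 0; i < Set->GetRows(); ++i)
    {
        for (int j = 0; j < Set->GetInputNum(); ++j)
            this->InputNeurons[j]->SetInput(Set->GetDataMap()[i][j]);
        this->GetOutput(output);
        for (int j = 0; j < Set->GetOutputNum(); ++j)
        {
            double err = Set->GetDataMap()[i][Set->GetInputNum() + j] - output[j];
            sum += err * err;
        }
    }
    delete[] output;
    return sum / (Set->GetRows() * Set->GetOutputNum());
}

With that in place, the training loop in main() could become: for (int i = 0; i < 10000; ++i) { m->Learn(trainingSet); if (m->MeanSquaredError(trainingSet) < 1e-5) break; }, where 1e-5 is just an example threshold.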

