

gneuralnet.cpp

A useful piece of open-source code: a feed-forward neural network implementation (GNeuralNet) by Mike Gashler, released under the GNU LGPL.
Category: CPP
Page 1 of 2
/*
	Copyright (C) 2006, Mike Gashler

	This library is free software; you can redistribute it and/or
	modify it under the terms of the GNU Lesser General Public
	License as published by the Free Software Foundation; either
	version 2.1 of the License, or (at your option) any later version.

	see http://www.gnu.org/copyleft/lesser.html
*/

#include "GNeuralNet.h"
#include "GArff.h"
#include "GMath.h"
#include "GMacros.h"
#include "GArray.h"
#include "GBits.h"

class GNeuron;

#define INIT_THRESH .15
#define OUTPUT_MIN .1
#define OUTPUT_MIDDLE .5
#define OUTPUT_RANGE .8
#define INPUT_MIN -.7
#define INPUT_RANGE 1.4

struct GSynapse
{
	double m_dWeight; // Synapse
	double m_dWeightDelta;
	GNeuron* m_pInput;
	GNeuron* m_pOutput;

	GSynapse()
	{
		m_dWeight = (GBits::GetRandomDouble() * INIT_THRESH) - (INIT_THRESH / 2);
		m_dWeightDelta = 0;
		m_pInput = NULL;
		m_pOutput = NULL;
	}
};

class GNeuron
{
public:
	GNeuron() {}
	virtual ~GNeuron() {}

	virtual double PullEvalDownStream() = 0;
	virtual double PullErrorBackUpStream() = 0;
	virtual double GetOutput() = 0;
	virtual void SetOutput(double d) = 0;
	virtual void SetError(double d) = 0;
	virtual void AjustWeights(double dLearningRate, double dMomentum) = 0;
	virtual int SerializeWeights(double* pBuffer) = 0;
	virtual int DeserializeWeights(double* pBuffer) = 0;
	virtual void AddInput(GNeuron* pNeuron) = 0;
	virtual void Print() = 0;
	virtual void BatchUpdateDeltas(double dLearningRate) = 0;
	virtual void BatchUpdateWeights() = 0;

	// For internal use only
	virtual void ConnectOutputToSynapse(GSynapse* pSynapse) = 0;
	virtual void RemapSynapse(GSynapse* pOld, GSynapse* pNew) = 0;
};

class GStandardNeuron : public GNeuron
{
protected:
	double m_dOutput; // Axon
	double m_dError;
	int m_nInputs;
	int m_nInputSpace;
	struct GSynapse* m_pInputs;
	int m_nOutputs;
	int m_nOutputSpace;
	struct GSynapse** m_pOutputs;

public:
	GStandardNeuron() : GNeuron()
	{
		m_dOutput = 1e50;
		m_dError = 1e50;
		m_nInputs = 0;
		m_nInputSpace = 0;
		m_pInputs = NULL;
		AddInput(NULL); // Add the constant 1 (for bias) input
		m_nOutputs = 0;
		m_nOutputSpace = 0;
		m_pOutputs = NULL;
	}

	virtual ~GStandardNeuron()
	{
		delete [] m_pInputs;
		delete(m_pOutputs);
	}

	virtual void Print()
	{
		int n;
		for(n = 0; n < m_nInputs; n++)
			printf("\t%lf\n", m_pInputs[n].m_dWeight);
	}

	virtual double PullEvalDownStream()
	{
		if(m_dOutput == 1e50)
		{
			// Sum up the weighted inputs
			double dSum = m_pInputs[0].m_dWeight;
			int n;
			for(n = 1; n < m_nInputs; n++)
				dSum += (m_pInputs[n].m_dWeight * m_pInputs[n].m_pInput->PullEvalDownStream());

			// Squash the sum
			m_dOutput = GMath::sigmoid(dSum, 1);
		}
		return m_dOutput;
	}

	virtual double PullErrorBackUpStream()
	{
		GAssert(m_dOutput != 1e50, "output was not calculated");
		if(m_dError == 1e50)
		{
			// Sum up the errors from each output
			double dSum = 0;
			int n;
			for(n = 0; n < m_nOutputs; n++)
				dSum += (m_pOutputs[n]->m_dWeight * m_pOutputs[n]->m_pOutput->PullErrorBackUpStream());

			// Multiply by derivative of squashing function
			m_dError = dSum * m_dOutput * ((double)1 - m_dOutput);
		}
		return m_dError;
	}

	virtual double GetOutput()
	{
		GAssert(m_dOutput != 1e50, "output was not calculated");
		return m_dOutput;
	}

	virtual void SetOutput(double d)
	{
		m_dOutput = d;
	}

	virtual void SetError(double d)
	{
		m_dError = d;
	}

	virtual void AjustWeights(double dLearningRate, double dMomentum)
	{
		GAssert(m_dError != 1e50, "output was not calculated");
		GSynapse* pNeuronRef;
		pNeuronRef = &m_pInputs[0];
		pNeuronRef->m_dWeightDelta *= dMomentum;
		pNeuronRef->m_dWeightDelta += (dLearningRate * m_dError);
		pNeuronRef->m_dWeight += pNeuronRef->m_dWeightDelta;
		int n;
		for(n = 1; n < m_nInputs; n++)
		{
			pNeuronRef = &m_pInputs[n];
			pNeuronRef->m_dWeightDelta *= dMomentum;
			pNeuronRef->m_dWeightDelta += (dLearningRate * m_dError * pNeuronRef->m_pInput->GetOutput());
			pNeuronRef->m_dWeight += pNeuronRef->m_dWeightDelta;
		}
	}

	virtual void BatchUpdateDeltas(double dLearningRate)
	{
		GAssert(m_dError != 1e50, "output was not calculated");
		GSynapse* pNeuronRef;
		pNeuronRef = &m_pInputs[0];
		pNeuronRef->m_dWeightDelta += (dLearningRate * m_dError);
		int n;
		for(n = 1; n < m_nInputs; n++)
		{
			pNeuronRef = &m_pInputs[n];
			pNeuronRef->m_dWeightDelta += (dLearningRate * m_dError * pNeuronRef->m_pInput->GetOutput());
		}
	}

	virtual void BatchUpdateWeights()
	{
		GSynapse* pNeuronRef;
		pNeuronRef = &m_pInputs[0];
		pNeuronRef->m_dWeight += pNeuronRef->m_dWeightDelta;
		pNeuronRef->m_dWeightDelta = 0;
		int n;
		for(n = 1; n < m_nInputs; n++)
		{
			pNeuronRef = &m_pInputs[n];
			pNeuronRef->m_dWeight += pNeuronRef->m_dWeightDelta;
			pNeuronRef->m_dWeightDelta = 0;
		}
	}

	virtual int SerializeWeights(double* pBuffer)
	{
		if(pBuffer)
		{
			int n;
			for(n = 0; n < m_nInputs; n++)
				pBuffer[n] = m_pInputs[n].m_dWeight;
		}
		return m_nInputs;
	}

	virtual int DeserializeWeights(double* pBuffer)
	{
		int n;
		for(n = 0; n < m_nInputs; n++)
			m_pInputs[n].m_dWeight = pBuffer[n];
		return m_nInputs;
	}

	virtual void AddInput(GNeuron* pNeuron)
	{
		if(m_nInputs >= m_nInputSpace)
		{
			// Reallocate input space
			int nInputSpace = MAX(4, m_nInputSpace * 2);
			GSynapse* pInputs = new GSynapse[nInputSpace];
			memcpy(pInputs, m_pInputs, sizeof(GSynapse) * m_nInputSpace);

			// Remap everything
			int n;
			for(n = 0; n < m_nInputs; n++)
			{
				if(m_pInputs[n].m_pInput)
					m_pInputs[n].m_pInput->RemapSynapse(&m_pInputs[n], &pInputs[n]);
			}
			delete(m_pInputs);
			m_pInputs = pInputs;
			m_nInputSpace = nInputSpace;
		}
		m_pInputs[m_nInputs].m_pInput = pNeuron;
		m_pInputs[m_nInputs].m_pOutput = this;
		if(pNeuron)
			pNeuron->ConnectOutputToSynapse(&m_pInputs[m_nInputs]);
		else
		{
			GAssert(m_nInputs == 0, "only the first input should be NULL");
		}
		m_nInputs++;
	}

	virtual void RemapSynapse(GSynapse* pOld, GSynapse* pNew)
	{
		int n;
		for(n = 0; n < m_nOutputs; n++)
		{
			if(m_pOutputs[n] == pOld)
			{
				m_pOutputs[n] = pNew;
				break;
			}
		}
	}

protected:
	virtual void ConnectOutputToSynapse(GSynapse* pSynapse)
	{
		if(m_nOutputs >= m_nOutputSpace)
		{
			// Reallocate output space
			int nOutputSpace = MAX(4, m_nOutputSpace * 2);
			GSynapse** pOutputs = new GSynapse*[nOutputSpace];
			memcpy(pOutputs, m_pOutputs, sizeof(GSynapse*) * m_nOutputSpace);
			delete(m_pOutputs);
			m_pOutputs = pOutputs;
			m_nOutputSpace = nOutputSpace;
		}
		m_pOutputs[m_nOutputs++] = pSynapse;
	}
};

// ----------------------------------------------------------------------

GNeuralNet::GNeuralNet(GArffRelation* pRelation)
: GSupervisedLearner(pRelation)
{
	m_pInternalRelation = NULL;
	m_pNeurons = new GPointerArray(64);
	m_pBestSet = NULL;
	m_nWeightCount = 0;
	m_nInputStart = 0;
	m_nLayerStart = 0;
	m_nLayerSize = 0;
	m_pMinAndRanges = NULL;
	MakeInternalRelationAndOutputLayer();

	// Default settings
	m_dLearningRate = .215;
	m_dLearningDecay = 1;
	m_dMomentum = .9;
	m_nRunEpochs = 4000;
	m_nMaximumEpochs = 50000;
	m_nEpochsPerValidationCheck = 5;
	m_dAcceptableMeanSquareError = 0.000001;
	m_dTrainingPortion = .65;

	// Step training
	m_pTrainingDataInternal = NULL;
	m_pValidationDataInternal = NULL;
}

GNeuralNet::~GNeuralNet()
{
	int nCount = m_pNeurons->GetSize();
	int n;
	for(n = 0; n < nCount; n++)
		delete((GNeuron*)m_pNeurons->GetPointer(n));
	delete(m_pNeurons);
	delete(m_pBestSet);
	delete(m_pMinAndRanges);
	delete(m_pInternalRelation);
	ReleaseInternalData();
}

void GNeuralNet::ReleaseInternalData()
{
	if(m_pValidationDataInternal != m_pTrainingDataInternal)
		delete(m_pValidationDataInternal);
	delete(m_pTrainingDataInternal);
	m_pTrainingDataInternal = NULL;
	m_pValidationDataInternal = NULL;
}

void GNeuralNet::MakeInternalRelationAndOutputLayer()
{
	// Make the internal relation
	GAssert(m_pInternalRelation == NULL, "already created the internal relation");
	m_pInternalRelation = new GArffRelation();

	// Add the internal input nodes
	GArffAttribute* pAttr;
	int nValueCount;
	int nInputCount = m_pRelation->GetInputCount();
	int n, i;
	for(n = 0; n < nInputCount; n++)
	{
		pAttr = m_pRelation->GetAttribute(m_pRelation->GetInputIndex(n));
		if(pAttr->IsContinuous())
			m_pInternalRelation->AddAttribute(new GArffAttribute(true, 0, NULL));
		else
		{
			nValueCount = pAttr->GetValueCount();
			if(nValueCount <= 2)
				m_pInternalRelation->AddAttribute(new GArffAttribute(true, 0, NULL));
			else
			{
				for(i = 0; i < nValueCount; i++)
					m_pInternalRelation->AddAttribute(new GArffAttribute(true, 0, NULL));
			}
		}
	}

	// Add the internal output nodes
	int nOutputCount = m_pRelation->GetOutputCount();
	for(n = 0; n < nOutputCount; n++)
	{
		pAttr = m_pRelation->GetAttribute(m_pRelation->GetOutputIndex(n));
		if(pAttr->IsContinuous())
			m_pInternalRelation->AddAttribute(new GArffAttribute(false, 0, NULL));
		else
		{
			nValueCount = pAttr->GetValueCount();
			if(nValueCount <= 2)
				m_pInternalRelation->AddAttribute(new GArffAttribute(false, 0, NULL));
			else
			{
				for(i = 0; i < nValueCount; i++)
					m_pInternalRelation->AddAttribute(new GArffAttribute(false, 0, NULL));
			}
		}
	}

	// Make the output layer
	AddLayer(m_pInternalRelation->GetOutputCount());
}

void GNeuralNet::MakeInputLayer()
{
	GAssert(m_nInputStart == 0, "already made the input layer");
	m_nInputStart = m_pNeurons->GetSize();
	AddLayer(m_pInternalRelation->GetInputCount());
}

void GNeuralNet::InputsToInternal(double* pExternal, double* pInternal)
{
	GAssert(m_pMinAndRanges, "min and ranges not calculated yet");
	GArffAttribute* pAttr;
	int nValueCount;
	int nInputCount = m_pRelation->GetInputCount();
	int nInternalIndex = 0;
	int n, i, nExternalIndex;
	for(n = 0; n < nInputCount; n++)
	{
		nExternalIndex = m_pRelation->GetInputIndex(n);
		pAttr = m_pRelation->GetAttribute(nExternalIndex);
		if(pAttr->IsContinuous())
			pInternal[nInternalIndex++] = GArffData::Normalize(pExternal[nExternalIndex], m_pMinAndRanges[nExternalIndex + nExternalIndex], m_pMinAndRanges[nExternalIndex + nExternalIndex + 1], INPUT_MIN, INPUT_RANGE);
		else
		{
			nValueCount = pAttr->GetValueCount();
			if(nValueCount <= 2)
				pInternal[nInternalIndex++] = (pExternal[nExternalIndex] < .5 ? INPUT_MIN : INPUT_MIN + INPUT_RANGE);
			else
			{
				for(i = 0; i < nValueCount; i++)
					pInternal[nInternalIndex + i] = INPUT_MIN;
				GAssert((int)pExternal[nExternalIndex] >= 0 && (int)pExternal[nExternalIndex] < nValueCount, "out of range");
				pInternal[nInternalIndex + (int)pExternal[nExternalIndex]] = INPUT_MIN + INPUT_RANGE;
				nInternalIndex += nValueCount;
			}
		}
	}
	GAssert(nInternalIndex == m_pInternalRelation->GetInputCount(), "error");
}

void GNeuralNet::OutputsToInternal(double* pExternal, double* pInternal)
{
	GAssert(m_pMinAndRanges, "min and ranges not calculated yet");
	GArffAttribute* pAttr;
	int nValueCount;
	int nOutputCount = m_pRelation->GetOutputCount();
	int nInternalIndex = m_pInternalRelation->GetOutputIndex(0);
	int n, i, nExternalIndex;
	for(n = 0; n < nOutputCount; n++)
	{
		nExternalIndex = m_pRelation->GetOutputIndex(n);
		pAttr = m_pRelation->GetAttribute(nExternalIndex);
		if(pAttr->IsContinuous())
			pInternal[nInternalIndex++] = GArffData::Normalize(pExternal[nExternalIndex], m_pMinAndRanges[nExternalIndex + nExternalIndex], m_pMinAndRanges[nExternalIndex + nExternalIndex + 1], OUTPUT_MIN, OUTPUT_RANGE);
		else
		{
			nValueCount = pAttr->GetValueCount();
			if(nValueCount <= 2)
				pInternal[nInternalIndex++] = (pExternal[nExternalIndex] < .5 ? OUTPUT_MIN : OUTPUT_MIN + OUTPUT_RANGE);
			else
			{
				for(i = 0; i < nValueCount; i++)
					pInternal[nInternalIndex + i] = OUTPUT_MIN;
				GAssert((int)pExternal[nExternalIndex] >= 0 && (int)pExternal[nExternalIndex] < nValueCount, "out of range");
				pInternal[nInternalIndex + (int)pExternal[nExternalIndex]] = OUTPUT_MIN + OUTPUT_RANGE;
				nInternalIndex += nValueCount;
			}
		}
	}
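The listing above is the first of two pages and cuts off inside OutputsToInternal. The per-neuron math it implements reduces to three steps: a sigmoid-squashed weighted sum on the forward pass (PullEvalDownStream), an error term scaled by the sigmoid derivative output * (1 - output) on the backward pass (PullErrorBackUpStream), and a momentum-smoothed weight update (AjustWeights). The following standalone sketch restates those three steps outside the GClasses class hierarchy; the names (SimpleNeuron, Forward, Backward, UpdateWeights) are illustrative, not part of the library, and the output-layer error here is simply target minus prediction.

// Standalone sketch of the per-neuron math in the listing (illustrative names).
// weights[0] plays the role of the constant-1 bias input.
#include <cmath>
#include <cstdio>
#include <vector>

struct SimpleNeuron
{
	std::vector<double> weights;  // weights[0] is the bias weight
	std::vector<double> deltas;   // accumulated deltas, used for momentum
	double output;
	double error;

	explicit SimpleNeuron(size_t nInputs)
		: weights(nInputs + 1, 0.05), deltas(nInputs + 1, 0.0), output(0), error(0) {}

	// Forward pass: sigmoid of the bias plus the weighted inputs
	// (mirrors PullEvalDownStream).
	double Forward(const std::vector<double>& inputs)
	{
		double sum = weights[0];
		for(size_t i = 0; i < inputs.size(); i++)
			sum += weights[i + 1] * inputs[i];
		output = 1.0 / (1.0 + std::exp(-sum));
		return output;
	}

	// Backward pass: scale the error arriving from downstream by the sigmoid
	// derivative output * (1 - output) (mirrors PullErrorBackUpStream).
	void Backward(double downstreamError)
	{
		error = downstreamError * output * (1.0 - output);
	}

	// Momentum-smoothed update (mirrors AjustWeights):
	// delta = momentum * delta + rate * error * input; weight += delta.
	void UpdateWeights(const std::vector<double>& inputs, double rate, double momentum)
	{
		deltas[0] = momentum * deltas[0] + rate * error; // bias input is the constant 1
		weights[0] += deltas[0];
		for(size_t i = 0; i < inputs.size(); i++)
		{
			deltas[i + 1] = momentum * deltas[i + 1] + rate * error * inputs[i];
			weights[i + 1] += deltas[i + 1];
		}
	}
};

int main()
{
	SimpleNeuron neuron(2);
	std::vector<double> x = {0.3, -0.7};
	double target = 0.8;
	for(int epoch = 0; epoch < 100; epoch++)
	{
		double y = neuron.Forward(x);
		neuron.Backward(target - y);          // output-layer error: target minus prediction
		neuron.UpdateWeights(x, 0.215, 0.9);  // default rate and momentum from the listing
	}
	printf("output after training: %lf\n", neuron.Forward(x));
	return 0;
}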

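InputsToInternal and OutputsToInternal map the external ARFF representation into the network's working range: continuous attributes are rescaled from their observed min and range into [INPUT_MIN, INPUT_MIN + INPUT_RANGE] (i.e. [-0.7, 0.7]), binary nominal attributes collapse to a single low/high value, and nominal attributes with more than two values are one-hot encoded, with the active value set to the high end of the range. Below is a minimal sketch of that encoding convention using hypothetical helper names, not the GArffData/GArffRelation API.

// Illustrative sketch of the input-encoding convention described above
// (hypothetical helpers, not the library's API).
#include <cstdio>
#include <vector>

const double INPUT_MIN = -0.7;
const double INPUT_RANGE = 1.4;

// Rescale a continuous value from [min, min + range] into
// [INPUT_MIN, INPUT_MIN + INPUT_RANGE], the role GArffData::Normalize plays here.
double NormalizeContinuous(double value, double min, double range)
{
	return (value - min) / range * INPUT_RANGE + INPUT_MIN;
}

// One-hot encode a nominal value with more than two possible values:
// every slot gets the low end of the range, the active slot gets the high end.
std::vector<double> EncodeNominal(int value, int nValueCount)
{
	std::vector<double> slots(nValueCount, INPUT_MIN);
	slots[value] = INPUT_MIN + INPUT_RANGE;
	return slots;
}

int main()
{
	// A continuous attribute observed over [10, 30]; the value 25 maps to 0.35
	printf("%lf\n", NormalizeContinuous(25, 10, 20));
	// A three-valued nominal attribute with value 2 maps to {-0.7, -0.7, 0.7}
	std::vector<double> v = EncodeNominal(2, 3);
	printf("%lf %lf %lf\n", v[0], v[1], v[2]);
	return 0;
}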