
gneuralnet.cpp
A very useful piece of open-source code
CPP
Page 1 of 2
// (Continuation: the tail of a function whose beginning falls outside this excerpt.)
    GAssert(nInternalIndex == m_pInternalRelation->GetAttributeCount(), "error");
}

void GNeuralNet::OutputsToExternal(double* pInternal, double* pExternal)
{
    GAssert(m_pMinAndRanges, "min and ranges not calculated yet");
    GArffAttribute* pAttr;
    int nValueCount;
    int nOutputCount = m_pRelation->GetOutputCount();
    int nInternalIndex = m_pInternalRelation->GetOutputIndex(0);
    int n, i, nExternalIndex;
    double dVal, dHighestVal;
    for(n = 0; n < nOutputCount; n++)
    {
        nExternalIndex = m_pRelation->GetOutputIndex(n);
        pAttr = m_pRelation->GetAttribute(nExternalIndex);
        if(pAttr->IsContinuous())
            pExternal[nExternalIndex] = GArffData::Normalize(pInternal[nInternalIndex++], OUTPUT_MIN, OUTPUT_RANGE, m_pMinAndRanges[2 * nExternalIndex], m_pMinAndRanges[2 * nExternalIndex + 1]);
        else
        {
            nValueCount = pAttr->GetValueCount();
            if(nValueCount <= 2)
                pExternal[nExternalIndex] = (pInternal[nInternalIndex++] >= OUTPUT_MIDDLE ? 1 : 0);
            else
            {
                // Pick the discrete value whose output neuron fired strongest
                pExternal[nExternalIndex] = 0;
                dHighestVal = pInternal[nInternalIndex++];
                for(i = 1; i < nValueCount; i++)
                {
                    dVal = pInternal[nInternalIndex++];
                    if(dVal > dHighestVal)
                    {
                        pExternal[nExternalIndex] = i;
                        dHighestVal = dVal;
                    }
                }
            }
        }
    }
    GAssert(nInternalIndex == m_pInternalRelation->GetAttributeCount(), "error");
}

void GNeuralNet::AddLayer(int nNodes)
{
    int nPrevLayerStart = m_nLayerStart;
    int nPrevLayerSize = m_nLayerSize;
    m_nLayerStart = m_pNeurons->GetSize();
    m_nLayerSize = nNodes;
    int n, i;
    for(n = 0; n < nNodes; n++)
    {
        GStandardNeuron* pNewNeuron = new GStandardNeuron();
        m_pNeurons->AddPointer(pNewNeuron);
        // Each neuron in the previous layer takes the new neuron as an input
        for(i = 0; i < nPrevLayerSize; i++)
        {
            GNeuron* pOldNeuron = (GNeuron*)m_pNeurons->GetPointer(nPrevLayerStart + i);
            pOldNeuron->AddInput(pNewNeuron);
        }
    }
}

int GNeuralNet::GetWeightCount()
{
    if(m_nWeightCount == 0)
    {
        int n;
        int nCount = m_pNeurons->GetSize();
        GNeuron* pNeuron;
        for(n = 0; n < nCount; n++)
        {
            pNeuron = (GNeuron*)m_pNeurons->GetPointer(n);
            m_nWeightCount += pNeuron->SerializeWeights(NULL);
        }
    }
    return m_nWeightCount;
}

void GNeuralNet::GetWeights(double* pOutWeights)
{
    // Serialize the weights
    int nCount = m_pNeurons->GetSize();
    int nPos = 0;
    int n;
    GNeuron* pNeuron;
    for(n = 0; n < nCount; n++)
    {
        pNeuron = (GNeuron*)m_pNeurons->GetPointer(n);
        nPos += pNeuron->SerializeWeights(&pOutWeights[nPos]);
    }
    GAssert(nPos == m_nWeightCount, "serialization size inconsistent");
}

void GNeuralNet::SetWeights(double* pWeights)
{
    int n;
    int nCount = m_pNeurons->GetSize();
    GNeuron* pNeuron;
    int nPos = 0;
    for(n = 0; n < nCount; n++)
    {
        pNeuron = (GNeuron*)m_pNeurons->GetPointer(n);
        nPos += pNeuron->DeserializeWeights(&pWeights[nPos]);
    }
    GAssert(nPos == m_nWeightCount, "serialization size inconsistent");
}

void GNeuralNet::UpdateBestWeights()
{
    if(!m_pBestSet)
        m_pBestSet = new double[GetWeightCount()];
    GetWeights(m_pBestSet);
}

void GNeuralNet::RestoreBestWeights()
{
    SetWeights(m_pBestSet);
}

void GNeuralNet::EvalInternal(double* pRow)
{
    // Clear the outputs of all non-input neurons
    // (1e50 appears to serve as a "not yet evaluated" sentinel)
    GNeuron* pNeuron;
    int n;
    for(n = 0; n < m_nInputStart; n++)
    {
        pNeuron = (GNeuron*)m_pNeurons->GetPointer(n);
        pNeuron->SetOutput(1e50);
    }

    // Copy inputs into input neurons
    int nInputs = m_pInternalRelation->GetInputCount();
    int nCount = m_nInputStart + nInputs;
    GAssert(nCount == m_pNeurons->GetSize(), "neurons added after input neurons?");
    int nInput = 0;
    for( ; n < nCount; n++)
    {
        pNeuron = (GNeuron*)m_pNeurons->GetPointer(n);
        pNeuron->SetOutput(pRow[nInput++]);
    }

    // Pull the evaluation downstream to the output nodes
    int nOutputs = m_pInternalRelation->GetOutputCount();
    for(n = 0; n < nOutputs; n++)
    {
        pNeuron = (GNeuron*)m_pNeurons->GetPointer(n);
        pNeuron->PullEvalDownStream();
    }
}

void GNeuralNet::Eval(double* pRow)
{
    // Convert to internal data
    double* pInternalRow = (double*)alloca(sizeof(double) * m_pInternalRelation->GetAttributeCount());
    InputsToInternal(pRow, pInternalRow);

    // Do the evaluation
    EvalInternal(pInternalRow);

    // Extract the output values from the output nodes
    GNeuron* pNeuron;
    int n;
    int nOutputs = m_pInternalRelation->GetOutputCount();
    int nIndex = m_pInternalRelation->GetOutputIndex(0);
    for(n = 0; n < nOutputs; n++)
    {
        pNeuron = (GNeuron*)m_pNeurons->GetPointer(n);
        pInternalRow[nIndex++] = pNeuron->GetOutput();
    }

    // Convert outputs to external data
    OutputsToExternal(pInternalRow, pRow);
}

void GNeuralNet::Criticize(double* pModel)
{
    // Calculate the error on all output nodes
    GNeuron* pNeuron = NULL;
    int n;
    double dOutput;
    int nOutputs = m_pInternalRelation->GetOutputCount();
    int nIndex = m_pInternalRelation->GetOutputIndex(0);
    for(n = 0; n < nOutputs; n++)
    {
        pNeuron = (GNeuron*)m_pNeurons->GetPointer(n);
        dOutput = pNeuron->GetOutput();
        pNeuron->SetError((pModel[nIndex++] - dOutput) * dOutput * (1.0 - dOutput));
    }

    // Clear the error on the rest of the nodes
    for( ; n <= m_nInputStart; n++)
    {
        pNeuron = (GNeuron*)m_pNeurons->GetPointer(n);
        pNeuron->SetError(1e50);
    }

    // Backpropagate the error. (We only need to call PullErrorBackUpStream
    // on one input neuron because any input is connected to all the nodes
    // in the next layer, and we don't need the error value for the inputs.)
    pNeuron->PullErrorBackUpStream();
}

void GNeuralNet::MeasureMinAndRanges(GArffData* pTrainingData)
{
    int nAttrCount = m_pRelation->GetAttributeCount();
    delete [] m_pMinAndRanges; // array delete (the buffer is allocated with new[])
    m_pMinAndRanges = new double[2 * nAttrCount];
    GArffAttribute* pAttr;
    int n;
    for(n = 0; n < nAttrCount; n++)
    {
        pAttr = m_pRelation->GetAttribute(n);
        if(pAttr->IsContinuous())
        {
            pTrainingData->GetMinAndRange(n, &m_pMinAndRanges[2 * n], &m_pMinAndRanges[2 * n + 1]);
            if(m_pMinAndRanges[2 * n + 1] < .00001)
                m_pMinAndRanges[2 * n + 1] = .00001;
        }
        else
        {
            m_pMinAndRanges[2 * n] = 0;
            m_pMinAndRanges[2 * n + 1] = 0;
        }
    }
}

void GNeuralNet::ExternalToInternalData(GArffData* pExternal, GArffData* pInternal)
{
    double* pExternalRow;
    double* pInternalRow;
    int n;
    int nInternalAttributeCount = m_pInternalRelation->GetAttributeCount();
    int nRowCount = pExternal->GetSize();
    for(n = 0; n < nRowCount; n++)
    {
        pExternalRow = pExternal->GetVector(n);
        pInternalRow = new double[nInternalAttributeCount];
        InputsToInternal(pExternalRow, pInternalRow);
        OutputsToInternal(pExternalRow, pInternalRow);
        pInternal->AddVector(pInternalRow);
    }
}

double GNeuralNet::TrainValidate()
{
    // Mean squared error over the validation set
    int n, i, nIndex;
    GNeuron* pNeuron;
    double* pRow;
    double d;
    double dError = 0;
    int nCount = m_pValidationDataInternal->GetSize();
    int nOutputs = m_pInternalRelation->GetOutputCount();
    for(n = 0; n < nCount; n++)
    {
        pRow = m_pValidationDataInternal->GetVector(n);
        EvalInternal(pRow);
        nIndex = m_pInternalRelation->GetOutputIndex(0);
        for(i = 0; i < nOutputs; i++)
        {
            pNeuron = (GNeuron*)m_pNeurons->GetPointer(i);
            d = pRow[nIndex++] - pNeuron->GetOutput();
            d *= d;
            dError += d;
        }
    }
    dError /= (nCount * nOutputs);
    return dError;
}

void GNeuralNet::PrintNeurons()
{
    printf("-----------------\n");
    GNeuron* pNeuron;
    int n;
    for(n = 0; n < m_nInputStart; n++)
    {
        pNeuron = (GNeuron*)m_pNeurons->GetPointer(n);
        printf("Neuron %d\n", n);
        pNeuron->Print();
    }
    printf("-----------------\n");
}

void GNeuralNet::Train(GArffData* pData)
{
    int nTrainRows = (int)(m_dTrainingPortion * pData->GetSize());
    GArffData* pValidateData = pData->SplitBySize(nTrainRows);
    Train(pData, pValidateData);
}

int GNeuralNet::Train(GArffData* pTrainingData, GArffData* pValidationData)
{
    TrainInit(pTrainingData, pValidationData);

    // Do the epochs
    int nEpochs;
    double dBestError = 1e20;
    int nEpochsSinceValidationCheck = 0;
    int nBestEpoch = 0;
    for(nEpochs = 0; true; nEpochs++)
    {
        TrainEpoch();

        // Check for termination condition
        nEpochsSinceValidationCheck++;
        if(nEpochsSinceValidationCheck >= m_nEpochsPerValidationCheck)
        {
            nEpochsSinceValidationCheck = 0;
            double dMeanSquareError = TrainValidate();
//printf("Epoch: %d\tError=%lf\n", nEpochs, dBestError);
            if(dMeanSquareError < dBestError)
            {
                // Found a new best set of weights
                dBestError = dMeanSquareError;
                nBestEpoch = nEpochs;
                UpdateBestWeights();
                if(dMeanSquareError <= m_dAcceptableMeanSquareError)
                    break;
            }
            else
            {
                // Test for termination condition
                if(nEpochs - nBestEpoch >= m_nRunEpochs)
                    break;
            }
            if(nEpochs >= m_nMaximumEpochs)
                break;
        }
    }
    if(dBestError < 1e20)
        RestoreBestWeights();
    else
    {
        GAssert(false, "Total failure!");
    }
    ReleaseInternalData();
    return nEpochs;
}

void GNeuralNet::TrainInit(GArffData* pTrainingData, GArffData* pValidationData)
{
    GAssert(m_nRunEpochs <= m_nMaximumEpochs, "conflicting settings");

    // Add the input layer
    MakeInputLayer();

    // Make the internal data
    MeasureMinAndRanges(pTrainingData);
    ReleaseInternalData();
    m_pTrainingDataInternal = new GArffData(pTrainingData->GetSize());
    ExternalToInternalData(pTrainingData, m_pTrainingDataInternal);
    if(pTrainingData == pValidationData)
        m_pValidationDataInternal = m_pTrainingDataInternal;
    else
    {
        m_pValidationDataInternal = new GArffData(pValidationData->GetSize());
        ExternalToInternalData(pValidationData, m_pValidationDataInternal);
    }
}

void GNeuralNet::TrainEpoch()
{
    // Do a single epoch
    double* pRow;
    GNeuron* pNeuron;
    int n, i;
    int nRowCount = m_pTrainingDataInternal->GetSize();
    for(n = 0; n < nRowCount; n++)
    {
        // Compute output for this row
        pRow = m_pTrainingDataInternal->GetVector(n);
        EvalInternal(pRow);

        // Backpropagate the error
        Criticize(pRow);

        // Adjust the weights in a gradient descent manner
        for(i = 0; i < m_nInputStart; i++)
        {
            pNeuron = (GNeuron*)m_pNeurons->GetPointer(i);
            pNeuron->AjustWeights(m_dLearningRate, m_dMomentum);
        }
    }
    m_dLearningRate *= m_dLearningDecay;
    m_pTrainingDataInternal->Shuffle();
}

int GNeuralNet::TrainBatch(GArffData* pTrainingData, GArffData* pValidationData)
{
    TrainInit(pTrainingData, pValidationData);

    // Do the epochs
    double* pRow;
    GNeuron* pNeuron;
    int n, i, nEpochs;
    double dBestError = 1e20;
    int nRowCount = m_pTrainingDataInternal->GetSize();
    int nEpochsSinceValidationCheck = 0;
    int nBestEpoch = 0;
    for(nEpochs = 0; true; nEpochs++)
    {
        // Train with each of the training examples (one epoch)
        for(n = 0; n < nRowCount; n++)
        {
            // Compute output for this row
            pRow = m_pTrainingDataInternal->GetVector(n);
            EvalInternal(pRow);

            // Backpropagate the error
            Criticize(pRow);

            // Accumulate the weight deltas in a gradient descent manner
            for(i = 0; i < m_nInputStart; i++)
            {
                pNeuron = (GNeuron*)m_pNeurons->GetPointer(i);
                pNeuron->BatchUpdateDeltas(m_dLearningRate);
            }
        }

        // Adjust the weights by the summed weight deltas
        for(i = 0; i < m_nInputStart; i++)
        {
            pNeuron = (GNeuron*)m_pNeurons->GetPointer(i);
            pNeuron->BatchUpdateWeights();
        }
        m_dLearningRate *= m_dLearningDecay;

        // Check for termination condition
        nEpochsSinceValidationCheck++;
        if(nEpochsSinceValidationCheck >= m_nEpochsPerValidationCheck)
        {
            nEpochsSinceValidationCheck = 0;
            double dMeanSquareError = TrainValidate();
//printf("Epoch: %d\tError=%lf\n", nEpochs, dBestError);
            if(dMeanSquareError < dBestError)
            {
                // Found a new best set of weights
                dBestError = dMeanSquareError;
                nBestEpoch = nEpochs;
                UpdateBestWeights();
                if(dMeanSquareError <= m_dAcceptableMeanSquareError)
                    break;
            }
            else
            {
                // Test for termination condition
                if(nEpochs - nBestEpoch >= m_nRunEpochs)
                    break;
            }
            if(nEpochs >= m_nMaximumEpochs)
                break;
        }
    }
    if(dBestError < 1e20)
        RestoreBestWeights();
    else
    {
        GAssert(false, "Total failure!");
    }
    ReleaseInternalData();
    return nEpochs;
}
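For orientation, a minimal usage sketch follows. It is not part of the original file: the GNeuralNet constructor taking a GArffRelation* and the way the data objects are obtained are assumptions, since neither appears in this excerpt; AddLayer, Train, Eval, and PrintNeurons are the methods shown in the listing above. Note also that Criticize seeds each output neuron with (target - output) * output * (1 - output), the standard error delta for a sigmoid unit, so this backpropagation implementation assumes sigmoid activations.

// Hypothetical usage sketch -- not from gneuralnet.cpp.
// Assumptions: the constructor signature, and that the caller already
// holds a GArffRelation* and a GArffData* (the loading API is not shown).
void ExampleUsage(GArffRelation* pRelation, GArffData* pTrainingData)
{
    GNeuralNet nn(pRelation);   // assumed constructor
    nn.AddLayer(8);             // each new layer feeds the previously added one
    nn.AddLayer(4);
    nn.Train(pTrainingData);    // splits off a validation portion internally

    double row[3] = { 0.2, 0.7, 0.0 };  // input values plus a slot for the output
    nn.Eval(row);               // predicted output(s) are written back into row
    nn.PrintNeurons();          // dump the trained network
}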
