
cneuralnet.cpp

From the book 《游戏编程中的人工智能技术》 (AI Techniques for Game Programming). Language: C++.
#include "CNeuralNet.h"




//*************************** methods for Neuron **********************
//
//---------------------------------------------------------------------
SNeuron::SNeuron(int NumInputs): m_iNumInputs(NumInputs+1),
                                 m_dActivation(0),
                                 m_dError(0)
{
	//we need an additional weight for the bias hence the +1
	for (int i=0; i<NumInputs+1; ++i)
	{
		//set up the weights with an initial random value
		m_vecWeight.push_back(RandomClamped());

		//zero the record of the previous update (used by the momentum term)
		m_vecPrevUpdate.push_back(0);
	}
}




//************************ methods for NeuronLayer **********************

//-----------------------------------------------------------------------
//	ctor creates a layer of neurons of the required size by calling the 
//	SNeuron ctor the rqd number of times
//-----------------------------------------------------------------------
SNeuronLayer::SNeuronLayer(int NumNeurons, 
                           int NumInputsPerNeuron):	m_iNumNeurons(NumNeurons)
{
	for (int i=0; i<NumNeurons; ++i)

		m_vecNeurons.push_back(SNeuron(NumInputsPerNeuron));
}




//************************ methods for CNeuralNet ************************



//------------------------------- ctor -----------------------------------
//
//  the default value for softmax belongs on the declaration in
//  CNeuralNet.h; a default argument may not be repeated on the definition
//------------------------------------------------------------------------
CNeuralNet::CNeuralNet(int NumInputs,
                       int NumOutputs,
                       int HiddenNeurons,
                       double LearningRate,
                       bool   softmax):         m_iNumInputs(NumInputs),
                                                m_iNumOutputs(NumOutputs),
                                                m_iNumHiddenLayers(1),
                                                m_bSoftMax(softmax),
                                                m_iNeuronsPerHiddenLyr(HiddenNeurons),
                                                m_dLearningRate(LearningRate),
                                                m_dErrorSum(9999),
                                                m_bTrained(false),
                                                m_iNumEpochs(0)
                                             
{
	CreateNet();
}

//------------------------------createNet()------------------------------
//
//	this method builds the ANN. The weights are all initially set to 
//	random values -1 < w < 1
//------------------------------------------------------------------------
void CNeuralNet::CreateNet()
{
	//create the layers of the network
	if (m_iNumHiddenLayers > 0)
	{
		//create first hidden layer
	  m_vecLayers.push_back(SNeuronLayer(m_iNeuronsPerHiddenLyr, m_iNumInputs));
    
    for (int i=0; i<m_iNumHiddenLayers-1; ++i)
    {

			m_vecLayers.push_back(SNeuronLayer(m_iNeuronsPerHiddenLyr,
                                         m_iNeuronsPerHiddenLyr));
    }

    //create output layer
	  m_vecLayers.push_back(SNeuronLayer(m_iNumOutputs, m_iNeuronsPerHiddenLyr));
	}

  else
  {
	  //create output layer
	  m_vecLayers.push_back(SNeuronLayer(m_iNumOutputs, m_iNumInputs));
  }
}
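
//for illustration: CNeuralNet(2, 1, 4, ...) creates two layers:
//m_vecLayers[0] holds 4 hidden neurons with 2 inputs (+ bias) each, and
//m_vecLayers[1] holds 1 output neuron with 4 inputs (+ bias)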


//--------------------------- Initialize ---------------------------------
//
//  randomizes all the weights to values between -1 and 1
//------------------------------------------------------------------------
void CNeuralNet::InitializeNetwork()
{
	//for each layer
	for (int i=0; i<m_iNumHiddenLayers + 1; ++i)
	{
		//for each neuron
		for (int n=0; n<m_vecLayers[i].m_iNumNeurons; ++n)
		{
			//for each weight
			for (int k=0; k<m_vecLayers[i].m_vecNeurons[n].m_iNumInputs; ++k)
			{
				m_vecLayers[i].m_vecNeurons[n].m_vecWeight[k] = RandomClamped();
			}
		}
	}

  m_dErrorSum  = 9999;
  m_iNumEpochs = 0;

	return;
}


//-----------------------------------------------------------------------
vector<double> CNeuralNet::Update(vector<double> inputs)
{ 
 
  //stores the resultant outputs from each layer
	vector<double> outputs;

	int cWeight = 0;
	
	//first check that we have the correct amount of inputs
	if (inputs.size() != m_iNumInputs)
  {
		//just return an empty vector if incorrect.
		return outputs;
  }
	
	//For each layer...
	for (int i=0; i<m_iNumHiddenLayers + 1; ++i)
	{
		
		if ( i > 0 )
    {
			inputs = outputs;
    }

		outputs.clear();
		
		cWeight = 0;

		//for each neuron, sum the (inputs * corresponding weights). Throw
		//the total at our sigmoid function to get the output.
		for (int n=0; n<m_vecLayers[i].m_iNumNeurons; ++n)
		{
			double netinput = 0.0;

			int	NumInputs = m_vecLayers[i].m_vecNeurons[n].m_iNumInputs;
			
			//for each weight
			for (int k=0; k<NumInputs - 1; ++k)
			{
				//sum the weights x inputs
				netinput += m_vecLayers[i].m_vecNeurons[n].m_vecWeight[k] * 
                    inputs[cWeight++];
			}

			//add in the bias
			netinput += m_vecLayers[i].m_vecNeurons[n].m_vecWeight[NumInputs-1] * 
                  BIAS;
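			//i.e. net = sum_k( w_k * x_k ) + w_bias * BIAS, with the bias
			//weight stored in the last slot of m_vecWeight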

			 
      //softmax on the output layer: store the raw exponential here; the
      //values are normalized to sum to one after this loop
      if (m_bSoftMax && (i == m_iNumHiddenLayers))
      {
        m_vecLayers[i].m_vecNeurons[n].m_dActivation = exp(netinput);
      }

      else
      {
        //The combined activation is first filtered through the sigmoid 
        //function and a record is kept for each neuron 
        m_vecLayers[i].m_vecNeurons[n].m_dActivation = Sigmoid(netinput, ACTIVATION_RESPONSE);
      }

			//store the outputs from each layer as we generate them.
      outputs.push_back(m_vecLayers[i].m_vecNeurons[n].m_dActivation);

			cWeight = 0;
		}
	}

  if (m_bSoftMax)
  {
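    //i.e. each output becomes y_i = exp(net_i) / sum_j( exp(net_j) ),
    //so the outputs are positive and sum to one (a probability vector)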
    double expTot = 0;

    //first sum the exponentials already stored in the outputs
    for (int o=0; o<outputs.size(); ++o)
    {
      expTot += outputs[o];
    }    

    //now adjust each output accordingly
    for (int o=0; o<outputs.size(); ++o)
    {
      outputs[o] = outputs[o]/expTot;

      m_vecLayers[m_iNumHiddenLayers].m_vecNeurons[o].m_dActivation = outputs[o];    
    }
  }

	return outputs;
}
//----------------------------NetworkTrainingEpoch -----------------------
//
//  given a training set this method trains the network using backprop.
//  The training set comprises a series of input vectors and a series
//  of corresponding output vectors.
//  Note: this implementation assumes a single hidden layer, with
//  m_vecLayers[0] the hidden layer and m_vecLayers[1] the output layer.
//  Returns false if there is a problem
//------------------------------------------------------------------------
bool CNeuralNet::NetworkTrainingEpoch(vector<iovector> &SetIn,
                                      vector<iovector> &SetOut)
{
  //create some iterators
  vector<double>::iterator  curWeight;
  vector<SNeuron>::iterator curNrnOut, curNrnHid;

  double WeightUpdate = 0;

  //this will hold the cumulative error value for the training set
  m_dErrorSum = 0;

  //run each input pattern through the network, calculate the errors and update
  //the weights accordingly
  for (int vec=0; vec<SetIn.size(); ++vec)
  {
    //first run this input vector through the network and retrieve the outputs
    vector<double> outputs = Update(SetIn[vec]);

    //return if error has occurred
    if (outputs.size() == 0)
    {
      return false;
    }

    //for each output neuron calculate the error and adjust weights
    //accordingly
    for (int op=0; op<m_iNumOutputs; ++op)
    {
      //first calculate the error value
      double err = (SetOut[vec][op] - outputs[op]) * outputs[op]
                   * (1 - outputs[op]);      
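      //this is the delta rule for a sigmoid output unit: the (target -
      //output) error scaled by the sigmoid derivative y * (1 - y)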

      //keep a record of the error value
      m_vecLayers[1].m_vecNeurons[op].m_dError = err;

      curWeight = m_vecLayers[1].m_vecNeurons[op].m_vecWeight.begin();
      curNrnHid = m_vecLayers[0].m_vecNeurons.begin();

      int w = 0;

      //for each weight up to but not including the bias
      while(curWeight != m_vecLayers[1].m_vecNeurons[op].m_vecWeight.end()-1)
      {
        //calculate weight update
        WeightUpdate = err * m_dLearningRate * curNrnHid->m_dActivation;
        
        //calculate the new weight based on the backprop rules and adding in momentum
        *curWeight += WeightUpdate + m_vecLayers[1].m_vecNeurons[op].m_vecPrevUpdate[w] * MOMENTUM;
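        //i.e. w += eta * delta * activation + MOMENTUM * (previous update):
        //the generalized delta rule with a momentum term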

        //keep a record of this weight update
        m_vecLayers[1].m_vecNeurons[op].m_vecPrevUpdate[w] = WeightUpdate;

        ++curWeight; ++curNrnHid; ++w;
      }

      //and the bias for this neuron
      WeightUpdate = err * m_dLearningRate * BIAS;

      *curWeight += WeightUpdate + m_vecLayers[1].m_vecNeurons[op].m_vecPrevUpdate[w] * MOMENTUM;  

      //keep a record of this weight update
      m_vecLayers[1].m_vecNeurons[op].m_vecPrevUpdate[w] = WeightUpdate;
      
      
    }

    //update the error total. (when this value becomes lower than a
    //preset threshold we know the training is successful)
    double error = 0;

    if (!m_bSoftMax) //use the sum of squared errors (SSE)
    {
      for (int o=0; o<m_iNumOutputs; ++o)
      {
       
        error += (SetOut[vec][o] - outputs[o]) *
                 (SetOut[vec][o] - outputs[o]);
      }
    }

    else  //use cross-entropy error
    {
      for (int o=0; o<m_iNumOutputs; ++o)
      {
        error += SetOut[vec][o] * log(outputs[o]);
      }

      error = -error;
    }
     
    m_dErrorSum += error;


   //**moving backwards to the hidden layer**
    curNrnHid = m_vecLayers[0].m_vecNeurons.begin();

    int n = 0;
    
    //for each neuron in the hidden layer calculate the error signal
    //and then adjust the weights accordingly
    while(curNrnHid != m_vecLayers[0].m_vecNeurons.end())
    {
      double err = 0;

      curNrnOut = m_vecLayers[1].m_vecNeurons.begin();

      //to calculate the error for this neuron we need to iterate through
      //all the neurons in the output layer it is connected to and sum
      //the error * weights
      while(curNrnOut != m_vecLayers[1].m_vecNeurons.end())
      {
        err += curNrnOut->m_dError * curNrnOut->m_vecWeight[n];

        ++curNrnOut;
      }

      //now we can calculate the error
      err *= curNrnHid->m_dActivation * (1 - curNrnHid->m_dActivation);     
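      //i.e. delta_hidden = y * (1 - y) * sum_over_outputs( delta_out * w ):
      //the output errors propagated back through the connecting weights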
      
      //for each weight in this neuron calculate the new weight based
      //on the error signal and the learning rate
      for (int w=0; w<m_iNumInputs; ++w)
      {
        WeightUpdate = err * m_dLearningRate * SetIn[vec][w];

        //calculate the new weight based on the backprop rules and adding in momentum
        curNrnHid->m_vecWeight[w] += WeightUpdate + curNrnHid->m_vecPrevUpdate[w] * MOMENTUM;
        
        //keep a record of this weight update
        curNrnHid->m_vecPrevUpdate[w] = WeightUpdate;

      }

      //and the bias (the bias weight occupies the last slot, index m_iNumInputs)
      WeightUpdate = err * m_dLearningRate * BIAS;

      curNrnHid->m_vecWeight[m_iNumInputs] += WeightUpdate + curNrnHid->m_vecPrevUpdate[m_iNumInputs] * MOMENTUM;

      //keep a record of this weight update
      curNrnHid->m_vecPrevUpdate[m_iNumInputs] = WeightUpdate;

      ++curNrnHid;
      ++n;
    }

  }//next input vector
  return true;
}

//----------------------------- Train ------------------------------------
//
//  Given some training data in the form of a CData object this function
//  trains the network until the error is within acceptable limits.
// 
//  the HWND is required to give some graphical feedback
//------------------------------------------------------------------------
bool CNeuralNet::Train(CData* data, HWND hwnd)
{
  vector<vector<double> > SetIn  = data->GetInputSet();
  vector<vector<double> > SetOut = data->GetOutputSet();

   //first make sure the training set is valid
   if ((SetIn.size()     != SetOut.size())  || 
       (SetIn[0].size()  != m_iNumInputs)   ||
       (SetOut[0].size() != m_iNumOutputs))
   {
     MessageBox(NULL, "Inputs != Outputs", "Error", MB_OK);
    
     return false;
   }
  
   //initialize all the weights to small random values
   InitializeNetwork();

   //train using backprop until the error sum (SSE, or cross-entropy
   //when softmax is enabled) is below the user-defined threshold
   while( m_dErrorSum > ERROR_THRESHOLD )
   {
     //return false if there are any problems
     if (!NetworkTrainingEpoch(SetIn, SetOut))
     {
       return false;
     }

     ++m_iNumEpochs;
     
     //call the render routine to display the error sum
     InvalidateRect(hwnd, NULL, TRUE);
     UpdateWindow(hwnd);
   }

   m_bTrained = true;
   
   return true;
}


//-------------------------------Sigmoid function-------------------------
//
//------------------------------------------------------------------------
double CNeuralNet::Sigmoid(double netinput, double response)
{
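	//the logistic function 1/(1 + e^(-net/p)); a larger response p gives
	//a flatter curve, a smaller p a steeper one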
	return ( 1 / ( 1 + exp(-netinput / response)));
}
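

//--------------------------- usage sketch --------------------------------
//
//  a minimal console-only sketch of how this class can be driven. It
//  assumes iovector is the vector<double> typedef implied by
//  NetworkTrainingEpoch, and that CNeuralNet.h supplies RandomClamped(),
//  BIAS, ACTIVATION_RESPONSE and MOMENTUM as the listing requires. A
//  fixed epoch count stands in for Train()'s ERROR_THRESHOLD loop so
//  that no HWND or other Windows machinery is needed.
//--------------------------------------------------------------------------
#include <iostream>

int main()
{
  //the XOR truth table as a training set
  vector<iovector> SetIn, SetOut;

  double ins[4][2] = { {0,0}, {0,1}, {1,0}, {1,1} };
  double outs[4]   = {  0,     1,     1,     0   };

  for (int i=0; i<4; ++i)
  {
    SetIn.push_back(iovector(ins[i], ins[i]+2));
    SetOut.push_back(iovector(1, outs[i]));
  }

  //2 inputs, 1 output, 4 hidden neurons, learning rate 0.5, sigmoid outputs
  CNeuralNet net(2, 1, 4, 0.5, false);

  for (int epoch=0; epoch<5000; ++epoch)
  {
    if (!net.NetworkTrainingEpoch(SetIn, SetOut))
    {
      return 1;  //the training data did not match the network dimensions
    }
  }

  //query the trained network
  vector<double> in(2);
  in[0] = 1; in[1] = 0;
  std::cout << "1 XOR 0 -> " << net.Update(in)[0] << std::endl;

  return 0;
}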

