cneuralnet.cpp
From the classic book on developing game AI (C++)

#include "CNeuralNet.h"


//*************************** methods for Neuron **********************
//
//---------------------------------------------------------------------
SNeuron::SNeuron(int NumInputs): m_iNumInputs(NumInputs+1),
                                 m_dActivation(0),
                                 m_dError(0)
{
	//we need an additional weight for the bias hence the +1
	for (int i=0; i<NumInputs+1; ++i)
	{
		//set up the weights with an initial random value
		m_vecWeight.push_back(RandomClamped());

		m_vecPrevUpdate.push_back(0);
	}
}
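
//NOTE: RandomClamped() is defined in the project's utility header, not in
//this file. The stand-in below is a minimal sketch of the behaviour it is
//assumed to have, based on the "-1 < w < 1" comment above CreateNet()
//further down: a uniform random double in the range -1 to 1. The name is
//hypothetical so it cannot clash with the real definition.
#include <cstdlib>

static double ExampleRandomClamped()
{
  //map rand()'s 0..RAND_MAX range onto -1..1
  return 2.0 * (std::rand() / (double)RAND_MAX) - 1.0;
}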




//************************ methods for NeuronLayer **********************

//-----------------------------------------------------------------------
//	ctor creates a layer of neurons of the required size by calling the
//	SNeuron ctor the required number of times
//-----------------------------------------------------------------------
SNeuronLayer::SNeuronLayer(int NumNeurons, 
                           int NumInputsPerNeuron):	m_iNumNeurons(NumNeurons)
{
	for (int i=0; i<NumNeurons; ++i)

		m_vecNeurons.push_back(SNeuron(NumInputsPerNeuron));
}




//************************ methods for CNeuralNet ************************



//------------------------------- ctor -----------------------------------
//
//------------------------------------------------------------------------
CNeuralNet::CNeuralNet(int NumInputs,
                       int NumOutputs,
                       int HiddenNeurons,
                       double LearningRate):m_iNumInputs(NumInputs),
                                            m_iNumOutputs(NumOutputs),
                                            m_iNumHiddenLayers(1),
                                            m_iNeuronsPerHiddenLyr(HiddenNeurons),
                                            m_dLearningRate(LearningRate),
                                            m_dErrorSum(9999),
                                            m_bTrained(false),
                                            m_iNumEpochs(0)
{
	CreateNet();
}
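
//NOTE: this version of the class hardwires m_iNumHiddenLayers to 1; the
//backprop code in NetworkTrainingEpoch() relies on this by indexing the
//hidden layer as m_vecLayers[0] and the output layer as m_vecLayers[1].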

//------------------------------createNet()------------------------------
//
//	this method builds the ANN. The weights are all initially set to 
//	random values -1 < w < 1
//------------------------------------------------------------------------
void CNeuralNet::CreateNet()
{
	//create the layers of the network
	if (m_iNumHiddenLayers > 0)
	{
		//create first hidden layer
	  m_vecLayers.push_back(SNeuronLayer(m_iNeuronsPerHiddenLyr, m_iNumInputs));
    
    for (int i=0; i<m_iNumHiddenLayers-1; ++i)
    {

			m_vecLayers.push_back(SNeuronLayer(m_iNeuronsPerHiddenLyr,
                                         m_iNeuronsPerHiddenLyr));
    }

    //create output layer
	  m_vecLayers.push_back(SNeuronLayer(m_iNumOutputs, m_iNeuronsPerHiddenLyr));
	}

  else
  {
	  //create output layer
	  m_vecLayers.push_back(SNeuronLayer(m_iNumOutputs, m_iNumInputs));
  }
}
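
//Worked example of the topology CreateNet() builds: with NumInputs = 4,
//HiddenNeurons = 6 and NumOutputs = 2, m_vecLayers holds a hidden layer
//of 6 neurons with 4+1 (bias) weights each, followed by an output layer
//of 2 neurons with 6+1 (bias) weights each.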


//--------------------------- Initialize ---------------------------------
//
//  randomizes all the weights to values between -1 and 1
//------------------------------------------------------------------------
void CNeuralNet::InitializeNetwork()
{
	//for each layer
	for (int i=0; i<m_iNumHiddenLayers + 1; ++i)
	{
		//for each neuron
		for (int n=0; n<m_vecLayers[i].m_iNumNeurons; ++n)
		{
			//for each weight
			for (int k=0; k<m_vecLayers[i].m_vecNeurons[n].m_iNumInputs; ++k)
			{
				m_vecLayers[i].m_vecNeurons[n].m_vecWeight[k] = RandomClamped();
			}
		}
	}

  m_dErrorSum  = 9999;
  m_iNumEpochs = 0;

	return;
}
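
//NOTE: Train() calls InitializeNetwork() before each training run, so the
//error sum sentinel and the epoch counter above are reset whenever a new
//round of training starts.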

//-------------------------------Update-----------------------------------
//
//	given an input vector this function calculates the output vector
//
//------------------------------------------------------------------------
vector<double> CNeuralNet::Update(vector<double> inputs)
{
  //stores the resultant outputs from each layer
	vector<double> outputs;
  
	int cWeight = 0;
	
	//first check that we have the correct amount of inputs
	if (inputs.size() != m_iNumInputs)
  {
		//just return an empty vector if incorrect.
		return outputs;
  }
	
	//For each layer...
	for (int i=0; i<m_iNumHiddenLayers + 1; ++i)
	{
		
		if ( i > 0 )
    {
			inputs = outputs;
    }

		outputs.clear();
		
		cWeight = 0;

		//for each neuron sum the (inputs * corresponding weights). Throw
		//the total at our sigmoid function to get the output.
		for (int n=0; n<m_vecLayers[i].m_iNumNeurons; ++n)
		{
			double netinput = 0.0;

			int	NumInputs = m_vecLayers[i].m_vecNeurons[n].m_iNumInputs;
			
			//for each weight
			for (int k=0; k<NumInputs - 1; ++k)
			{
				//sum the weights x inputs
				netinput += m_vecLayers[i].m_vecNeurons[n].m_vecWeight[k] * 
                    inputs[cWeight++];
			}

			//add in the bias
			netinput += m_vecLayers[i].m_vecNeurons[n].m_vecWeight[NumInputs-1] * 
                  BIAS;

			 
      //The combined activation is first filtered through the sigmoid 
      //function and a record is kept for each neuron 
      m_vecLayers[i].m_vecNeurons[n].m_dActivation = 
        Sigmoid(netinput, ACTIVATION_RESPONSE);

			//store the outputs from each layer as we generate them.
      outputs.push_back(m_vecLayers[i].m_vecNeurons[n].m_dActivation);

			cWeight = 0;
		}
	}

	return outputs;
}
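
//A minimal usage sketch for Update() (this helper is hypothetical and not
//part of the original listing; the 2-input / 1-output / 4-hidden topology
//and the 0.5 learning rate are illustrative assumptions):
static void ExampleUpdateUsage()
{
  CNeuralNet net(2, 1, 4, 0.5);

  vector<double> in;
  in.push_back(0.9);
  in.push_back(0.1);

  //on success out holds one activation in the range (0,1); if the input
  //vector is the wrong size Update() returns an empty vector instead
  vector<double> out = net.Update(in);
}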

//----------------------------NetworkTrainingEpoch -----------------------
//
//  given a training set this method trains the network using backprop.
//  The training set comprises a series of input vectors and a series
//  of output vectors.
//  Returns false if there is a problem
//------------------------------------------------------------------------
bool CNeuralNet::NetworkTrainingEpoch(vector<iovector> &SetIn,
                                      vector<iovector> &SetOut)
{
  //create some iterators
  vector<double>::iterator  curWeight;
  vector<SNeuron>::iterator curNrnOut, curNrnHid;

  double WeightUpdate = 0;

  //this will hold the cumulative error value for the training set
  m_dErrorSum = 0;

  //run each input pattern through the network, calculate the errors and update
  //the weights accordingly
  for (int vec=0; vec<SetIn.size(); ++vec)
  {
    //first run this input vector through the network and retrieve the outputs
    vector<double> outputs = Update(SetIn[vec]);

    //return if error has occurred
    if (outputs.size() == 0)
    {
      return false;
    }

    //for each output neuron calculate the error and adjust weights
    //accordingly
    for (int op=0; op<m_iNumOutputs; ++op)
    {
      //first calculate the error value
      double err = (SetOut[vec][op] - outputs[op]) * outputs[op]
                   * (1 - outputs[op]);

      //update the error total. (when this value becomes lower than a
      //preset threshold we know the training is successful)
      m_dErrorSum += (SetOut[vec][op] - outputs[op]) *
                     (SetOut[vec][op] - outputs[op]);      

      //keep a record of the error value
      m_vecLayers[1].m_vecNeurons[op].m_dError = err;

      curWeight = m_vecLayers[1].m_vecNeurons[op].m_vecWeight.begin();
      curNrnHid = m_vecLayers[0].m_vecNeurons.begin();

      int w = 0;

      //for each weight up to but not including the bias
      while(curWeight != m_vecLayers[1].m_vecNeurons[op].m_vecWeight.end()-1)
      {
        //calculate weight update
        WeightUpdate = err * m_dLearningRate * curNrnHid->m_dActivation;
        
        //calculate the new weight based on the backprop rules and adding in momentum
        *curWeight += WeightUpdate + m_vecLayers[1].m_vecNeurons[op].m_vecPrevUpdate[w] * MOMENTUM;

        //keep a record of this weight update
        m_vecLayers[1].m_vecNeurons[op].m_vecPrevUpdate[w] = WeightUpdate;

        ++curWeight; ++curNrnHid; ++w;
      }

      //and the bias for this neuron
      WeightUpdate = err * m_dLearningRate * BIAS;

      *curWeight += WeightUpdate + m_vecLayers[1].m_vecNeurons[op].m_vecPrevUpdate[w] * MOMENTUM;  

      //keep a record of this weight update
      m_vecLayers[1].m_vecNeurons[op].m_vecPrevUpdate[w] = WeightUpdate;
      
      
    }

    //**moving backwards to the hidden layer**
    curNrnHid = m_vecLayers[0].m_vecNeurons.begin();

    int n = 0;
    
    //for each neuron in the hidden layer calculate the error signal
    //and then adjust the weights accordingly
    while(curNrnHid != m_vecLayers[0].m_vecNeurons.end())
    {
      double err = 0;

      curNrnOut = m_vecLayers[1].m_vecNeurons.begin();

      //to calculate the error for this neuron we need to iterate through
      //all the neurons in the output layer it is connected to and sum
      //the error * weights
      while(curNrnOut != m_vecLayers[1].m_vecNeurons.end())
      {
        err += curNrnOut->m_dError * curNrnOut->m_vecWeight[n];

        ++curNrnOut;
      }

      //now we can calculate the error
      err *= curNrnHid->m_dActivation * (1 - curNrnHid->m_dActivation);     
      
      //for each weight in this neuron calculate the new weight based
      //on the error signal and the learning rate
      for (int w=0; w<m_iNumInputs; ++w)
      {
        WeightUpdate = err * m_dLearningRate * SetIn[vec][w];

        //calculate the new weight based on the backprop rules and adding in momentum
        curNrnHid->m_vecWeight[w] += WeightUpdate + curNrnHid->m_vecPrevUpdate[w] * MOMENTUM;
        
        //keep a record of this weight update
        curNrnHid->m_vecPrevUpdate[w] = WeightUpdate;

      }

      //and the bias. The bias weight is the last one, at index
      //m_iNumInputs (the loop counter w above goes out of scope under
      //standard C++ scoping rules, so it cannot be reused here)
      WeightUpdate = err * m_dLearningRate * BIAS;

      curNrnHid->m_vecWeight[m_iNumInputs] += WeightUpdate +
                 curNrnHid->m_vecPrevUpdate[m_iNumInputs] * MOMENTUM;

      //keep a record of this weight update
      curNrnHid->m_vecPrevUpdate[m_iNumInputs] = WeightUpdate;

      ++curNrnHid;
      ++n;
    }

  }//next input vector
  return true;
}
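
//In equation form, the epoch above implements the standard delta rule.
//For an output neuron with activation o and target t:
//
//    err_o = (t - o) * o * (1 - o)
//
//For a hidden neuron with activation h, connected to the output layer by
//weights w_oh:
//
//    err_h = h * (1 - h) * sum over output neurons of (err_o * w_oh)
//
//and every weight is then nudged by LearningRate * err * input, plus
//MOMENTUM times its previous update.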

//----------------------------- Train ------------------------------------
//
//  Given some training data in the form of a CData object this function
//  trains the network until the error is within acceptable limits.
// 
//  the HWND is required to give some graphical feedback
//------------------------------------------------------------------------
bool CNeuralNet::Train(CData* data, HWND hwnd)
{
  vector<vector<double> > SetIn  = data->GetInputSet();
  vector<vector<double> > SetOut = data->GetOutputSet();

   //first make sure the training set is valid
   if ((SetIn.size()     != SetOut.size())  || 
       (SetIn[0].size()  != m_iNumInputs)   ||
       (SetOut[0].size() != m_iNumOutputs))
   {
     MessageBox(NULL, "Inputs != Outputs", "Error", NULL);
    
     return false;
   }
  
   //initialize all the weights to small random values
   InitializeNetwork();

   //train using backprop until the SSE is below the user defined
   //threshold
   while( m_dErrorSum > ERROR_THRESHOLD )
   {
     //return false if there are any problems
     if (!NetworkTrainingEpoch(SetIn, SetOut))
     {
       return false;
     }

     ++m_iNumEpochs;
     
     //call the render routine to display the error sum
     InvalidateRect(hwnd, NULL, TRUE);
     UpdateWindow(hwnd);
   }

   m_bTrained = true;
   
   return true;
}
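
//Usage in outline (hypothetical; the CData object and the hwnd come from
//the surrounding application, and the topology and learning rate values
//are illustrative):
//
//  CNeuralNet net(2, 1, 4, 0.5);
//
//  if (net.Train(data, hwnd))
//  {
//    //m_dErrorSum has dropped below ERROR_THRESHOLD
//  }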


//-------------------------------Sigmoid function-------------------------
//
//------------------------------------------------------------------------
double CNeuralNet::Sigmoid(double netinput, double response)
{
	return ( 1 / ( 1 + exp(-netinput / response)));
}
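
//e.g. Sigmoid(0, r) = 0.5 for any non-zero response r. The response
//parameter controls the steepness of the curve: the smaller the value of
//ACTIVATION_RESPONSE passed in from Update(), the closer the function
//gets to a hard step. Note the division means response must never be 0.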

