cneuralnet.cpp
From the definitive book on developing game AI (language: C++)
#include "CNeuralNet.h"


//*************************** methods for Neuron **********************
//
//---------------------------------------------------------------------
SNeuron::SNeuron(int NumInputs): m_iNumInputs(NumInputs+1),
                                 m_dActivation(0),
                                 m_dError(0)
{
	//we need an additional weight for the bias hence the +1
	for (int i=0; i<NumInputs+1; ++i)
	{
		//set up the weights with an initial random value
		m_vecWeight.push_back(RandomClamped());

		//zero the record of the previous weight update (used for momentum)
		m_vecPrevUpdate.push_back(0);
	}
}
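
//NOTE: RandFloat() and RandomClamped() are not defined in this file; they
//come from the accompanying utility header. A minimal sketch consistent
//with how they are used here (an assumption, not verbatim from that header):
//
//	inline double RandFloat()     {return rand() / (RAND_MAX + 1.0);}	//[0, 1)
//	inline double RandomClamped() {return RandFloat() - RandFloat();}	//(-1, 1)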




//************************ methods for NeuronLayer **********************

//-----------------------------------------------------------------------
//	ctor creates a layer of neurons of the required size by calling the
//	SNeuron ctor the required number of times
//-----------------------------------------------------------------------
SNeuronLayer::SNeuronLayer(int NumNeurons, 
                           int NumInputsPerNeuron):	m_iNumNeurons(NumNeurons)
{
	for (int i=0; i<NumNeurons; ++i)

		m_vecNeurons.push_back(SNeuron(NumInputsPerNeuron));
}




//************************ methods for CNeuralNet ************************



//------------------------------- ctor -----------------------------------
//
//	note: the number of hidden layers is hard-coded to one
//------------------------------------------------------------------------
CNeuralNet::CNeuralNet(int NumInputs,
                       int NumOutputs,
                       int HiddenNeurons,
                       double LearningRate):m_iNumInputs(NumInputs),
                                            m_iNumOutputs(NumOutputs),
                                            m_iNumHiddenLayers(1),
                                            m_iNeuronsPerHiddenLyr(HiddenNeurons),
                                            m_dLearningRate(LearningRate),
                                            m_dErrorSum(9999),
                                            m_bTrained(false),
                                            m_iNumEpochs(0)
{
	CreateNet();
}

//------------------------------createNet()------------------------------
//
//	this method builds the ANN. The weights are all initially set to 
//	random values -1 < w < 1
//------------------------------------------------------------------------
void CNeuralNet::CreateNet()
{
	//create the layers of the network
	if (m_iNumHiddenLayers > 0)
	{
		//create first hidden layer
	  m_vecLayers.push_back(SNeuronLayer(m_iNeuronsPerHiddenLyr, m_iNumInputs));
    
    for (int i=0; i<m_iNumHiddenLayers-1; ++i)
    {

			m_vecLayers.push_back(SNeuronLayer(m_iNeuronsPerHiddenLyr,
                                         m_iNeuronsPerHiddenLyr));
    }

    //create output layer
	  m_vecLayers.push_back(SNeuronLayer(m_iNumOutputs, m_iNeuronsPerHiddenLyr));
	}

  else
  {
	  //create output layer
	  m_vecLayers.push_back(SNeuronLayer(m_iNumOutputs, m_iNumInputs));
  }
}
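
//example (illustrative values, not from this file): CNeuralNet(4, 2, 6, 0.5)
//creates one hidden layer of 6 neurons with 5 weights each (4 inputs plus
//the bias) and an output layer of 2 neurons with 7 weights each (6 hidden
//activations plus the bias).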


//--------------------------- Initialize ---------------------------------
//
//  randomizes all the weights to values between -1 and 1
//------------------------------------------------------------------------
void CNeuralNet::InitializeNetwork()
{
	//for each layer
	for (int i=0; i<m_iNumHiddenLayers + 1; ++i)
	{
		//for each neuron
		for (int n=0; n<m_vecLayers[i].m_iNumNeurons; ++n)
		{
			//for each weight
			for (int k=0; k<m_vecLayers[i].m_vecNeurons[n].m_iNumInputs; ++k)
			{
				m_vecLayers[i].m_vecNeurons[n].m_vecWeight[k] = RandomClamped();
			}
		}
	}

  m_dErrorSum  = 9999;
  m_iNumEpochs = 0;

	return;
}

//-------------------------------Update-----------------------------------
//
//	given an input vector this function calculates the output vector
//
//------------------------------------------------------------------------
vector<double> CNeuralNet::Update(vector<double> inputs)
{
	//add in some noise to the data
  for (int k=0; k<inputs.size(); ++k)
  {
    inputs[k]+=RandFloat() * MAX_NOISE_TO_ADD;
  }
  
  //stores the resultant outputs from each layer
	vector<double> outputs;
  
	int cWeight = 0;
	
	//first check that we have the correct number of inputs
	if (inputs.size() != m_iNumInputs)
  {
		//just return an empty vector if incorrect.
		return outputs;
  }
	
	//For each layer...
	for (int i=0; i<m_iNumHiddenLayers + 1; ++i)
	{
		
		if ( i > 0 )
    {
			inputs = outputs;
    }

		outputs.clear();
		
		cWeight = 0;

		//for each neuron sum the (inputs * corresponding weights). Throw
		//the total at our sigmoid function to get the output.
		for (int n=0; n<m_vecLayers[i].m_iNumNeurons; ++n)
		{
			double netinput = 0.0;

			int	NumInputs = m_vecLayers[i].m_vecNeurons[n].m_iNumInputs;
			
			//for each weight
			for (int k=0; k<NumInputs - 1; ++k)
			{
				//sum the weights x inputs
				netinput += m_vecLayers[i].m_vecNeurons[n].m_vecWeight[k] * 
                    inputs[cWeight++];
			}

			//add in the bias
			netinput += m_vecLayers[i].m_vecNeurons[n].m_vecWeight[NumInputs-1] * 
                  BIAS;

			 
      //The combined activation is first filtered through the sigmoid 
      //function and a record is kept for each neuron 
      m_vecLayers[i].m_vecNeurons[n].m_dActivation = 
        Sigmoid(netinput, ACTIVATION_RESPONSE);

			//store the outputs from each layer as we generate them.
      outputs.push_back(m_vecLayers[i].m_vecNeurons[n].m_dActivation);

			cWeight = 0;
		}
	}

	return outputs;
}
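
//In symbols, each neuron above computes
//
//	net = sum_k( w_k * x_k ) + w_bias * BIAS
//	a   = 1 / (1 + exp(-net / ACTIVATION_RESPONSE))
//
//and the vector of activations of one layer becomes the input vector of
//the next layer.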

//----------------------------NetworkTrainingEpoch -----------------------
//
//  given a training set this method trains the network using backprop.
//  The training set comprises a series of input vectors and a
//  corresponding series of output vectors.
//  Returns false if there is a problem
//------------------------------------------------------------------------
bool CNeuralNet::NetworkTrainingEpoch(vector<iovector> &SetIn,
                                      vector<iovector> &SetOut)
{
  //create some iterators
  vector<double>::iterator  curWeight;
  vector<SNeuron>::iterator curNrnOut, curNrnHid;

  double WeightUpdate = 0;

  //this will hold the cumulative error value for the training set
  m_dErrorSum = 0;

  //run each input pattern through the network, calculate the errors and update
  //the weights accordingly
  for (int vec=0; vec<SetIn.size(); ++vec)
  {
    //first run this input vector through the network and retrieve the outputs
    vector<double> outputs = Update(SetIn[vec]);

    //return if error has occurred
    if (outputs.size() == 0)
    {
      return false;
    }

    //for each output neuron calculate the error and adjust weights
    //accordingly
    for (int op=0; op<m_iNumOutputs; ++op)
    {
      //first calculate the error value
      double err = (SetOut[vec][op] - outputs[op]) * outputs[op]
                   * (1 - outputs[op]);

      //update the error total. (when this value becomes lower than a
      //preset threshold we know the training is successful)
      m_dErrorSum += (SetOut[vec][op] - outputs[op]) *
                     (SetOut[vec][op] - outputs[op]);      

      //keep a record of the error value. (Layer index 1 is the output
      //layer, since this net always has exactly one hidden layer)
      m_vecLayers[1].m_vecNeurons[op].m_dError = err;

      curWeight = m_vecLayers[1].m_vecNeurons[op].m_vecWeight.begin();
      curNrnHid = m_vecLayers[0].m_vecNeurons.begin();

      int w = 0;

      //for each weight up to but not including the bias
      while(curWeight != m_vecLayers[1].m_vecNeurons[op].m_vecWeight.end()-1)
      {
        //calculate weight update
        WeightUpdate = err * m_dLearningRate * curNrnHid->m_dActivation;
        
        //calculate the new weight based on the backprop rules and adding in momentum
        *curWeight += WeightUpdate + m_vecLayers[1].m_vecNeurons[op].m_vecPrevUpdate[w] * MOMENTUM;

        //keep a record of this weight update
        m_vecLayers[1].m_vecNeurons[op].m_vecPrevUpdate[w] = WeightUpdate;

        ++curWeight; ++curNrnHid; ++w;
      }

      //and the bias for this neuron
      WeightUpdate = err * m_dLearningRate * BIAS;

      *curWeight += WeightUpdate + m_vecLayers[1].m_vecNeurons[op].m_vecPrevUpdate[w] * MOMENTUM;  

      //keep a record of this weight update
      m_vecLayers[1].m_vecNeurons[op].m_vecPrevUpdate[w] = WeightUpdate;
      
      
    }

   //**moving backwards to the hidden layer**
    curNrnHid = m_vecLayers[0].m_vecNeurons.begin();

    int n = 0;
    
    //for each neuron in the hidden layer calculate the error signal
    //and then adjust the weights accordingly
    while(curNrnHid != m_vecLayers[0].m_vecNeurons.end())
    {
      double err = 0;

      curNrnOut = m_vecLayers[1].m_vecNeurons.begin();

      //to calculate the error for this neuron we need to iterate through
      //all the neurons in the output layer it is connected to and sum
      //the error * weights
      while(curNrnOut != m_vecLayers[1].m_vecNeurons.end())
      {
        err += curNrnOut->m_dError * curNrnOut->m_vecWeight[n];

        ++curNrnOut;
      }

      //now we can calculate the error
      err *= curNrnHid->m_dActivation * (1 - curNrnHid->m_dActivation);     
      
      //for each weight in this neuron calculate the new weight based
      //on the error signal and the learning rate. (w is declared outside
      //the loop because it is needed to index the bias weight afterwards)
      int w = 0;

      for (w=0; w<m_iNumInputs; ++w)
      {
        WeightUpdate = err * m_dLearningRate * SetIn[vec][w];

        //calculate the new weight based on the backprop rules and adding
        //in momentum
        curNrnHid->m_vecWeight[w] += WeightUpdate +
                                     curNrnHid->m_vecPrevUpdate[w] * MOMENTUM;

        //keep a record of this weight update
        curNrnHid->m_vecPrevUpdate[w] = WeightUpdate;
      }

      //and the bias (w now indexes the bias weight)
      WeightUpdate = err * m_dLearningRate * BIAS;

      curNrnHid->m_vecWeight[w] += WeightUpdate +
                                   curNrnHid->m_vecPrevUpdate[w] * MOMENTUM;

      //keep a record of this weight update
      curNrnHid->m_vecPrevUpdate[w] = WeightUpdate;

      ++curNrnHid;
      ++n;
    }

  }//next input vector
  return true;
}
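
//The updates above implement the standard backprop delta rule with momentum.
//Writing t for the target, y for an output neuron's activation, a for a
//hidden neuron's activation and L for the learning rate:
//
//	output error:   err_o = (t - y) * y * (1 - y)
//	hidden error:   err_h = a * (1 - a) * sum_o( err_o * w_oh )
//	weight update:  dw(t) = L * err * input  +  MOMENTUM * dw(t-1)
//
//where dw(t-1) is the previous update remembered in m_vecPrevUpdate.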

//----------------------------- Train ------------------------------------
//
//  Given some training data in the form of a CData object this function
//  trains the network until the error is within acceptable limits.
// 
//  the HWND is required to give some graphical feedback
//------------------------------------------------------------------------
bool CNeuralNet::Train(CData* data, HWND hwnd)
{
  vector<vector<double> > SetIn  = data->GetInputSet();
  vector<vector<double> > SetOut = data->GetOutputSet();

   //first make sure the training set is valid
   if ((SetIn.size()     != SetOut.size())  || 
       (SetIn[0].size()  != m_iNumInputs)   ||
       (SetOut[0].size() != m_iNumOutputs))
   {
     MessageBox(NULL, "Inputs != Outputs", "Error", MB_OK);
    
     return false;
   }
  
   //initialize all the weights to small random values
   InitializeNetwork();

   //train using backprop until the SSE is below the user defined
   //threshold
   while( m_dErrorSum > ERROR_THRESHOLD )
   {
     //return false if there are any problems
     if (!NetworkTrainingEpoch(SetIn, SetOut))
     {
       return false;
     }

     ++m_iNumEpochs;
     
     //call the render routine to display the error sum
     InvalidateRect(hwnd, NULL, TRUE);
     UpdateWindow(hwnd);
   }

   m_bTrained = true;
   
   return true;
}
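
//Hypothetical usage (the CData training set and window handle come from the
//rest of the project; the names and values below are for illustration only):
//
//	CNeuralNet net(NumInputs, NumOutputs, NumHiddenNeurons, LearningRate);
//
//	if (net.Train(pTrainingData, hwnd))
//	{
//	  //trained successfully; call Update() to run new input vectors
//	  //through the network
//	}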


//-------------------------------Sigmoid function-------------------------
//
//------------------------------------------------------------------------
double CNeuralNet::Sigmoid(double netinput, double response)
{
	return ( 1 / ( 1 + exp(-netinput / response)));
}
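
//Note: the derivative of this sigmoid with respect to netinput is
//y * (1 - y) / response. The y * (1 - y) error terms used in
//NetworkTrainingEpoch therefore assume ACTIVATION_RESPONSE == 1; any other
//response value scales the true gradient by a constant factor, which in
//practice is absorbed by the learning rate.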

