
network.cpp

This code has seen heavy use.
Language: C++
/***************************************************************************
                          network.cpp  -  description
                             -------------------
    copyright            : (C) 2001, 2002 by Matt Grover
    email                : mgrover@amygdala.org
 ***************************************************************************/
/***************************************************************************
 *                                                                         *
 *   This program is free software; you can redistribute it and/or modify  *
 *   it under the terms of the GNU General Public License as published by  *
 *   the Free Software Foundation; either version 2 of the License, or     *
 *   (at your option) any later version.                                   *
 *                                                                         *
 ***************************************************************************/

// Uncomment for debugging messages
//#define DEBUG_NETWORK

using namespace std;

#include "types.h"
#include <math.h>
#include <stdio.h>
#include <iostream>
#include <string>
#include <iterator>
#include <algorithm>
#include <vector>
#include <list>
#if GCC_VERSION >= 30000
    #include <ext/hash_map>
#else
    #include <hash_map>
#endif
#include "neuron.h"
#include "basicneuron.h"
#include "layer.h"
#include "network.h"
#include "simplespikeinput.h"
#include "functionlookup.h"
#include "utilities.h"

#ifdef DEBUG_NETWORK
#include <sys/time.h>
#endif

class SimpleSpikeInput;

Network::Network()
{
    // initialize the lookup tables
    SetDefaults();
    pspLRes = 100;  // unit = microseconds => 0.1 ms
    pspLSize = 1000;
    functionRef = new FunctionLookup();
    spikeInput = new SimpleSpikeInput(this);    // default input object
    netSize = 0;
    eventRequestCount = 0;
    isLayered = false;
    runCount++;
    trainingMode = true;
    maxSpikeDelay = 0;
    maxSpikeDelaySyn = 0;
    currSpikeDelayOffset = 0;
    maxOffset = 0;
    spikeDelaysOn = false;
    spikeBatchCount = 0;
}

void Network::SetDefaults()
{
    nextNeuronId = 1;
    nextLayerId = 1;
    streamingInput = false;
}

Network::~Network()
{
    // We don't know if the keys are contiguous
    // in net, so use an iterator to delete
    // Neurons instead of subscripting
    hash_map<AmIdInt, Neuron*>::iterator netItr;
    netItr = net.begin();
    while (netItr != net.end()) {
        delete netItr->second;
        netItr->second = 0;
        netItr++;
    }
    net.clear();
    runCount--;
    delete functionRef;
    delete spikeInput;
    for (layer_iterator layer = layers.begin(); layer != layers.end(); layer++) {
        delete layer->second;
    }
}

// Initialize statics
AmTimeInt Network::simTime = 0;
unsigned int Network::runCount = 0;
AmTimeInt Network::simStepSize = 100;  // Default must match Neuron::simStepSize default!

void Network::ResetSimTime()
{
    // FIXME: Some checks need to be developed to make
    // sure that it is safe to reset simTime right now.
    // For example, simTime should not be reset if
    // any Networks are in the run loop.  This should
    // maybe be done as part of a more general reset routine
    // that clears out old data, etc.  A flag would have to be
    // set to indicate to other instances that they need to reset, also.
    simTime = 0;
}

void Network::SetTimeStepSize(AmTimeInt stepSize)
{
    // FIXME: Need a static flag to keep this from running after
    // neurons have been added to a net.
    simStepSize = stepSize;
    Neuron::simStepSize = stepSize;
}

Neuron* Network::AddNeuron(LayerType lType, AmIdInt nId)
{
    if (nId >= nextNeuronId) {
        nextNeuronId = nId + 1;
    }
    //cout << "Adding neuron " << nId << endl;
    BasicNeuron* nrn = new BasicNeuron(nId);
    net[nId] = nrn;
    nrn->SetTableDimensions(pspLSize, pspLRes);
    nrn->SetLookupTables(functionRef);
    nrn->SetLayerType(lType);
    nrn->SetParentNet(this);
    nrn->TrainingOn(trainingMode);
    return nrn;
}

Neuron* Network::AddNeuron(LayerType lType, Neuron* nrn)
{
    AmIdInt nId = 0;
    nId = nrn->GetID();
    if (nId >= nextNeuronId) {
        nextNeuronId = nId + 1;
    }
    //cout << "Adding neuron " << nId << endl;
    net[nId] = nrn;
    nrn->SetTableDimensions(pspLSize, pspLRes);
    nrn->SetLookupTables(functionRef);
    nrn->SetLayerType(lType);
    nrn->SetParentNet(this);
    nrn->TrainingOn(trainingMode);
    return nrn;
}

bool Network::ConnectNeurons(Neuron* preSynapticNeuron,
                             Neuron* postSynapticNeuron,
                             float weight,
                             AmTimeInt delay=0)
{
// FIXME: Sign enforcement is not working. There is confusion
//  as to when a neuron should be designated as inhibitory.
//  This code should maybe run in the neuron instead, and
//  a static function can set the enforceSign flag.
// NOTE: EnforceSign() has been made a static member of Neuron,
//  but this section of code has still not been tested. Remove
//  the fixme once this is confirmed to work correctly.
    if ( Neuron::EnforceSign() ) {
        // Make sure weight has correct sign
        if ( preSynapticNeuron->Inhibitory() ) {
            if (weight > 0.0) {
                cerr << "Inhibitory neuron " << preSynapticNeuron->GetID() <<
                    " has a positive weight!\n";
                return false;
            }
        }
        else if (weight < 0.0) {
            cerr << "Excitatory neuron " << preSynapticNeuron->GetID() <<
                " has a negative weight!\n";
            return false;
        }
    }
    try {
        Synapse* syn = new Synapse(postSynapticNeuron, weight, delay);
        preSynapticNeuron->AddSynapse(syn);
        if (delay > maxSpikeDelay) {
            maxSpikeDelay = delay;
            maxSpikeDelaySyn = syn;
        }
    }
    catch (string& e) {
        cerr << e << endl;
        return false;
    }
    catch (...) {
        cerr << "An error occured while connecting the neurons.\n";
        return false;
    }
    return true;
}

bool Network::ConnectNeurons(AmIdInt preSynapticNeuron,
                             AmIdInt postSynapticNeuron,
                             float weight,
                             AmTimeInt delay=0)
{
    return ConnectNeurons(net[preSynapticNeuron], net[postSynapticNeuron], weight, delay);
}

void Network::AddLayer(Layer* newLayer)
{
    unsigned int lId = newLayer->LayerId();
    if (!lId) {
        lId = nextLayerId;
        newLayer->SetLayerId(lId);
    }
    if (lId >= nextLayerId) {
        nextLayerId = lId + 1;
    }
    // we don't want duplicate Layer IDs
    for (layer_iterator layer = layers.begin(); layer != layers.end(); layer++) {
        Layer *l = layer->second;
        if (l->LayerId() == lId) throw string("Layer ID: " + Utilities::itostr(lId) + " already in use");
    }
    layers[newLayer->LayerId()] = newLayer;
    newLayer->SetLayerParent(this);
    isLayered = true;
}

void Network::ScheduleNEvent(NEvent eventType,
                             AmTimeInt eventTime,
                             Neuron* reqNrn)
{
    SpikeRequest newSpike;
    if (eventType > RMSPIKE) {
        // initialize a new event request
        newSpike.requestTime = simTime;
        newSpike.requestor = reqNrn;
        newSpike.spikeTime = eventTime;
        newSpike.requestOrder = eventRequestCount++;
        // insert into the queue
        if (eventType == SPIKE || eventType == RESPIKE) {
            eventQ.push(newSpike);
        }
        else if (eventType == INPUTSPIKE) {
            inputQ.push(newSpike);
        }
    }
}

void Network::ScheduleNEvent(NEvent eventType,
                             AmTimeInt eventTime,
                             AmIdInt reqNrnId)
{
    Neuron* nrn = net.find(reqNrnId)->second;
    if (nrn) {
        ScheduleNEvent(eventType, eventTime, nrn);
    }
    else {
        string errMsg = "Neuron ID could not be found.";
        throw errMsg;
    }
}

void Network::Run(AmTimeInt maxRunTime)
{
    // This is the main loop
    // If streaming input is being used, keep on going until simTime >= maxRunTime.
    // Otherwise, run until the event queue is empty or
    // until simTime >= maxRunTime -- whichever happens first.
    bool stopRun = false;
    const unsigned int stopTime = maxRunTime + simTime;
    AmTimeInt nextInputTime = 0;
    #ifdef DEBUG_NETWORK
    timeval time1;
    timeval time2;
    int totalTime;
    #endif
    if (simTime == 0)
        simTime = simStepSize;
    // Initialize the delayed spike queue if it has not been done.
    if (!delayedSpikeQ.size()) {
        InitializeDelayedSpikeQ();
    }
    if (eventQ.empty() && inputQ.empty()) {
        if (!streamingInput) {
            stopRun = true;
        }
        else {
            stopRun = false;
        }
    }
    else {
        if (!inputQ.empty()) {
            nextInputTime = inputQ.top().spikeTime;
        }
        else {
            nextInputTime = 0;
        }
        stopRun = false;
    }
    while (!stopRun) {
        while (!eventQ.empty()) {
            const SpikeRequest& topSpike = eventQ.top();
            if (topSpike.spikeTime != simTime)
                break;
            #ifdef DEBUG_NETWORK
            cout << "\nSending spike from Neuron: " << topSpike.requestor->GetID() << endl;
            gettimeofday(&time1, NULL);
            #endif
            topSpike.requestor->SendSpike(simTime);
            eventQ.pop();
            #ifdef DEBUG_NETWORK
            gettimeofday(&time2, NULL);
            totalTime = time2.tv_usec - time1.tv_usec;
            cout << "\nTotal time for sending spike: " << totalTime << "us" << endl;
            #endif
        }
        if (streamingInput) {
            spikeInput->ReadInputBuffer();
            if (!inputQ.empty()) {
                nextInputTime = inputQ.top().spikeTime;
            }
            else {
                nextInputTime = 0;
            }
        }
        while (nextInputTime <= simTime) {
            if (nextInputTime == 0)
                break;
            inputQ.top().requestor->SendSpike(simTime);
            inputQ.pop();
            if (!inputQ.empty()) {
                nextInputTime = inputQ.top().spikeTime;
            }
            else {
                nextInputTime = 0;
            }
        }
        // send the delayed spikes
        // this must be called after Neuron::SendSpike()
        // has been called for the last time during this
        // time step
        if (spikeDelaysOn) {
            SendDelayedSpikes();
        }
        // increment simTime
        if (runCount > 1) {
            // A call to IncrementSimTime() is made if more than one
            // instance of Network is running in a process.  This is done
            // to keep all of the Networks synchronized and to handle any
            // threading issues that may arise due to simTime being static.
            IncrementSimTime();
        }
        else {
            simTime += simStepSize;
        }
        if (simTime >= stopTime) {
            stopRun = true;
        }
        else if (!streamingInput) {
            if (eventQ.empty() && inputQ.empty() && !spikeBatchCount) {
                stopRun = true;
            }
        }
    }
}

void Network::SetSpikeInput(SpikeInput* sIn)
{
    delete spikeInput;
    spikeInput = sIn;
}

void Network::IncrementSimTime()
{
    simTime += simStepSize;
}

void Network::SetTrainingMode(bool tMode)
{
    if (tMode == trainingMode) {
        return;
    }
    trainingMode = tMode;
    hash_map<AmIdInt, Neuron*>::iterator itr;
    itr = net.begin();
    while (itr != net.end()) {
        itr->second->TrainingOn(tMode);
        itr++;
    }
}

void Network::ScheduleSpikeDelay(vector<Synapse*>& axon)
{
    unsigned int maxOffset = delayedSpikeQ.size() - 1;
    for (vector<Synapse*>::iterator it=axon.begin(); it!=axon.end(); ++it) {
        AmTimeInt offset = (*it)->GetOffset();
        offset += currSpikeDelayOffset;
        // If the offset goes past the end of the queue, then
        // start back at the beginning
        if (offset > maxOffset) {
            offset -= (maxOffset + 1);
        }
        delayedSpikeQ[offset].push_back((*it));
    }
    spikeBatchCount += axon.size();
}

void Network::SendDelayedSpikes()
{
    vector<Synapse*>& spikeBatch = delayedSpikeQ[currSpikeDelayOffset];
    if (!spikeBatch.size()) {
        IncrementDelayOffset();
        return;
    }
    // sort the elements in the current offset batch
    // according to neuronId and get a pointer to
    // the first element
    vector<Synapse*>::iterator beginItr = spikeBatch.begin();
    vector<Synapse*>::iterator endItr = spikeBatch.end();
    static CompareSynapse comp;
    sort(beginItr, endItr, comp);
    //Synapse* firstSyn = spikeBatch[0];
    SynapseItr firstSyn = spikeBatch.begin();
    unsigned int numSyn = 0;
    // parse the spikeBatch vector and send one group of spikes
    // to each neuron in the vector.
    for (unsigned int i=0; i<spikeBatch.size(); ++i) {
        if (spikeBatch[i]->GetPostNeuron() != (*firstSyn)->GetPostNeuron()) {
            (*firstSyn)->GetPostNeuron()->InputSpike(firstSyn, simTime, numSyn);
            //firstSyn = spikeBatch[i];
            firstSyn += numSyn;
            numSyn = 1;
        }
        else {
            ++numSyn;
        }
    }
    // send the last batch of delayed spikes
    (*firstSyn)->GetPostNeuron()->InputSpike(firstSyn, simTime, numSyn);
    // clean up
    spikeBatchCount -= spikeBatch.size();
    spikeBatch.clear();
    IncrementDelayOffset();
}

void Network::InitializeDelayedSpikeQ()
{
    if (maxSpikeDelay) {
        spikeDelaysOn = true;
        Neuron::EnableSpikeBatching();
    }
    maxOffset = maxSpikeDelay/simStepSize;
    delayedSpikeQ.reserve(maxOffset+1);
    delayedSpikeQ.resize(maxOffset+1);
    for (unsigned int i=0; i<delayedSpikeQ.size(); ++i) {
        // TODO: Assuming 100 delayed spikes per offset for now.
        // A more intelligent algorithm to determine proper
        // sizing should be developed later on.
        delayedSpikeQ[i].reserve(100);
    }
}

inline void Network::IncrementDelayOffset()
{
    if (currSpikeDelayOffset >= maxOffset) {
        currSpikeDelayOffset = 0;
    }
    else {
        ++currSpikeDelayOffset;
    }
}

Layer * Network::GetLayer(AmIdInt layerId)
{
    return layers[layerId];
}
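The following is a minimal usage sketch, not part of the original file. It only exercises member functions defined above (SetTimeStepSize, AddNeuron, ConnectNeurons, ScheduleNEvent, Run). The LayerType enumerators shown are placeholders, since that enum lives in a header not included in this listing; INPUTSPIKE is taken from the event handling in ScheduleNEvent(). Times are in microseconds, matching the comments in the constructor.

// Usage sketch (assumptions noted in comments); build against the Amygdala headers.
#include "network.h"

int main()
{
    Network net;
    net.SetTimeStepSize(100);            // 100 us per step, same as the default above

    // Create two neurons by ID. AddNeuron() attaches the lookup tables,
    // layer type and parent net internally.
    net.AddNeuron(INPUT_LAYER, 1);       // INPUT_LAYER: placeholder LayerType value
    net.AddNeuron(OUTPUT_LAYER, 2);      // OUTPUT_LAYER: placeholder LayerType value

    // Excitatory connection (positive weight) with a 200 us axonal delay,
    // using the AmIdInt overload of ConnectNeurons().
    net.ConnectNeurons(1u, 2u, 0.8f, 200u);

    // Queue an input spike for neuron 1 at t = 1000 us, then run for 10 ms.
    net.ScheduleNEvent(INPUTSPIKE, 1000, 1u);
    net.Run(10000);
    return 0;
}

Note that Run() takes a relative duration: stopTime is computed as maxRunTime + simTime, and simTime is a static member, so consecutive calls to Run() continue the same simulation clock rather than restarting it.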
