亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频

? 歡迎來到蟲蟲下載站! | ?? 資源下載 ?? 資源專輯 ?? 關于我們
? 蟲蟲下載站

?? statisticsoutput.cpp

?? amygdata的神經網絡算法源代碼
?? CPP
字號:
/***************************************************************************
                          statisticsoutput.cpp  -  description
                             -------------------
    begin                : Wed Oct 24 2001
    copyright            : (C) 2001 by Matt Grover
    email                : mpgrover@sourceforge.net
 ***************************************************************************/

/***************************************************************************
 *                                                                         *
 *   This program is free software; you can redistribute it and/or modify  *
 *   it under the terms of the GNU General Public License as published by  *
 *   the Free Software Foundation; either version 2 of the License, or     *
 *   (at your option) any later version.                                   *
 *                                                                         *
 ***************************************************************************/

#include "config.h"

#include <cstdio>
#include <map>
#include <stdexcept>
#include <string>
#include <vector>

#include <amygdala/network.h>
#include <amygdala/neuron.h>
#include <amygdala/statisticsoutput.h>

using namespace std;
using namespace Amygdala;

// Collects spike-output statistics for a network: per-neuron and combined
// spike histograms, mean/peak firing rates, and optional logging of spike
// times to a text file.  Time values appear to be in microseconds and
// stepSize in milliseconds (binSize = stepSize * 1000, rates scaled by
// 1e6) -- TODO confirm units against the amygdala headers.

StatisticsOutput::StatisticsOutput()
    : SpikeOutput()
{
    maxPeakId = 0;
    maxPeakTime = 0;
    meanRate = 0.0;
    mostActiveCount = 0;
    mostActiveId = 0;
    totalSpikeCount = 0;
    beginTime = 0;
    stepSize = 1;
    logging = false;
    logFd = NULL;
    traceGroups = 0;
    // FIX: these were left uninitialized; PeakRateTime() compares
    // combinedPeakRate against 0.0, which read an indeterminate value.
    combinedPeakRate = 0.0;
    combinedPeakTime = 0;
}

StatisticsOutput::~StatisticsOutput()
{
    // The map/vector members clean themselves up; only the log file
    // handle needs explicit release.
    combinedHistogram.clear();
    if (logFd && logging) fclose(logFd);
}

// Record one output spike: append eventTime to the neuron's history and
// (optionally) write it to the log file.  Also remembers the time of the
// most recent event so cached histograms can be invalidated.
void StatisticsOutput::OutputEvent(Neuron* nrn, AmTimeInt eventTime)
{
    if (logging) Log(nrn, eventTime);
    vector<AmTimeInt>& hist = outputHistory[nrn->GetId()];
    hist.push_back(eventTime);
    calcTime = eventTime;
}

// Enable tracing (logging) for one output group by setting its bit in the
// traceGroups mask.  Throws runtime_error for ids outside the mask width.
void StatisticsOutput::AddTrace(unsigned int groupId)
{
    if (groupId >= (sizeof(AmGroupInt) * 8)) {
        throw runtime_error("GroupId is too large");
    }
    // FIX: shift in the width of AmGroupInt, not unsigned int.  The guard
    // above permits groupId up to the AmGroupInt width, but the original
    // shifted a (possibly narrower) unsigned int, which is undefined for
    // groupId >= 32 when AmGroupInt is a 64-bit type.
    AmGroupInt state = static_cast<AmGroupInt>(1) << groupId;
    traceGroups |= state;
}

// Discard all collected histories and restart statistics collection from
// the current simulation time.
void StatisticsOutput::ClearHistory()
{
    // map::clear() destroys the contained vectors, so the original
    // element-by-element clearing was redundant.
    histogram.clear();
    outputHistory.clear();
    combinedHistogram.clear();
    beginTime = Network::GetNetworkRef()->SimTime();
}

// Build (or return the cached) histogram of spike counts per time bin,
// summed over all neurons.  Side effects: updates maxPeakId/maxPeakTime
// (neuron and bin with the highest single-bin count) and
// mostActiveId/mostActiveCount (neuron with the most total spikes).
vector<unsigned int>& StatisticsOutput::Histogram()
{
    // TODO: Check this function for correctness (carried over from the
    // original).
    unsigned int i, eventCount = 0, eventIdx = 0, numElem = 0;
    AmTimeInt endStep, binSize;

    // Invalidate the cache when the simulation has advanced.
    if (calcTime != Network::GetNetworkRef()->SimTime()) {
        calcTime = Network::GetNetworkRef()->SimTime();
        lastCalcTime = calcTime;
        combinedHistogram.clear();
    }
    if (!combinedHistogram.size()) {
        // stepSize is in ms, event times presumably in us -- TODO confirm.
        binSize = stepSize * 1000;
        numElem = (calcTime - beginTime) / binSize;
        for (i = 0; i < numElem; i++) {
            combinedHistogram.push_back(0);
        }
        maxPeakId = 0;
        maxPeakTime = 0;
        // FIX: reset the most-active statistics as well; they previously
        // survived recomputation (and ClearHistory) and could report a
        // stale neuron id and count.
        mostActiveCount = 0;
        mostActiveId = 0;

        // Fill the histogram from the raw per-neuron data in outputHistory.
        map< AmIdInt, vector<AmTimeInt> >::iterator histItr = outputHistory.begin();
        while (histItr != outputHistory.end()) {
            vector<AmTimeInt>& events = histItr->second;
            unsigned int peakCount = 0;
            eventCount = 0;
            eventIdx = 0;
            for (i = 0; i < combinedHistogram.size(); i++) {
                endStep = (i + 1) * binSize + beginTime;
                if (eventIdx >= events.size()) {
                    break;
                }
                // Count the events that fall before the end of this bin.
                // Relies on each neuron's event list being time-ordered.
                while (events[eventIdx] < endStep) {
                    eventCount++;
                    if (++eventIdx >= events.size()) {
                        break;
                    }
                }
                combinedHistogram[i] += eventCount;
                if (eventCount > peakCount) {
                    peakCount = eventCount;
                    maxPeakId = histItr->first;
                    maxPeakTime = (i * binSize + beginTime) / 1000;
                }
                if (events.size() > mostActiveCount) {
                    mostActiveCount = events.size();
                    mostActiveId = histItr->first;
                }
                eventCount = 0;
            }
            histItr++;
        }
    }
    return combinedHistogram;
}

// Build (or return the cached) histogram of spike counts per time bin for
// one neuron.  The per-neuron caches are invalidated together whenever the
// simulation time has advanced.
vector<unsigned int>& StatisticsOutput::Histogram(AmIdInt neuronId)
{
    // TODO: Check this function for correctness (carried over from the
    // original).
    unsigned int i, eventCount = 0, eventIdx = 0, numElem = 0;
    AmTimeInt endStep, binSize;

    // Clear out stale per-neuron histograms.
    if (calcTime != Network::GetNetworkRef()->SimTime()) {
        calcTime = Network::GetNetworkRef()->SimTime();
        lastCalcTime = calcTime;
        map< AmIdInt, vector<unsigned int> >::iterator itr = histogram.begin();
        while (itr != histogram.end()) {
            itr->second.clear();
            itr++;
        }
    }
    vector<unsigned int>& nidHistogram = histogram[neuronId];
    if (!nidHistogram.size()) {
        binSize = stepSize * 1000;
        numElem = (calcTime - beginTime) / binSize;
        for (i = 0; i < numElem; i++) {
            nidHistogram.push_back(0);
        }
        vector<AmTimeInt>& events = outputHistory[neuronId];
        eventCount = 0;
        eventIdx = 0;
        for (i = 0; i < nidHistogram.size(); i++) {
            endStep = (i + 1) * binSize + beginTime;
            if (eventIdx >= events.size()) {
                break;
            }
            while (events[eventIdx] < endStep) {
                eventCount++;
                if (++eventIdx >= events.size()) {
                    break;
                }
            }
            nidHistogram[i] += eventCount;
            eventCount = 0;
        }
    }
    return nidHistogram;
}

// Build a combined (all-neuron) histogram restricted to [start, end).
// Returned by value; not cached.  Returns an empty vector when the
// interval is empty or inverted.
vector<unsigned int> StatisticsOutput::Histogram(AmTimeInt start, AmTimeInt end)
{
    // TODO: Check this function for correctness (carried over from the
    // original).
    unsigned int i, eventCount = 0, eventIdx = 0, numElem = 0;
    AmTimeInt endStep, binSize;
    vector<unsigned int> intervalHist;

    if (start >= end) {
        return intervalHist;
    }
    binSize = stepSize * 1000;
    numElem = (end - start) / binSize;
    for (i = 0; i < numElem; i++) {
        intervalHist.push_back(0);
    }
    map< AmIdInt, vector<AmTimeInt> >::iterator histItr = outputHistory.begin();
    while (histItr != outputHistory.end()) {
        vector<AmTimeInt>& events = histItr->second;
        eventCount = 0;
        eventIdx = 0;
        for (i = 0; i < intervalHist.size(); i++) {
            endStep = (i + 1) * binSize + start;
            if (eventIdx >= events.size()) {
                break;
            }
            while (events[eventIdx] < endStep) {
                eventCount++;
                if (++eventIdx >= events.size()) {
                    break;
                }
            }
            intervalHist[i] += eventCount;
            eventCount = 0;
        }
        histItr++;
    }
    return intervalHist;
}

// Total number of recorded output spikes across all neurons.
unsigned int StatisticsOutput::TotalOutputSpikes()
{
    unsigned int numEvents = 0;
    // Keep the combined histogram (and its derived statistics) current.
    if (!combinedHistogram.size()) {
        Histogram();
    }
    map< AmIdInt, vector<AmTimeInt> >::iterator outItr = outputHistory.begin();
    while (outItr != outputHistory.end()) {
        numEvents += outItr->second.size();
        outItr++;
    }
    return numEvents;
}

// Total number of recorded output spikes for one neuron.
unsigned int StatisticsOutput::TotalOutputSpikes(AmIdInt neuronId)
{
    if (!combinedHistogram.size()) {
        Histogram();
    }
    unsigned int numEvents = outputHistory[neuronId].size();
    return numEvents;
}

// Mean spike rate over all neurons since beginTime.  The 1e6 scale factor
// suggests simulation time is in microseconds and the result in spikes/s
// -- TODO confirm.
float StatisticsOutput::MeanSpikeRate()
{
    unsigned int numEvents = 0;
    AmTimeInt endTime = Network::GetNetworkRef()->SimTime();
    float avg;
    map< AmIdInt, vector<AmTimeInt> >::iterator outItr = outputHistory.begin();
    while (outItr != outputHistory.end()) {
        numEvents += outItr->second.size();
        outItr++;
    }
    if ((endTime - beginTime) > 0) {
        avg = (float(numEvents) / float(endTime - beginTime)) * 1000000.0;
    }
    else {
        avg = 0.0;
    }
    return avg;
}

// Mean spike rate for one neuron since beginTime.
float StatisticsOutput::MeanSpikeRate(AmIdInt neuronId)
{
    float avg;
    AmTimeInt endTime = Network::GetNetworkRef()->SimTime();
    vector<AmTimeInt>& hist = outputHistory[neuronId];
    unsigned int numEvents = hist.size();
    if ((endTime - beginTime) > 0) {
        avg = (float(numEvents) / float(endTime - beginTime)) * 1000000.0;
    }
    else {
        avg = 0.0;
    }
    return avg;
}

// Peak combined spike rate (spikes/s) over all histogram bins.
float StatisticsOutput::PeakSpikeRate()
{
    // TODO: This algorithm is good enough for temporary use, but a better
    // one needs to be produced.  If a small stepSize is in use it gives
    // wildly inaccurate results (1 spike / 1 ms step = 1000 spikes/s).
    if (!combinedHistogram.size()) {
        Histogram();
    }
    // FIX: the original loop body was empty, so combinedPeakRate and
    // combinedPeakTime were never computed and PeakRateTime() returned
    // garbage.  Mirror the per-neuron PeakSpikeRate/PeakRateTime logic.
    unsigned int i, maxIndex = 0, maxCount = 0;
    for (i = 0; i < combinedHistogram.size(); i++) {
        if (combinedHistogram[i] > maxCount) {
            maxCount = combinedHistogram[i];
            maxIndex = i;
        }
    }
    combinedPeakRate = (float(maxCount) / float(stepSize)) * 1000.0;
    combinedPeakTime = maxIndex * stepSize;
    return combinedPeakRate;
}

// Peak spike rate (spikes/s) for one neuron.
float StatisticsOutput::PeakSpikeRate(AmIdInt neuronId)
{
    // TODO: same accuracy caveat as PeakSpikeRate() above.
    // FIX: bind by reference; Histogram(id) returns a reference, and the
    // original copied the whole vector.
    vector<unsigned int>& nHist = Histogram(neuronId);
    unsigned int i, maxCount = 0;
    for (i = 0; i < nHist.size(); i++) {
        if (nHist[i] > maxCount) {
            maxCount = nHist[i];
        }
    }
    float rate = (float(maxCount) / float(stepSize)) * 1000.0;
    return rate;
}

// Time (in stepSize units) of the combined peak spike rate.
AmTimeInt StatisticsOutput::PeakRateTime()
{
    if (!combinedHistogram.size()) {
        Histogram();
    }
    if (combinedPeakRate == 0.0) {
        PeakSpikeRate();
    }
    return combinedPeakTime;
}

// Time (in stepSize units) of one neuron's peak spike rate.
AmTimeInt StatisticsOutput::PeakRateTime(AmIdInt neuronId)
{
    // FIX: bind by reference to avoid copying the histogram vector.
    vector<unsigned int>& nHist = Histogram(neuronId);
    unsigned int i, maxIndex = 0, maxCount = 0;
    for (i = 0; i < nHist.size(); i++) {
        if (nHist[i] > maxCount) {
            maxCount = nHist[i];
            maxIndex = i;
        }
    }
    return maxIndex * stepSize;
}

// Id of the neuron with the highest single-bin spike count.
AmIdInt StatisticsOutput::PeakNeuron()
{
    if (!combinedHistogram.size()) {
        Histogram();
    }
    return maxPeakId;
}

// Id of the neuron with the most total spikes.
AmIdInt StatisticsOutput::MostActiveNeuron()
{
    if (!combinedHistogram.size()) {
        Histogram();
    }
    return mostActiveId;
}

// Change the histogram bin width; invalidates every cached histogram.
void StatisticsOutput::SetStepSize(AmTimeInt step)
{
    if (step != stepSize) {
        combinedHistogram.clear();
        // map::clear() destroys the contained vectors.
        histogram.clear();
        // FIX: the cached peak statistics depend on the old bin width.
        combinedPeakRate = 0.0;
        combinedPeakTime = 0;
    }
    stepSize = step;
}

// Open a log file and record spike times for events in [start, end].
// Throws runtime_error when the file cannot be opened.
void StatisticsOutput::LogSpikeTimes(string filename, AmTimeInt start, AmTimeInt end)
{
    // FIX: don't leak a previously opened log file on a second call.
    if (logFd && logging) {
        fclose(logFd);
    }
    logFd = fopen(filename.c_str(), "w");
    // FIX: throw runtime_error (matching AddTrace) instead of a raw
    // std::string, which no `catch (const std::exception&)` can see; also
    // fixes the "Canot" typo.
    if (!logFd) throw runtime_error("Cannot open file: " + filename);
    logging = true;
    logStart = start;
    logEnd = end;
}

// Write one spike event to the log file if its neuron belongs to a traced
// group; closes the log when the event is past logEnd.
void StatisticsOutput::Log(Neuron* nrn, AmTimeInt eventTime)
{
    if (eventTime > logEnd) {
        fclose(logFd);
        // FIX: don't leave a dangling FILE* behind after closing.
        logFd = NULL;
        logging = false;
        return;
    }
    // FIX: logStart was stored by LogSpikeTimes() but never honored, so
    // events before the requested window were logged anyway.
    if (eventTime < logStart) {
        return;
    }
    // NOTE(review): despite its name, GetOutputGroupIndex() is used here
    // as a bitmask to test against traceGroups -- confirm against the
    // Neuron header.
    bool traceNeuron = (traceGroups & nrn->GetOutputGroupIndex()) != 0;
    if (traceNeuron) {
        // FIX: AmTimeInt/AmIdInt passed to "%ld" is undefined behavior for
        // unsigned types; cast explicitly to match the conversion.
        fprintf(logFd, "%lu %lu\n",
                (unsigned long)(eventTime / 1000),
                (unsigned long)nrn->GetId());
    }
}

// Stop logging and release the log file handle.
void StatisticsOutput::CloseLog()
{
    if (logFd && logging) fclose(logFd);
    logging = false;
    logFd = NULL;
}

?? 快捷鍵說明

復制代碼 Ctrl + C
搜索代碼 Ctrl + F
全屏模式 F11
切換主題 Ctrl + Shift + D
顯示快捷鍵 ?
增大字號 Ctrl + =
減小字號 Ctrl + -
亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频
韩国午夜理伦三级不卡影院| 日韩女优制服丝袜电影| 欧美日韩二区三区| 国产日韩亚洲欧美综合| 亚洲永久精品大片| 福利电影一区二区三区| 欧美一级理论片| 一区二区激情小说| 成人app在线观看| 久久美女艺术照精彩视频福利播放| 亚洲精选视频在线| 成人久久18免费网站麻豆 | 久久激情五月激情| 在线国产电影不卡| 国产精品久久网站| 国产一区二区三区精品视频| 欧美一级夜夜爽| 亚洲国产一区二区视频| 91在线观看污| 国产精品美女久久久久久2018 | www.66久久| 久久久久久黄色| 狠狠色丁香久久婷婷综| 欧美哺乳videos| 日本网站在线观看一区二区三区| 欧美综合视频在线观看| 亚洲精品美腿丝袜| 色综合天天综合狠狠| 亚洲色图.com| 91影院在线免费观看| 国产精品高潮呻吟| 岛国精品在线播放| 国产亚洲精品资源在线26u| 狠狠色丁香婷婷综合| 久久久亚洲高清| 国产sm精品调教视频网站| 国产日韩精品一区二区三区在线| 国产精品99久久久久久久vr| 久久嫩草精品久久久精品| 久久99国产精品免费网站| 日韩久久精品一区| 国产真实乱子伦精品视频| 久久亚洲精精品中文字幕早川悠里 | 日韩欧美视频在线| 精品系列免费在线观看| 精品国产乱码久久久久久蜜臀| 精品一区二区三区视频| 国产亚洲一区二区三区| av一区二区三区黑人| 亚洲素人一区二区| 欧美亚洲图片小说| 老司机一区二区| 中文av字幕一区| 色8久久精品久久久久久蜜| 天堂一区二区在线| 久久亚洲精华国产精华液| 丁香激情综合国产| 亚洲一区二区3| 欧美大片在线观看| 91亚洲精品一区二区乱码| 亚洲成人动漫一区| 国产喷白浆一区二区三区| 色婷婷av一区二区三区软件| 日韩精品每日更新| 中文字幕免费不卡| 欧美精品乱人伦久久久久久| 国产成人亚洲综合色影视| 亚洲精品视频在线观看网站| 欧美成人精品二区三区99精品| 不卡一卡二卡三乱码免费网站| 亚洲一区二区三区四区中文字幕| 久久中文字幕电影| 在线观看视频一区二区| 国产精品亚洲一区二区三区妖精 | 国产日韩视频一区二区三区| 在线中文字幕一区| 国产精品中文字幕日韩精品| 一区二区三区视频在线看| 欧美大片一区二区| 91成人在线精品| 国产suv一区二区三区88区| 首页欧美精品中文字幕| 国产精品久99| 久久五月婷婷丁香社区| 欧美日韩一区二区三区不卡| 成人激情免费网站| 精品中文字幕一区二区| 亚洲国产综合91精品麻豆| 国产精品卡一卡二卡三| 精品久久久久久久人人人人传媒| 欧洲亚洲精品在线| 成人小视频免费在线观看| 青娱乐精品在线视频| 亚洲大片免费看| 亚洲伦在线观看| 国产精品免费网站在线观看| 欧美精品一区二区三| 5858s免费视频成人| 欧美午夜精品一区二区三区| av中文字幕不卡| 成熟亚洲日本毛茸茸凸凹| 经典一区二区三区| 日韩激情在线观看| 天天操天天综合网| 亚洲成av人影院在线观看网| 亚洲精品日日夜夜| 亚洲精品一二三| 中文字幕亚洲一区二区va在线| 久久精品男人的天堂| 2020国产精品| 欧美精品一区二区三区视频| 欧美一区二区女人| 欧美一区二区三区人| 欧美精品在线一区二区三区| 欧美在线制服丝袜| 欧美老女人在线| 7777精品伊人久久久大香线蕉的 | 中文字幕欧美激情| 国产精品久久午夜| 亚洲欧美日韩在线不卡| 亚洲精品福利视频网站| 亚洲精品视频一区| 天堂av在线一区| 麻豆久久久久久久| 国产传媒日韩欧美成人| 成人国产精品免费网站| 99国产精品国产精品毛片| 色综合色综合色综合| 欧美羞羞免费网站| 欧美一区二区在线免费播放| 欧美一区二区三区在线视频| 日韩欧美不卡在线观看视频| 国产色91在线| 亚洲男同性恋视频| 日日夜夜一区二区| 国产伦精品一区二区三区免费 | 国产精品99久久久久久久vr| 波波电影院一区二区三区| 色综合亚洲欧洲| 91精品国产91久久久久久一区二区 | 2023国产精品自拍| 国产精品第一页第二页第三页| 伊人色综合久久天天人手人婷| 午夜精品一区在线观看| 精品中文字幕一区二区| fc2成人免费人成在线观看播放| 欧美三级在线看| 久久久精品免费免费| 一区二区三区不卡在线观看| 日韩激情视频网站| 成人午夜伦理影院| 欧美日韩国产大片| 国产婷婷色一区二区三区在线| 亚洲欧美日韩综合aⅴ视频| 久久精品国产99国产| 91麻豆国产在线观看| 日韩欧美一级二级三级久久久| 国产精品乱码人人做人人爱| 日韩一区精品视频| 国产 日韩 欧美大片| 91精品婷婷国产综合久久竹菊| 中文字幕国产一区二区| 蜜臀av性久久久久蜜臀av麻豆| gogo大胆日本视频一区| 
26uuu久久天堂性欧美| 亚洲综合视频在线| 成人一区在线看| 日韩精品一区在线观看| 亚洲综合一区二区三区| 国产福利一区二区| 欧美一区二区视频在线观看 | 免费在线观看视频一区| 99精品欧美一区二区三区综合在线| 欧美一区二区视频网站| 亚洲综合色网站| 色综合网站在线| 亚洲国产成人私人影院tom| 国产一区亚洲一区| 欧美一区二区在线免费播放| 一个色综合网站| 99久久国产免费看| 中文子幕无线码一区tr| 国产传媒欧美日韩成人| 精品国产91洋老外米糕| 日本欧美加勒比视频| 欧美私人免费视频| 一区二区在线观看视频在线观看| 不卡一区二区在线| 国产精品私人影院| 国产成人h网站| 欧美精彩视频一区二区三区| 国产一区 二区| 国产人久久人人人人爽| 国产精品自产自拍| 国产日韩精品一区二区浪潮av | 日本一区二区综合亚洲| 国产精品一区二区不卡| 国产日韩亚洲欧美综合| 成人aaaa免费全部观看| 亚洲视频图片小说|