亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频

? 歡迎來到蟲蟲下載站! | ?? 資源下載 ?? 資源專輯 ?? 關(guān)于我們
? 蟲蟲下載站

?? gibbssampler.cpp

?? gibbs
?? CPP
?? 第 1 頁 / 共 2 頁
字號:
#include "GibbsSampler.h"
#include "VarConfig.h"
#include <algorithm>
#include "Prob.h"

// Debug helper: prints before/after markers around a heap allocation.
// NOTE(review): the `new int[1000]` is never deleted, so every expansion of
// this macro leaks 4000 bytes — presumably a deliberate heap-corruption probe
// for debugging only; confirm it is not used in release builds.
#define CHECKMEM(x) \
{ \
    cout << "Pre-" << x << endl; \
    int* foo = new int[1000]; \
    cout << "Post-" << x << endl; \
}

// Square of x; parenthesized so it is safe on compound expressions.
#define SQR(x) ((x) * (x))

#if 0
// Unused -- we use the VarConfig class instead.
int GibbsSampler::configIndex(VarSet& allVars, list<int>& testIndices)
{
    int rangeProduct = 1;
    int ret = 0;
    list<int>::iterator i;
    for (i = testIndices.begin(); i != testIndices.end(); i++) {
        // Test vars must be discrete.
        ret += rangeProduct * (int)allVars[*i];
        rangeProduct *= model.getRange(*i);
    }
    return ret;
}
#endif

// Old way of burning in -- run a constant number of iterations.
// Performs `burnInIters` full Gibbs sweeps over `chain`, resampling every
// non-evidence variable from its Markov-blanket distribution and leaving
// evidence (tested) variables fixed.  Mutates `chain` in place.
void GibbsSampler::burnInChain(VarSet &chain, const VarSet& evidence, 
        int burnInIters) const
{
    for (int iter = 0; iter < burnInIters; iter++) {
        for (int v = 0; v < evidence.getNumVars(); v++) {
            if (!evidence.isTested(v)) {
                chain[v] = model.MBsample(v, chain);
            }
        }
    }
}

// Draws a random index from the discrete distribution `dist` (entries are
// assumed to sum to 1) by inverse-transform sampling: walk the entries,
// subtracting each probability from a uniform draw until it goes negative.
// NOTE(review): uses C rand()/RAND_MAX for the uniform variate; if
// rounding keeps `p` non-negative past the last entry, the final index is
// returned and a diagnostic is printed.
unsigned int GibbsSampler::sampleFromDist(const vector<double>& dist) const
{
    double p = (double)rand()/RAND_MAX;
    for (unsigned int v = 0; v < dist.size(); v++) {
        p -= dist[v];
        if (p < 0.0) {
            return v;
        }
    }
    // DEBUG
    cout << "Error: returning last value in sampleFromDist()\n";
    return (dist.size() - 1);
}

// Convergence diagnostic over all chains and all summary statistics.
// For each statistic it computes the average within-chain variance and the
// between-chain variance, forms the ratio
//     R = ((n-1)/n * W + B) / W,
// and returns sqrt of the largest R seen (this matches the form of the
// Gelman-Rubin potential-scale-reduction diagnostic — TODO confirm intended
// reference).  `summaries` holds per-chain running sums of each statistic,
// `sqSummaries` the running sums of squares, and `n` the number of samples
// accumulated in each chain.
double GibbsSampler::testConvergence(vector<vector<double> > summaries,
        vector<vector<double> > sqSummaries, int n) const
{
    double maxR = 0.0;
    int numChains = summaries.size();
    int numSummaries = sqSummaries[0].size();
    vector<double> avgSummaryVariance(numSummaries);
    // Iterate through all chains
    for (int c = 0; c < numChains; c++) {
        // Compute within-chain variance for each summary statistic
        for (int s = 0; s < numSummaries; s++) {
#if 0
            // HACK: We require that every single state receive at least a
            // fractional count in some chain before we converge.
            if (summaries[c][s] == 1.0) {
                // DEBUG
                cout << "c = " << c << "; s = " << s << endl;
                return 1000.0;
            }
#endif
            // Sample variance of this statistic within chain c, from the
            // running sum and sum-of-squares.
            double chainVar = (sqSummaries[c][s] - SQR(summaries[c][s])/n)/(n-1);
            avgSummaryVariance[s] += chainVar/numChains;
        }
    }
    for (int s = 0; s < numSummaries; s++) {
        // Compute between-chain variance for each summary statistic
        double squareSum = 0.0;
        double sum = 0.0;
        for (int c = 0; c < numChains; c++) {
            squareSum += SQR(summaries[c][s]/n);
            sum += summaries[c][s]/n;
        }
        double betweenChainVariance
            = (squareSum - SQR(sum)/numChains)/(numChains-1);
        // Compute convergence criteria R
        double R = ((n-1.0)/n*avgSummaryVariance[s]
                      + betweenChainVariance)/avgSummaryVariance[s];
        // Report largest convergence statistic
        if (R > maxR) {
            maxR = R;
#if 0
        // DEBUG
        cout << "Counts:";
        for (int c = 0; c < numChains; c++) {
            cout << " " << summaries[c][s];
            cout << " (" << sqSummaries[c][s] << ")";
        }
        cout << "\n";
        cout << "Within: " << avgSummaryVariance[s];
        cout << "; Between: " << betweenChainVariance;
        cout << "; R: " << sqrt(R) << endl;
        // END DEBUG
#endif
        }
    }
    // DEBUG
    //cout << "sqrt(R) = " << sqrt(maxR) << endl;
    return sqrt(maxR);
}

// Estimates how many more sampling iterations are needed.  Treats the chains
// as independent estimates of each summary statistic, computes the standard
// error of their mean, and via the normal-approximation sample-size formula
// returns the largest iteration count v such that a 95% confidence interval
// is within 5% of the estimated value.
double GibbsSampler::predictIters(vector<vector<double> > summaries,
        vector<vector<double> > sqSummaries, int n) const
{
    double maxV = 0.0;
    int numChains = summaries.size();
    int numSummaries = sqSummaries[0].size();
    vector<double> avgSummaryVariance(numSummaries);
    // Iterate through all summary statistics
    for (int s = 0; s < numSummaries; s++) {
        // Consider the chains as independent estimates of each summary
        // statistic, and compute their standard deviation.
        double squareSum = 0.0;
        double sum = 0.0;
        for (int c = 0; c < numChains; c++) {
            squareSum += SQR(summaries[c][s]/n);
            sum += summaries[c][s]/n;
        }
        // See page 740 of DeGroot and Schervish, 3rd ed.
        double S = sqrt((squareSum - SQR(sum)/numChains)/numChains);
        double sigma_hat = sqrt((double)n) * S;
        // Compute number of expected iterations (with 95% certainty)
        // to get the estimate correct within 5%.
        // (See page 707, eqn 11.1.5 of DeGroot and Schervish, 3rd ed.)
        double epsilon = 0.05 * sum/numChains;
        double v = SQR(1.96 * sigma_hat/epsilon);
        // Report largest number of iterations to run
        if (v > maxV) {
            maxV = v;
#if 0
            // DEBUG
            cout << "epsilon = " << epsilon << endl;
            cout << "sigma_hat = " << sigma_hat << endl;
            cout << "n = " << n << endl;
            cout << "sum = " << sum << endl;
            cout << "squareSum = " << squareSum << endl;
#endif
#if 0
            double mean = sum/numChains;
            double maxRatio = 1.0;
            for (int c = 0; c < numChains; c++) {
                double currStat = summaries[c][s]/n;
                if (currStat/mean > maxRatio) {
                    maxRatio = currStat/mean;
                }
                if (mean/currStat > maxRatio) {
                    maxRatio = mean/currStat;
                }
            }
            cout << "Max ratio: " << maxRatio << endl;
#endif
        }
    }
    return maxV;
}

// Runs multi-chain Gibbs sampling to estimate the marginal distribution of
// every non-evidence variable, storing the normalized results in
// `marginals`.  Burn-in either runs for a fixed number of iterations
// (`fixedIters`) or until testConvergence() drops below `convergenceRatio`;
// sampling then continues until predictIters() says enough samples have been
// drawn.  Counts are Rao-Blackwellized (the full Markov-blanket distribution
// is accumulated, not just the sampled value) when RAOBLACKWELL is defined.
void GibbsSampler::runMarginalInference(const VarSet& evidence)
{
    // We use this vector to convert var/value pairs into summary 
    // statistic indices.
    vector<vector<int> > index(model.getNumVars());
    int numSummaries = 0;
    for (int v = 0; v < model.getNumVars(); v++) {
        for (int val = 0; val < model.getRange(v); val++) {
            index[v].push_back(numSummaries++);
        }
    }
    // Keep track of marginal counts for all test variables
    vector<vector<double> > counts(numChains, vector<double>(numSummaries));
    vector<vector<double> > sqCounts(numChains, vector<double>(numSummaries));
    for (int c = 0; c < numChains; c++) {
        for (int s = 0; s < numSummaries; s++) {
            counts[c][s] = 0.0;
            sqCounts[c][s] = 0.0;
            //counts[c][s] = 1.0;
            //sqCounts[c][s] = 1.0;
        }
    }
    // Initialize and burn-in all chains
    vector<VarSet> chains(numChains);
    for (int c = 0; c < numChains; c++) {
        chains[c] = evidence;
        model.wholeSample(chains[c]);
        // Use a fixed number of burn-in iters, if appropriate
        if (fixedIters) {
            burnInChain(chains[c], evidence, burnInIters);
        }
    }
    // Sample, sample, sample until convergence
    double burnin_iter = 0;
    double sampling_iter = 0;
    double predicted_iters = minIters;
    bool burnin_done = fixedIters;
    while (1) {
        if (burnin_done) {
            sampling_iter++;
        } else {
            burnin_iter++;
        }
        // Sample all variables and increase counts
        for (int c = 0; c < numChains; c++) {
            for (int v = 0; v < model.getNumVars(); v++) {
                // Don't resample evidence variables
                if (evidence.isTested(v)) {
                    continue;
                }
                
                // Sample
                vector<double> dist = model.MBdist(v, chains[c]);
                chains[c][v] = sampleFromDist(dist);
#define RAOBLACKWELL
#ifdef RAOBLACKWELL
                // Update (Rao-Blackwellized) counts
                for (int val = 0; val < model.getRange(v); val++) {
                    // Update counts using the distribution
                    int s = index[v][val];
                    counts[c][s] += dist[val];
                    sqCounts[c][s] += dist[val]*dist[val];
                }
#else
                // Update counts
                int s = index[v][(int)chains[c][v]];
                // DEBUG
                //cout << "s = " << s << endl;
                counts[c][s]++;
                sqCounts[c][s]++;
#endif
            }
        }
        // After completing some minimum number of iterations,
        // check for convergence of our burn-in period
        // (checked only every 100 iterations to amortize the cost).
        if (!burnin_done && burnin_iter >= burnInIters 
                && ((int)burnin_iter % 100 == 0) 
                && (testConvergence(counts, sqCounts, (int)burnin_iter)
                    < convergenceRatio)) {
            // Stop burn-in
            burnin_done = true;
            // Throw away counts for burn-in period
            for (int c = 0; c < numChains; c++) {
                for (int s = 0; s < numSummaries; s++) {
                    counts[c][s] = 0.0;
                    sqCounts[c][s] = 0.0;
                    //counts[c][s] = 1.0;
                    //sqCounts[c][s] = 1.0;
                }
            }
        }
        // Test for convergence of the sampling
        // Go until our standard error among the different chains is
        // less than 5% of the predicted value.
        if (burnin_done && sampling_iter >= predicted_iters) {
            // Stop, if we're only running a fixed number of iterations
            if (fixedIters) {
                break;
            }
            
            predicted_iters = predictIters(counts, sqCounts, (int)sampling_iter);
            // DEBUG
            cout << "Predicted iters = " << predicted_iters << endl;
            if (predicted_iters <= sampling_iter) {
                break;
            }
        }
    }
    // Save distributions: average the per-chain counts for each value and
    // normalize into a Distribution per non-evidence variable.
    for (int v = 0; v < model.getNumVars(); v++) {
        if (evidence.isTested(v)) {
            continue;
        }
        Distribution m(model.getRange(v));
        for (int val = 0; val < model.getRange(v); val++) {
            m[val] = 0;
            for (int c = 0; c < numChains; c++) {
                m[val] += counts[c][index[v][val]];
            }
        }
        m.normalize();
        marginals[v] = m;
#ifdef DEBUG
        if (!evidence.isTested(v)) {
            cout << v << ": " << m << endl;
        }
#endif
    }
    // DEBUG
    if (!fixedIters) {
        cout << burnin_iter << "; " << sampling_iter << endl;
    }
}

// Joint inference over a set of query variables: configurations of the query
// set are mapped to summary indices via VarConfig, and counts are seeded
// with a uniform prior of 1/numSummaries per configuration.
// NOTE(review): this function is truncated in this copy of the source
// (page 1 of 2) — the remainder of its body is not visible here.
void GibbsSampler::runJointInference(const list<int>& queryVars,
        const VarSet& evidence)
{
    VarSchema schema = model.getSchema();
    VarConfig query(evidence, queryVars, schema);
    int numSummaries = query.getMaxIndex() + 1;
    
    // Keep track of counts for all test configurations
    vector<vector<double> > counts(numChains, vector<double>(numSummaries));
    vector<vector<double> > sqCounts(numChains, vector<double>(numSummaries));
    for (int c = 0; c < numChains; c++) {
        for (int s = 0; s < numSummaries; s++) {
            counts[c][s] = 1.0/numSummaries;
            sqCounts[c][s] = 1.0/numSummaries;

?? 快捷鍵說明

復(fù)制代碼 Ctrl + C
搜索代碼 Ctrl + F
全屏模式 F11
切換主題 Ctrl + Shift + D
顯示快捷鍵 ?
增大字號 Ctrl + =
減小字號 Ctrl + -
亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频
精品国产一二三| 国产传媒久久文化传媒| 精品视频在线免费观看| 夜色激情一区二区| 欧美日韩免费电影| 日韩av电影天堂| 精品精品欲导航| 国产一区二区精品久久| 欧美激情中文不卡| 99久久99久久久精品齐齐| 亚洲已满18点击进入久久| 欧美久久久影院| 麻豆精品视频在线观看| 久久精品人人爽人人爽| 日本韩国一区二区| 免费人成精品欧美精品| 久久久久9999亚洲精品| 色婷婷久久久亚洲一区二区三区| 亚洲在线成人精品| 欧美精品一区二区三区四区 | 日韩一区二区三区四区五区六区| 日韩精品成人一区二区三区| 亚洲精品在线观看网站| 972aa.com艺术欧美| 丝袜亚洲精品中文字幕一区| 久久久久久久久久久电影| 色狠狠桃花综合| 精品一区二区在线视频| 国产精品不卡在线观看| 欧美一区二区视频免费观看| 懂色av一区二区在线播放| 亚洲国产一区二区视频| 久久网站最新地址| 欧美日韩中文精品| 国产成人精品免费网站| 亚洲国产精品自拍| 欧美激情一区二区三区| 欧美精品欧美精品系列| 成人黄色在线看| 蜜臀av一区二区三区| 樱花影视一区二区| 久久久久一区二区三区四区| 在线观看视频一区| 国产精品亚洲第一区在线暖暖韩国| 亚洲精品国产a| 欧美激情中文字幕一区二区| 欧美一区二区三区视频免费| 色94色欧美sute亚洲13| 国产二区国产一区在线观看| 男女视频一区二区| 一区二区高清免费观看影视大全 | 轻轻草成人在线| 亚洲精品免费一二三区| 国产午夜精品一区二区三区嫩草 | 日韩电影一二三区| 亚洲精品国产无天堂网2021| 久久久久国产精品麻豆ai换脸 | 99视频在线精品| 国产一区视频导航| 美女视频黄a大片欧美| 午夜在线电影亚洲一区| 一区二区三区精品| 亚洲三级免费观看| 国产精品久久久久婷婷| 久久久午夜电影| 欧美成人a视频| 欧美一区三区四区| 欧美日韩国产一二三| 色婷婷精品大视频在线蜜桃视频 | 午夜精品久久一牛影视| 亚洲日本丝袜连裤袜办公室| 国产精品久久久久久久久免费相片 | 紧缚捆绑精品一区二区| 日韩影院在线观看| 亚洲第一会所有码转帖| 亚洲精品中文在线影院| 亚洲精品国产无天堂网2021| 亚洲三级电影网站| 日韩毛片视频在线看| 亚洲啪啪综合av一区二区三区| 国产精品免费视频观看| 国产精品成人一区二区三区夜夜夜| 国产视频一区二区在线| 国产精品午夜春色av| 国产欧美日产一区| 国产精品理论在线观看| 亚洲欧美另类久久久精品| 一区二区成人在线| 亚洲狠狠爱一区二区三区| 偷拍日韩校园综合在线| 日本三级亚洲精品| 精品亚洲aⅴ乱码一区二区三区| 久久99精品久久久久久国产越南| 韩国理伦片一区二区三区在线播放 | 免费观看在线色综合| 美女一区二区三区在线观看| 久久超级碰视频| 国产精品77777| 91色porny| 欧美肥妇free| 久久久99久久精品欧美| 国产精品久久久久久久久免费桃花 | 人人爽香蕉精品| 国产成人精品亚洲午夜麻豆| 91在线精品一区二区| 欧美午夜寂寞影院| 精品日本一线二线三线不卡| 国产精品区一区二区三区| 亚洲一区二区视频| 九一九一国产精品| av网站一区二区三区| 欧美日韩一区二区电影| 精品国产一二三| 亚洲美女淫视频| 精品一区二区三区欧美| 91麻豆免费观看| 日韩视频一区二区在线观看| 国产精品日日摸夜夜摸av| 亚洲观看高清完整版在线观看| 久久精品国产99国产精品| 波多野结衣91| 在线成人免费观看| 国产精品久久久久精k8| 日韩福利视频网| 99精品国产91久久久久久| 91精品视频网| 亚洲人被黑人高潮完整版| 毛片一区二区三区| 色婷婷综合久色| 久久久国产午夜精品| 午夜a成v人精品| av影院午夜一区| 精品国产髙清在线看国产毛片| 亚洲少妇30p| 国产精品系列在线观看| 69堂成人精品免费视频| 综合亚洲深深色噜噜狠狠网站| 久久精品国内一区二区三区| 欧美三级蜜桃2在线观看| 亚洲国产高清aⅴ视频| 欧美aⅴ一区二区三区视频| 91麻豆免费视频| 国产精品日产欧美久久久久| 久久99久久久欧美国产| 欧美日韩一区二区在线视频| 中文字幕中文字幕在线一区 | 一区二区三区四区av| 国产麻豆视频一区| 日韩女优制服丝袜电影| 亚洲成人三级小说| 色婷婷av一区二区三区大白胸| 国产香蕉久久精品综合网| 蜜臀va亚洲va欧美va天堂 | 国产亚洲精品福利| 久久不见久久见免费视频7| 欧美精品三级日韩久久| 亚洲电影一区二区三区| 91久久免费观看| 国产精品国产自产拍在线| 国产成人99久久亚洲综合精品| 精品欧美一区二区在线观看| 日韩激情一区二区| 
欧美剧情电影在线观看完整版免费励志电影| 国产精品成人免费| 不卡av电影在线播放| 中文字幕av资源一区| 国产成人a级片| 国产精品天天摸av网| 国产精品主播直播| 国产日韩欧美一区二区三区综合| 韩国av一区二区三区在线观看| 精品国产自在久精品国产| 精品在线亚洲视频| 国产欧美在线观看一区| 成人a区在线观看| 亚洲欧美日韩国产手机在线 | 国产一区在线观看视频| 亚洲高清免费视频| 在线观看日韩一区| 婷婷丁香久久五月婷婷| 日韩丝袜情趣美女图片| 国产尤物一区二区在线| 欧美韩国一区二区| 成人av片在线观看| 亚洲精品国产第一综合99久久 | 久久精品72免费观看| 2023国产一二三区日本精品2022| 精品一区二区三区不卡 | 一区二区三区免费| 精品视频一区二区三区免费| 日韩黄色片在线观看| 欧美电视剧在线看免费| 国产一区免费电影| 亚洲免费三区一区二区| 欧美日本一道本在线视频| 麻豆91精品91久久久的内涵| 国产农村妇女精品| 欧美三级午夜理伦三级中视频| 久久精品久久99精品久久| 中文字幕av一区 二区|