nbest-lattice.cc
A very handy toolkit. (C++ source, page 1 of 2)
/*
 * nbest-lattice --
 *	Build and rerank N-Best lattices and confusion networks
 */

#ifndef lint
static char Copyright[] = "Copyright (c) 1995-2006 SRI International.  All Rights Reserved.";
static char RcsId[] = "@(#)$Id: nbest-lattice.cc,v 1.82 2006/01/09 17:53:16 stolcke Exp $";
#endif

#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <locale.h>
#include <assert.h>
#include <math.h>

#include "option.h"
#include "version.h"
#include "File.h"
#include "zio.h"

#include "Prob.h"
#include "Vocab.h"
#include "NBest.h"
#include "NullLM.h"
#include "WordLattice.h"
#include "WordMesh.h"
#include "WordAlign.h"
#include "VocabMultiMap.h"
#include "RefList.h"
#include "Array.cc"

#define DEBUG_ERRORS		1
#define DEBUG_POSTERIORS	2

/*
 * Pseudo-posterior used to prime lattice with centroid hyp
 */
const Prob primePosterior = 100.0;

/*
 * default value for posterior* weights to indicate they haven't been set
 */
const double undefinedWeight = HUGE_VAL;
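/*
 * (Presumably resolved to the corresponding rescore-* weights before
 * use; HUGE_VAL merely marks "not set".)
 */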

static int version = 0;
static unsigned debug = 0;
static int werRescore = 0;
static unsigned maxRescore = 0;
static char *vocabFile = 0;
static char *writeVocabFile = 0;
static int toLower = 0;
static int multiwords = 0;
static char *readFile = 0;
static char *writeFile = 0;
static char *writeDir = 0;
static char *rescoreFile = 0;
static int computeNbestError = 0;
static int computeLatticeError = 0;
static char *nbestFiles = 0;
static char *latticeFiles = 0;
static char *writeNbestFile = 0;
static char *writeNbestDir = 0;
static int writeDecipherNbest = 0;
static unsigned maxNbest = 0;
static double rescoreLMW = 8.0;
static double rescoreWTW = 0.0;
static double posteriorScale = 0.0;
static double posteriorAMW = 1.0;
static double posteriorLMW = undefinedWeight;
static double posteriorWTW = undefinedWeight;
static char *noiseTag = 0;
static char *noiseVocabFile = 0;
static int keepNoise = 0;
static int noMerge = 0;
static int noReorder = 0;
static double postPrune = 0.0;
static int primeLattice = 0;
static int primeWith1best = 0;
static int primeWithRefs = 0;
static int noViterbi = 0;
static int useMesh = 0;
static char *dictFile = 0;
static char *hiddenVocabFile = 0;
static double deletionBias = 1.0;
static int dumpPosteriors = 0;
static char *refString = 0;
static char *refFile = 0;
static int dumpErrors = 0;
static int recordHypIDs = 0;
static int nbestBacktrace = 0;
static int outputCTM = 0;
static int noRescore = 0;

static Option options[] = {
    { OPT_TRUE, "version", &version, "print version information" },
    { OPT_UINT, "debug", &debug, "debugging level" },
    { OPT_STRING, "vocab", &vocabFile, "vocab file" },
    { OPT_TRUE, "tolower", &toLower, "map vocabulary to lowercase" },
    { OPT_TRUE, "multiwords", &multiwords, "split multiwords in N-best hyps" },
    { OPT_TRUE, "wer", &werRescore, "optimize expected WER using N-best list" },
    { OPT_FALSE, "lattice-wer", &werRescore, "optimize expected WER using lattice" },
    { OPT_STRING, "read", &readFile, "lattice file to read" },
    { OPT_STRING, "write", &writeFile, "lattice file to write" },
    { OPT_STRING, "write-dir", &writeDir, "lattice directory to write to" },

    { OPT_STRING, "rescore", &rescoreFile, "hyp stream input file to rescore" },
    { OPT_TRUE, "nbest-error", &computeNbestError, "compute n-best error" },
    { OPT_TRUE, "lattice-error", &computeLatticeError, "compute lattice error" },
    { OPT_STRING, "nbest", &rescoreFile, "same as -rescore" },
    { OPT_STRING, "write-nbest", &writeNbestFile, "output n-best list" },
    { OPT_STRING, "write-nbest-dir", &writeNbestDir, "output n-best directory" },
    { OPT_STRING, "write-vocab", &writeVocabFile, "output n-best vocabulary" },
    { OPT_TRUE, "decipher-nbest", &writeDecipherNbest, "output Decipher n-best format" },
    { OPT_STRING, "nbest-files", &nbestFiles, "list of n-best filenames" },
    { OPT_STRING, "lattice-files", &latticeFiles, "list of lattice filenames to merge with main lattice" },
    { OPT_UINT, "max-nbest", &maxNbest, "maximum number of hyps to consider" },
    { OPT_UINT, "max-rescore", &maxRescore, "maximum number of hyps to rescore" },
    { OPT_FLOAT, "posterior-prune", &postPrune, "ignore n-best hyps whose cumulative posterior mass is below threshold" },
    { OPT_FLOAT, "rescore-lmw", &rescoreLMW, "rescoring LM weight" },
    { OPT_FLOAT, "rescore-wtw", &rescoreWTW, "rescoring word transition weight" },
    { OPT_FLOAT, "posterior-scale", &posteriorScale, "divisor for log posterior estimates" },
    { OPT_FLOAT, "posterior-amw", &posteriorAMW, "posterior AM weight" },
    { OPT_FLOAT, "posterior-lmw", &posteriorLMW, "posterior LM weight" },
    { OPT_FLOAT, "posterior-wtw", &posteriorWTW, "posterior word transition weight" },
    { OPT_TRUE, "keep-noise", &keepNoise, "do not eliminate pause and noise tokens" },
    { OPT_TRUE, "nbest-backtrace", &nbestBacktrace, "read backtrace info from N-best lists" },
    { OPT_TRUE, "output-ctm", &outputCTM, "output decoded words in CTM format" },
    { OPT_STRING, "noise", &noiseTag, "noise tag to skip" },
    { OPT_STRING, "noise-vocab", &noiseVocabFile, "noise vocabulary to skip" },
    { OPT_TRUE, "no-merge", &noMerge, "don't merge hyps for lattice building" },
    { OPT_TRUE, "no-reorder", &noReorder, "don't reorder N-best hyps before rescoring" },
    { OPT_TRUE, "prime-lattice", &primeLattice, "initialize word lattice with WE-minimized hyp" },
    { OPT_TRUE, "prime-with-1best", &primeWith1best, "initialize word lattice with 1-best hyp" },
    { OPT_TRUE, "prime-with-refs", &primeWithRefs, "initialize word lattice with reference hyp" },
    { OPT_TRUE, "no-viterbi", &noViterbi, "minimize lattice WE without Viterbi search" },
    { OPT_TRUE, "use-mesh", &useMesh, "align using word mesh (not lattice)" },
    { OPT_STRING, "dictionary", &dictFile, "dictionary to use in mesh alignment" },
    { OPT_STRING, "hidden-vocab", &hiddenVocabFile, "subvocabulary to be kept separate in mesh alignment" },
    { OPT_FLOAT, "deletion-bias", &deletionBias, "bias factor in favor of deletions" },
    { OPT_TRUE, "dump-posteriors", &dumpPosteriors, "output hyp and word posterior probs" },
    { OPT_TRUE, "dump-errors", &dumpErrors, "output word error labels" },
    { OPT_TRUE, "record-hyps", &recordHypIDs, "record hyp IDs in lattice" },
    { OPT_TRUE, "no-rescore", &noRescore, "suppress lattice rescoring" },
    { OPT_STRING, "reference", &refString, "reference words" },
    { OPT_STRING, "refs", &refFile, "reference transcript file" }
};
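
/*
 * Illustrative invocation (hypothetical filenames), combining options
 * declared above:
 *
 *	nbest-lattice -nbest HYPS.nbest -use-mesh \
 *		-rescore-lmw 8 -rescore-wtw 0 -write sausage.gz
 *
 * This reads an N-best list, aligns its hyps into a word mesh
 * (confusion network), writes the resulting network, and prints the
 * decoded hyp to stdout.
 */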

/*
 * Output hypotheses in CTM format
 */
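/*
 * Each output line has the CTM fields
 *	name channel start duration word posterior
 * e.g. (illustrative): "sw2001-A 1 0.34 0.21 hello 0.87"
 */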
static void
printCTM(Vocab &vocab, const NBestWordInfo *winfo, const char *name)
{
    for (unsigned i = 0; winfo[i].word != Vocab_None; i ++) {
	cout << name << " 1 ";
	if (winfo[i].valid()) {
	    cout << winfo[i].start << " " << winfo[i].duration;
	} else {
	    cout << "? ?";
	}
	cout << " " << vocab.getWord(winfo[i].word)
	     << " " << winfo[i].wordPosterior << endl;
    }
}

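/*
 * Merge the N-best hyps into the lattice (or word mesh) and recover
 * the minimum expected-WER hyp from it
 */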
void
latticeRescore(const char *sentid, MultiAlign &lat, NBestList &nbestList,
						const VocabIndex *reference)
{
    unsigned totalWords = 0;
    unsigned numHyps = nbestList.numHyps();

    if (!noReorder) {
    	if (rescoreLMW != 0.0 || rescoreWTW != 0.0) {
	    nbestList.reweightHyps(rescoreLMW, rescoreWTW);
	}
	nbestList.sortHyps();
    }

    nbestList.computePosteriors(posteriorLMW, posteriorWTW, posteriorScale,
								posteriorAMW);
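    /*
     * Sketch of the combination (assumed from the weight names): each
     * hyp's posterior is proportional to
     *
     *	exp((amw * acousticScore + lmw * lmScore + wtw * numWords)
     *						/ posteriorScale)
     *
     * normalized over the list, so posteriorScale flattens or sharpens
     * the distribution.
     */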

    unsigned howmany = (maxRescore > 0) ? maxRescore : numHyps;
    if (howmany > numHyps) {
	howmany = numHyps;
    }

    Prob totalPost = 0.0;
    VocabIndex *primeWords = 0;

    /* 
     * Prime lattice with a "good hyp" to improve alignments
     */
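    /*
     * The prime hyp carries a large pseudo-posterior (primePosterior)
     * so that it dominates the alignment of all subsequent hyps; that
     * mass is subtracted again after the hyps have been incorporated
     * (see "Remove posterior mass due to priming" below).
     */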
    if (primeLattice && !noMerge && lat.isEmpty()) {
	primeWords = new VocabIndex[maxWordsPerLine + 1];
	assert(primeWords != 0);

	if (primeWith1best) {
	    /*
	     * prime with 1-best hyp
	     */
	    nbestList.reweightHyps(rescoreLMW, rescoreWTW);

	    /*
	     * locate best hyp
	     */
	    VocabIndex *bestHyp = 0;		/* stays 0 if the list is empty */
	    LogP bestScore = 0.0;
	    for (unsigned i = 0; i < howmany; i ++) {
		NBestHyp &hyp = nbestList.getHyp(i);

		if (i == 0 || hyp.totalScore > bestScore) {
		    bestHyp = hyp.words;
		    bestScore = hyp.totalScore;
		}
	    }

	    if (bestHyp) {
		Vocab::copy(primeWords, bestHyp);
	    } else {
		primeWords[0] = Vocab_None;	/* empty N-best list */
	    }
	} else if (primeWithRefs) {
	    if (reference) {
	        Vocab::copy(primeWords, reference);
	    } else {
		cerr << sentid << " has no reference -- not priming lattice\n";
		delete [] primeWords;
		primeWords = 0;
	    }
	} else {
	    /*
	     * prime with WE-minimized hyp -- slow!
	     */
	    double subs, inss, dels;
	    (void)nbestList.minimizeWordError(primeWords, maxWordsPerLine + 1,
				    subs, inss, dels, maxRescore, postPrune);
	    primeWords[maxWordsPerLine] = Vocab_None;
	}

	if (primeWords) {
	    lat.addWords(primeWords, primePosterior);
	}
    }

    /*
     * Incorporate hyps into lattice
     */
    for (unsigned i = 0; i < howmany; i ++) {
	NBestHyp &hyp = nbestList.getHyp(i);
	HypID hypID = hyp.rank;
	HypID *hypIDPtr = recordHypIDs ? &hypID : 0;

	/*
	 * Check for overflow in the hypIDs
	 */
	if (recordHypIDs && ((unsigned)hypID != hyp.rank || hypID == refID)) {
	    cerr << "Sorry, too many hypotheses in N-best list "
		 << (sentid ? sentid : "") << endl;
	    exit(2);
	}

	totalWords += Vocab::length(hyp.words);

	/*
	 * If merging is turned off, or the lattice is still empty (only
	 * initial/final nodes), we add the hyp as a fresh path.
	 * Otherwise we merge it in using string alignment.
	 */
	
	if (noMerge || lat.isEmpty()) {
	    if (hyp.wordInfo) {
		lat.addWords(hyp.wordInfo, hyp.posterior, hypIDPtr);
	    } else {
		lat.addWords(hyp.words, hyp.posterior, hypIDPtr);
	    }
	} else {
	    if (hyp.wordInfo) {
		lat.alignWords(hyp.wordInfo, hyp.posterior, 0, hypIDPtr);
	    } else {
		lat.alignWords(hyp.words, hyp.posterior, 0, hypIDPtr);
	    }
	}

	/*
	 * Stop adding hyps once their cumulative posterior mass exceeds
	 * 1 - postPrune, i.e., ignore the low-posterior tail of the list
	 * (e.g., postPrune = 0.1 keeps hyps covering the top 90% of the
	 * posterior mass)
	 */
	totalPost += hyp.posterior;
	if (postPrune > 0.0 && totalPost > 1.0 - postPrune) {
	    break;
	}
    }

    /*
     * Remove posterior mass due to priming
     */
    if (primeWords) {
	lat.addWords(primeWords, - primePosterior);
	delete [] primeWords;
    }

    if (dumpPosteriors) {
	/*
	 * Dump hyp posteriors, followed by word posteriors
	 */
	for (unsigned i = 0; i < howmany; i ++) {
	    NBestHyp &hyp = nbestList.getHyp(i);

	    unsigned hypLength = Vocab::length(hyp.words);

	    makeArray(Prob, posteriors, hypLength);

	    lat.alignWords(hyp.words, 0.0, posteriors);

	    if (sentid) cout << sentid << ":" << i << " ";
	    cout << hyp.posterior;
	    for (unsigned j = 0; j < hypLength; j ++) {
		cout << " " << posteriors[j];
	    }
	    cout << endl;
	}
    } else if (!dumpErrors) {
	/*
	 * Recover best hyp from lattice
	 */
	unsigned flags = 0;
	if (noViterbi) {
	    flags |= WORDLATTICE_NOVITERBI;
	}
	 
	if (outputCTM) {
	    NBestWordInfo *bestWords = new NBestWordInfo[maxWordsPerLine + 1];
	    assert(bestWords != 0);
	    double subs, inss, dels, errors;

	    errors = lat.minimizeWordError(bestWords, maxWordsPerLine + 1,
				      subs, inss, dels, flags, deletionBias);
	    bestWords[maxWordsPerLine].word = Vocab_None;

	    printCTM(lat.vocab, bestWords, sentid ? sentid : "???");

	    delete [] bestWords;

	    if (debug >= DEBUG_ERRORS) {
		if (sentid) cerr << sentid << " ";
		cerr << "err " << errors << " sub " << subs
		     << " ins " << inss << " del " << dels << endl;
	    }
	} else {
	    VocabIndex bestWords[maxWordsPerLine + 1];
	    double subs, inss, dels, errors;

	    errors = lat.minimizeWordError(bestWords, maxWordsPerLine + 1,
				      subs, inss, dels, flags, deletionBias);
	    bestWords[maxWordsPerLine] = Vocab_None;

	    if (sentid) cout << sentid << " ";
	    cout << (lat.vocab.use(), bestWords) << endl;

	    if (debug >= DEBUG_ERRORS) {
		if (sentid) cerr << sentid << " ";
		cerr << "err " << errors << " sub " << subs
		     << " ins " << inss << " del " << dels << endl;
	    }

	    if (debug >= DEBUG_POSTERIORS) {
		unsigned numWords = Vocab::length(bestWords);
		makeArray(Prob, posteriors, numWords);

		lat.alignWords(bestWords, 0.0, posteriors);

		if (sentid) cerr << sentid << " ";
		cerr << "post";
		for (unsigned j = 0; j < numWords; j ++) {
		    cerr << " " << posteriors[j];
		}
		cerr << endl;
	    }
	}
    }
}

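/*
 * Optimize expected WER directly on the N-best list (-wer mode),
 * without building a lattice
 */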
void
wordErrorRescore(const char *sentid, NBestList &nbestList)
{
    unsigned numHyps = nbestList.numHyps();
    unsigned howmany = (maxRescore > 0) ? maxRescore : numHyps;
    if (howmany > numHyps) {
	howmany = numHyps;
    }

    if (!noReorder) {
    	if (rescoreLMW != 0.0 || rescoreWTW != 0.0) {
	    nbestList.reweightHyps(rescoreLMW, rescoreWTW);
	}
	nbestList.sortHyps();
    }

    nbestList.computePosteriors(posteriorLMW, posteriorWTW, posteriorScale,
								posteriorAMW);

    if (dumpPosteriors) {
	/*
	 * Dump hyp posteriors
	 */
	for (unsigned i = 0; i < howmany; i ++) {
	    if (sentid) cout << sentid << ":" << i << " ";
	    cout << nbestList.getHyp(i).posterior << endl;
	}
    } else if (!dumpErrors) {
	VocabIndex bestWords[maxWordsPerLine + 1];

	double subs, inss, dels;
	double errors = nbestList.minimizeWordError(bestWords,
				    maxWordsPerLine + 1,
				    subs, inss, dels, maxRescore, postPrune);
	bestWords[maxWordsPerLine] = Vocab_None;

	if (sentid) cout << sentid << " ";
	cout << (nbestList.vocab.use(), bestWords) << endl;

	if (debug >= DEBUG_ERRORS) {
	    if (sentid) cerr << sentid << " ";
	    cerr << "err " << errors << " sub " << subs
		 << " ins " << inss << " del " << dels << endl;
	}
    }
}

/* [listing truncated here: the remainder of the file is on page 2] */