亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频

? 歡迎來(lái)到蟲(chóng)蟲(chóng)下載站! | ?? 資源下載 ?? 資源專(zhuān)輯 ?? 關(guān)于我們
? 蟲(chóng)蟲(chóng)下載站

?? hypothesis.cpp

?? 解碼器是基于短語(yǔ)的統(tǒng)計(jì)機(jī)器翻譯系統(tǒng)的核心模塊
?? CPP
?? 第 1 頁(yè) / 共 2 頁(yè)
字號(hào):
#include "Hypothesis.h"

#include <cctype>
#include <cmath>
#include <iterator>
#include <sstream>

using namespace std;


extern bool printmore;

extern VECPOOL vecHypo;
extern VECUNSED vecNotUsed;

// Store the beam-search / pruning knobs used while decoding a sentence.
Hypothesis::Hypothesis(double thresholdI, int stackThreshold, int nBest, double dislimit, double lmlimit, int len)
{
	NBEST = nBest;                   // number of translations to output
	eachStackSize = stackThreshold;  // histogram pruning: max hypotheses kept per stack
	LMLimit = lmlimit;               // limit handed to the language model
	DISLENGHT = len;                 // (sic) max reordering distance; 0 disables the limit
	DISTORTIONLIMIT = dislimit;      // fixed penalty applied when the distance limit is exceeded
	threshold = log(thresholdI);     // relative pruning threshold, kept in log space
}

// Load the decoder configuration, the two vocabularies, the language model
// and the phrase table named in the ini file.
// Returns true on success, false if any model fails to load.
bool Hypothesis::load(string inifileName)
{
	config.load(inifileName, para);
	cout << "set the parameters ... " << endl;

	enVcb = new Vocab();
	cnVcb = new Vocab();
	// NOTE(review): plain `new` throws std::bad_alloc rather than returning
	// NULL, so this check can never fire in practice; it is kept as a guard,
	// but it now aborts the load instead of printing and falling through
	// (the original continued with possibly-null vocabularies).
	if ((!enVcb) || (!cnVcb)) {
		cout << "new Vocab error!" << endl;
		return false;
	}

	lm = new LanguageModel(LMLimit);
	cout << "load language model from " << para.lmodel_file << endl;
	time_t lmold, lmnew;
	time(&lmold);
	if(!lm->load(para.lmodel_file, enVcb))
	{
		cout << "load language model " << para.lmodel_file << " error !" << endl;
		return false;
	}
	time(&lmnew);
	cout << "language model load finished, it takes " << difftime(lmnew, lmold) << " seconds!" << endl;

	cout << "load phrase table from " << para.ttable_file << endl;
	to = new TransOptions(para);
	time_t toold, tonew;
	time(&toold);
	if (!to->load(para.ttable_file, lm, enVcb, cnVcb)) {
		cout << "load phrase table " << para.ttable_file << " error !" << endl;
		return false;
	}
	time(&tonew);
	cout << "phrase table load finished, it takes "<< difftime(tonew, toold) << " seconds!" << endl;
	return true;
}

// Prepare the per-sentence search state: map the source sentence to word
// IDs, collect every candidate target phrase, size one stack per number of
// covered source words, and seed stack 0 with the empty hypothesis.
void Hypothesis::initialize(string sentence)
{
	cnVcb->senToIDs(sentence, sentenceIDS);
	enVcb->unkTMP = cnVcb->unkTMP;  // propagate unknown-word bookkeeping

	stackSize = to->getEPhrase(sentenceIDS, phraseSnippet, phraseSnippetPosition);

	// Stacks 0 .. stackSize, indexed by how many source words are covered.
	hypothesisStack.resize(stackSize + 1);
	THRESHOLD.resize(stackSize + 1);

	// Take a free slot from the hypothesis pool for the initial hypothesis:
	// nothing translated yet, LM context is the sentence-start token.
	const int seed = findProper(vecNotUsed);
	HypothesisElement& init = vecHypo[seed];
	init.lastEWI = -1;
	init.lastEWII = enVcb->getIndex("<s>");
	init.lmScore = 0.0;
	init.prev = -1;
	init.thisID = 0;
	init.lastID = 0;

	hypothesisStack[0].push_back(seed);
}

void Hypothesis::clear()
{
	//release the resource
	phraseSnippet.clear();
	phraseSnippetPosition.clear();
	THRESHOLD.clear();
	HypothesisStack::iterator	pos;
	for(pos = hypothesisStack.begin(); pos != hypothesisStack.end(); ++pos)
	{
		pos->clear();
	}
	hypothesisStack.clear();
	
	vecNotUsed.clear();
	int poolsize = vecHypo.size();
	for (int n = 0; n < poolsize; n++)
	{
		vecNotUsed.push_back(n);
	}

	arc.clear();
	sentenceIDS.clear();
}

inline void Hypothesis::split(const string& line, vector<string>& strs)
{
	istrstream ist(line.c_str());
	string w;
	while(ist>>w) strs.push_back(w);
}

// Incrementally score `newPhrase` with the language model given the two
// previous target-side words (last1, last2); last1 == -1 marks the sentence
// start, where only last2 (the <s> token) is available as context.
// On return, newlast1/newlast2 hold the updated two-word context.
// Returns the weight_l-scaled log-probability contribution of the new words.
//
// Scheme: build [context..., newPhrase...], then repeatedly score the last
// word of the buffer (the printmore trace names that word) and drop it, so
// each new word is scored exactly once, right to left.
//
// FIX(review): the original walked `vector<int>::iterator pos = end()` and
// called `vecPhrase.erase(--pos)` each iteration; erase() invalidates that
// iterator, so reusing it on the next iteration is undefined behavior (it
// only worked by accident for erase-at-end on a vector). pop_back() performs
// the identical removal safely.
double Hypothesis::lmCal(int last1, int last2, vector<int> newPhrase, int& newlast1, int& newlast2)
{
	double lmScore = 0.0;
	if (newPhrase.size() == 0) 
	{
		return 0;
	}
	else
	{	
		vector<int> vecPhrase;
		if (last1 == -1) 
		{
			// Sentence start: context is just last2.
			vecPhrase.push_back(last2);
			int l = newPhrase.size();
			for(int i = 0; i < l; i++)
			{
				vecPhrase.push_back(newPhrase[i]);
			}
			int len = vecPhrase.size();
			newlast1 = vecPhrase[len - 2];
			newlast2 = vecPhrase[len - 1];
			int endS = enVcb->getIndex("</s>");
			if (newPhrase[0] != endS) {
				// Score each of the len - 1 new words, dropping the scored
				// word from the tail after each call.
				for(int i = 0; i < len - 1; i++)
				{
					double lmTmp = lm->wordProb(vecPhrase) * para.weight_l;
					if(printmore) {
						cout << "\tlanguage model cost for '" << enVcb->getWord(vecPhrase[vecPhrase.size() - 1]) << "' " << lmTmp << endl; 			
					}
					lmScore += lmTmp;
					vecPhrase.pop_back();
				}			
			}
			else {
				// The new phrase is just </s>: a single scoring call.
				lmScore = lm->wordProb(vecPhrase) * para.weight_l;
				if(printmore) {
					cout << "\tlanguage model cost for '" << enVcb->getWord(vecPhrase[vecPhrase.size() - 1]) << "' " << lmScore << endl; 			
				}
			}		
		}
		else
		{
			// Mid-sentence: two words of context.
			vecPhrase.push_back(last1);
			vecPhrase.push_back(last2);
			//			split(newPhrase, vecPhrase);
			int l = newPhrase.size();
			for(int i = 0; i < l; i++)
			{
				vecPhrase.push_back(newPhrase[i]);
			}
			int len = vecPhrase.size();
			newlast1 = vecPhrase[len - 2];
			newlast2 = vecPhrase[len - 1];
			
			int endS = enVcb->getIndex("</s>");
			if (newPhrase[0] != endS) {
				// len - 2 new words to score (two of the entries are context).
				for(int i = 0; i < len - 2; i++)
				{
					double lmTmp = lm->wordProb(vecPhrase) * para.weight_l;
					if(printmore) {
						cout << "\tlanguage model cost for '" << enVcb->getWord(vecPhrase[vecPhrase.size() - 1]) << "' " << lmTmp << endl; 			
					}				
					lmScore += lmTmp;
					vecPhrase.pop_back();
				}
				//		lmScore = lmScore + 1;
			}
			else {
				lmScore = lm->wordProb(vecPhrase) * para.weight_l;
				if (printmore){
					cout << "\tlanguage model cost for '" << enVcb->getWord(vecPhrase[vecPhrase.size() - 1]) << "' " << lmScore << endl; 			
				}
			}
		}
	}
	return lmScore;
}

// Future-cost estimate for a partial hypothesis: sums the precomputed
// cheapest translation cost (to->getFutureCost) over every maximal run of
// source positions NOT yet covered by `phraseID`.
// NOTE(review): the first element of phraseID appears to be a -1 sentinel
// inserted during hypothesis construction (decoder() comments on a -1
// having been inserted), which is why both iterators are advanced once
// before use -- confirm against the caller.
double Hypothesis::fcCal(set<int> phraseID, int stackSize)
{
	set<int>::iterator pos = phraseID.begin();
	set<int>::iterator posOld = phraseID.begin();
	// Skip the sentinel so *pos is the first really-covered position.
	pos++;
	posOld++;
	int len = phraseID.size();  // NOTE(review): unused
	double fcScore = 0.0;
	
	// Uncovered prefix: positions 0 .. firstCovered-1.
	if (*pos != 0) {
		fcScore += to->getFutureCost(0, *pos - 1);
		posOld = pos;
		pos++;	
	}
	// Uncovered gaps between consecutive covered positions.
	for(; pos != phraseID.end(); ++pos)
	{
		if(*pos - 1 > *posOld)
		{
			fcScore += to->getFutureCost(*posOld + 1, *pos - 1);
		}
		posOld = pos;
	}
	
	// Uncovered suffix: lastCovered+1 .. stackSize-1.
	if (stackSize - 1 >= *posOld + 1) {
		int i = *posOld + 1;
		fcScore += to->getFutureCost(i, stackSize - 1);
	}
	return fcScore;
}

// Main beam-search loop.  Stack i holds hypotheses that cover i source
// words; each hypothesis is expanded with every translation option whose
// source span is still uncovered, and the extensions are handed to
// recombineAndbeam for recombination/pruning into deeper stacks.
// Returns the single best translation when NBEST == 1, otherwise writes an
// n-best list via findNBest and returns that file's name.
string Hypothesis::decoder(string fileName)
{
	
	for(int i = 0; i < stackSize; i++)
	{
		// Histogram pruning before a stack is expanded.
		// NOTE(review): "i - 1 > 0" skips pruning for stacks 0 AND 1;
		// possibly "i > 0" was intended -- confirm.
		if ((i - 1 > 0) && (hypothesisStack[i].size() > eachStackSize)) {
			cutStack(hypothesisStack[i], eachStackSize, i);
		}
		multisetHYE::iterator posI = hypothesisStack[i].begin();
		for(; posI != hypothesisStack[i].end(); ++posI)
		{
			// Deliberate copy, not a reference: vecHypo may be resized
			// below when the pool grows, which would invalidate a reference.
			HypothesisElement hyp = vecHypo[*posI];
			
			int t = hyp.oldphrase.size();
			set<int> setTmp;
			
			if (t > 0) {
				setTmp = hyp.oldphrase;  // source positions already covered
			}
			int len = phraseSnippet.size();
			for(int j = 0; j < len; j++)
			{	
				// Only expand with phrases whose span endpoints are both
				// still uncovered.
				fPosition posTmp = phraseSnippetPosition[j];
				if((setTmp.find(posTmp.start) == setTmp.end()) && (setTmp.find(posTmp.end) == setTmp.end()))
				{
					candiPhrase::iterator pos;
					for(pos = phraseSnippet[j]->begin(); pos != phraseSnippet[j]->end(); ++pos)
					{
						aboutEPhrase onePhrase = **pos;
					//	HypothesisElement *newhyp = new HypothesisElement();
						// Take a free pool slot; double the pool when exhausted.
						int newhyp = findProper(vecNotUsed);
						if (newhyp == -1) {
							int oldsize = vecHypo.size();
							vecHypo.resize(2 * oldsize);
							for (int n = oldsize; n < 2 * oldsize; n++)
							{
								vecNotUsed.push_back(n);
							}
						//	vecNotUsed.resize(2 * oldsize);
							newhyp = oldsize;
						}
					
						
						vecHypo[newhyp].thisID = ++HypothesisElement::baseID;
						
						vecHypo[newhyp].lastID = hyp.thisID;
						
						// Parent's score without its future-cost estimate.
						vecHypo[newhyp].baseScore = hyp.totalScore - hyp.futureScore;
						vecHypo[newhyp].transScore = onePhrase.pC;
						vecHypo[newhyp].newPhrase = onePhrase.ephrase;
						// Distortion: jump distance from the end of the last
						// translated span to the start of this one.
						int translationCost = abs(hyp.lastPos.end + 1 - posTmp.start);
						if ((DISLENGHT == 0) || (translationCost <= DISLENGHT)) {
							vecHypo[newhyp].distortionScore = translationCost * para.weight_d * (-1);
						}
						else 
						{
							// Jump exceeds the distance limit: fixed penalty.
							vecHypo[newhyp].distortionScore = DISTORTIONLIMIT;//log(0.1)
						}
						
						vecHypo[newhyp].wordsPenalty = onePhrase.ephrase.size() * para.word_penalty * (-1);
						vecHypo[newhyp].lastPos = posTmp;
						
						vecHypo[newhyp].oldphrase = hyp.oldphrase;
						if(printmore) {
							cout << "creating hypothesis " << vecHypo[newhyp].thisID << " from "<< vecHypo[newhyp].lastID << endl;
							cout << "\tbase score " << vecHypo[newhyp].baseScore << endl;
							cout << "\ttranslation cost " << vecHypo[newhyp].transScore << endl;
							cout << "\tdistortion cost " << vecHypo[newhyp].distortionScore << endl;
							
						}
						
						// Mark this phrase's source positions as covered.
						for(int t = posTmp.start; t <= posTmp.end; t++)
						{
							vecHypo[newhyp].oldphrase.insert(t);
						}
						
						vecHypo[newhyp].lmScore = lmCal(hyp.lastEWI, hyp.lastEWII, onePhrase.ephrase, vecHypo[newhyp].lastEWI, vecHypo[newhyp].lastEWII);
		
						if (vecHypo[newhyp].oldphrase.size() == stackSize + 1) {// all source words covered: append </s> at the English sentence end
							int str1, str2;          // the +1 is because a -1 sentinel was inserted earlier
							int endofsent = enVcb->getIndex("</s>");
							vector<int> vecSentEnd;
							vecSentEnd.push_back(endofsent);
							double tail = lmCal(vecHypo[newhyp].lastEWI, vecHypo[newhyp].lastEWII, vecSentEnd, str1, str2) ;				
							vecHypo[newhyp].lmScore +=  tail;						
						}
						vecHypo[newhyp].futureScore = fcCal(vecHypo[newhyp].oldphrase, stackSize);
						
						vecHypo[newhyp].totalScore = vecHypo[newhyp].baseScore + vecHypo[newhyp].transScore + vecHypo[newhyp].distortionScore + vecHypo[newhyp].lmScore
							+ vecHypo[newhyp].wordsPenalty + vecHypo[newhyp].futureScore;
						if(printmore) {
							cout << "\tword penalty " << vecHypo[newhyp].wordsPenalty << endl;
							cout << "\tscore " << vecHypo[newhyp].totalScore - vecHypo[newhyp].futureScore << " + futureCost " << vecHypo[newhyp].futureScore << " = " << vecHypo[newhyp].totalScore << endl;
						}
						// push the hypothesis onto its stack
						vecHypo[newhyp].prev = *posI;
						recombineAndbeam(newhyp);
					}		
				}
			}
		}
	}
	if(printmore) {
		cout << "decode finished !" << endl;
	}
	if (NBEST == 1) {
		return findBest();
	}
	else {
		// n-best output goes to fileName + "." + one '0' per source word.
		string suffix = ".";
		int itmp = stackSize;
		while (itmp--) {
			suffix += "0";
		}
		suffix = fileName + suffix;
		findNBest(suffix);
		return suffix;
	}
}

void Hypothesis::recombineAndbeam(int newHyp)
{
	int len = vecHypo[newHyp].oldphrase.size() - 1;//which stack to input
	int stackIsize = hypothesisStack[len].size();
	if (stackIsize != 0) 
	{
		if (vecHypo[newHyp].totalScore - THRESHOLD[len] + threshold > avs) {
			THRESHOLD[len] = vecHypo[newHyp].totalScore + threshold;
			if(printmore) {
				cout << "new best estimate for this stack" << endl;
			}
		}
		if (vecHypo[newHyp].totalScore - THRESHOLD[len] > avs) 
		{
			//THRESHOLD = newHyp.totalScore + threshold;
			multisetHYE::iterator POS = hypothesisStack[len].begin();

?? 快捷鍵說(shuō)明

復(fù)制代碼 Ctrl + C
搜索代碼 Ctrl + F
全屏模式 F11
切換主題 Ctrl + Shift + D
顯示快捷鍵 ?
增大字號(hào) Ctrl + =
減小字號(hào) Ctrl + -
亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频
国产午夜精品一区二区三区嫩草| 日韩精品中文字幕一区| 日韩精品亚洲一区| 久久久精品黄色| 欧美男女性生活在线直播观看| 精品一区二区三区不卡| 亚洲一区二区精品3399| 国产日韩欧美精品电影三级在线| 欧美日韩三级在线| 成人av资源下载| 精东粉嫩av免费一区二区三区| 亚洲香蕉伊在人在线观| 最新国产成人在线观看| 久久久精品蜜桃| 日韩美女在线视频| 欧美日韩极品在线观看一区| 成人sese在线| 粉嫩蜜臀av国产精品网站| 麻豆免费精品视频| 亚洲va欧美va人人爽| 亚洲欧美一区二区三区国产精品| 久久久久久免费网| 欧美一区二区三区人| 欧美性大战久久久久久久蜜臀 | 国产一区二区三区综合| 亚洲成av人片| 亚洲综合偷拍欧美一区色| 日韩一区在线播放| 中文字幕免费观看一区| 久久久久成人黄色影片| 精品国产sm最大网站| 日韩欧美国产wwwww| 在线观看91精品国产麻豆| 欧美日韩午夜在线| 欧美日本不卡视频| 欧美久久久久中文字幕| 欧美日韩在线三级| 欧美美女黄视频| 欧美剧情电影在线观看完整版免费励志电影 | 激情综合色综合久久| 美国十次综合导航| 日本不卡一二三| 日本sm残虐另类| 麻豆精品在线播放| 精彩视频一区二区三区| 国产乱人伦偷精品视频不卡| 国产精品一区在线| 成人精品鲁一区一区二区| 国产盗摄视频一区二区三区| 成人一区在线观看| 99视频在线观看一区三区| 99视频精品全部免费在线| 91小视频免费观看| 欧美性极品少妇| 7777精品伊人久久久大香线蕉完整版| 欧美剧在线免费观看网站 | 99精品视频一区二区| 91丝袜美腿高跟国产极品老师| 一本色道亚洲精品aⅴ| 欧美中文字幕一区二区三区| 欧美美女喷水视频| 精品粉嫩aⅴ一区二区三区四区| 久久午夜老司机| 最近日韩中文字幕| 亚洲777理论| 久久99精品久久久久婷婷| 国产精品亚洲第一区在线暖暖韩国 | 亚洲人成精品久久久久| 亚洲狠狠丁香婷婷综合久久久| 天天综合日日夜夜精品| 久久福利资源站| 福利一区二区在线| 欧美日韩中字一区| 亚洲精品一区二区三区精华液| 亚洲国产精品成人久久综合一区| 亚洲精品你懂的| 精品一区二区三区久久| 97久久精品人人澡人人爽| 欧美日韩国产一级片| www亚洲一区| 亚洲一区二区三区中文字幕在线| 久久成人免费日本黄色| 色综合中文字幕| 欧美成va人片在线观看| 亚洲特级片在线| 精品一区二区影视| 91久久一区二区| 久久亚洲精品国产精品紫薇| 一区二区三区欧美久久| 国产精品资源网| 欧美精品粉嫩高潮一区二区| 国产精品美女视频| 免费精品视频在线| 91浏览器入口在线观看| 欧美成人艳星乳罩| 亚洲自拍偷拍图区| 福利视频网站一区二区三区| 欧美一级在线视频| 亚洲精品国产精品乱码不99| 国产一区二区三区在线观看免费视频| 欧美亚洲一区三区| 国产精品色哟哟| 久久99精品国产麻豆不卡| 欧美亚洲精品一区| 国产欧美一区二区精品秋霞影院| 首页欧美精品中文字幕| 91在线视频免费91| 国产日韩欧美制服另类| 蜜桃久久久久久久| 欧美日韩国产综合一区二区 | 97精品国产97久久久久久久久久久久| 亚洲最大色网站| 高清在线不卡av| 久久久亚洲综合| 国精产品一区一区三区mba视频| 欧美日韩精品二区第二页| 亚洲精品午夜久久久| 成人黄色av网站在线| 久久先锋资源网| 狠狠色丁香久久婷婷综合丁香| 欧美久久免费观看| 亚洲午夜电影在线观看| 91久久一区二区| 亚洲精品视频一区| 99久久精品99国产精品| 国产精品久久久久久久久搜平片 | 日韩精品一区二区三区在线| 日本sm残虐另类| 日韩视频在线你懂得| 日韩精品国产精品| 91精品国产综合久久久蜜臀粉嫩| 香蕉久久夜色精品国产使用方法 | 日韩欧美在线综合网| 日韩精品成人一区二区在线| 欧美精品久久一区二区三区| 亚洲成av人影院| 3atv一区二区三区| 日本v片在线高清不卡在线观看| 欧美精品久久一区| 免费观看日韩av| 精品三级在线看| 国产一区二区女| 国产精品色呦呦| 色婷婷国产精品久久包臀| 亚洲最大成人网4388xx| 欧美日韩dvd在线观看| 婷婷中文字幕一区三区| 国产精品123| 色伊人久久综合中文字幕| 亚洲人成7777| 欧洲激情一区二区| 午夜一区二区三区在线观看| 91精品国产高清一区二区三区| 蜜乳av一区二区三区| 久久这里只有精品视频网| 国产大片一区二区| 亚洲欧洲中文日韩久久av乱码| 91国偷自产一区二区三区观看 | 亚洲成在人线在线播放| 欧美精品aⅴ在线视频| 蜜臀久久久99精品久久久久久| 日韩精品一区二区三区在线| 成人精品国产一区二区4080| 
一区二区三区在线播放| 欧美一区二区三区公司| 国产盗摄视频一区二区三区| 一区二区三区久久久| 日韩一区二区三区在线| 国产激情一区二区三区四区 | 亚洲一区中文日韩| 日韩午夜激情视频| 成人黄色av网站在线| 亚洲成人动漫在线免费观看| 26uuu精品一区二区| 色婷婷久久久综合中文字幕| 免费的国产精品| 亚洲欧洲av一区二区三区久久| 欧美人体做爰大胆视频| 国产.欧美.日韩| 日韩中文字幕91| ㊣最新国产の精品bt伙计久久| 欧美视频完全免费看| 国产露脸91国语对白| 亚洲一区在线观看网站| 蜜乳av一区二区| 日韩美女精品在线| 欧美va在线播放| 欧洲激情一区二区| 国产69精品久久777的优势| 性久久久久久久| 亚洲天天做日日做天天谢日日欢| 欧美一区二区视频在线观看2020| 不卡的看片网站| 精品写真视频在线观看| 亚洲成人在线观看视频| 国产精品久久久久久福利一牛影视| 欧美卡1卡2卡| 91久久精品一区二区三| 国产aⅴ综合色| 久久成人羞羞网站|