lmodel.c

Hidden Markov Model Toolkit (HTK)
Language: C
Page 1 of 5
               x = (UShort) feStack[j]->ndx;
               WriteShort(f,(short *)&x,1,TRUE);
            }
            x = (UShort) se->ndx;
            WriteShort(f,(short *)&x,1,TRUE);
         }
         if (flags&HAS_BOWT)
            WriteFloat(f,&bowt,1,TRUE);      /* back-off weight */
      } else {
         fprintf(f, "%+.4f",prob);
         fprintf(f, "\t%s",context);
         word = lm->binMap[se->ndx]->name;
         if (htkEsc)
            word = ReWriteString(word,NULL,ESCAPE_CHAR);
         fprintf(f, "%s",word);
         if (has_bowt)
            fprintf(f, "\t%+.4f",bowt);
         fprintf(f, "\n");
      }
      nItem++;
   }
   }
   return nItem;
}

/* SaveNGram: write LM to file f */
static int SaveNGram(FILE *f, int G, BackOffLM *lm)
{
   int total;
   Byte fsize;
   FLEntry *feStack[LM_NSIZE];
   Boolean useIntID;

   if (lm->vocSize > USHRT_MAX) {
      if (sizeof(LM_Id) <= sizeof(UShort))
         HError(15445,"SaveNGram: vocSize = %d but using %d-byte IDs",
                lm->vocSize, sizeof(LM_Id));
      useIntID = TRUE;
   } else {
      useIntID = defIntID;
   }
   fprintf(f, "\n\\%d-grams:\n", G);
   feStack[0] = &(lm->root);
   total = WriteNGram(f,lm,feStack,1,G,useIntID);
   if (lm->gInfo[G].fmt==LMF_BINARY) {
      /* write out 2 zero bytes */
      fsize = 0;
      fwrite(&fsize, sizeof(unsigned char), 1, f);
      fwrite(&fsize, sizeof(unsigned char), 1, f);
   }
   if (trace&T_SAVE)
      printf("Wrote %d %d-grams\n", total, G);
   return total;
}

/* SaveLangModel: save language model lm to lmFn */
void SaveLangModel(char *lmFn, BackOffLM *lm)
{
   char c=' ';
   int i,n;
   FILE *f;
   NGramInfo *gi;
   Boolean isPipe,isUltra;

#ifdef HTK_CRYPT
   if (lm->encrypt) {
      TMP_OPEN(f,lmFn,HError(15411,"SaveLangModel: Cannot create lm file %s",lmFn));
   }
   else
#endif
   if ((f = FOpen(lmFn, LangModOFilter, &isPipe))==NULL)
      HError(15411,"SaveLangModel: Unable to open output file %s",lmFn);
   WriteHeaderInfo(f,lm);
   fprintf(f, "\\data\\\n");
   isUltra = FALSE;
   for (gi=lm->gInfo+1,i=1; i<=lm->nSize; i++,gi++) {
      switch (gi->fmt) {
         case LMF_TEXT:   c = '='; break;
         case LMF_BINARY: c = '~'; break;
         case LMF_ULTRA:  c = '#'; isUltra = TRUE; break;
         default:
            HError(15490,"SaveLangModel: Unknown LM file format (%d) for %d-gram",gi->fmt,i);
      }
      fprintf(f, "ngram %d%c%d\n",i,c,gi->nEntry);
   }
   if (isUltra) {
#ifdef ULTRA_LM
      ultraKey[KEY_LENGTH-1] = (vaxOrder && natWriteOrder) ? 1 : 0;
      fprintf(f,"KEY: ");
      for (i=0; i<KEY_LENGTH; i++) fprintf(f,"%02x ",ultraKey[i]);
      fprintf(f,"\n");
      SaveNGram(f,1,lm);
      SaveUltraNGrams(f,lm);
#else
      HError(15490,"SaveLangModel: Ultra format LMs not supported");
#endif
   } else {
      for (i=1; i<=lm->nSize; i++) {
         if ((n=SaveNGram(f,i,lm))!=lm->gInfo[i].nEntry) {
            HError(-15490,"SaveLangModel: %d-gram nEntry = %d, actual saved %d",
                   i,lm->gInfo[i].nEntry,n);
            lm->gInfo[i].nEntry = n;
         }
      }
   }
   fprintf(f, "\n\\end\\\n");
#ifdef HTK_CRYPT
   if (lm->encrypt) {
      FILE *crf;
      TMP_REWIND(f);
      if ((crf = FOpen(lmFn,LangModOFilter,&isPipe)) == NULL) {
         TMP_CLOSE(f,lmFn);
         HError(15411,"SaveLangModel: Cannot create LM file %s",lmFn);
      }
      EncryptFile(lmFn,crf,f);
      FClose(crf,isPipe);
      TMP_CLOSE(f,lmFn);
   }
   else
#endif
   FClose(f,isPipe);
}
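For reference, SaveLangModel emits a standard ARPA-style preamble in which the character between each n-gram order and its entry count records the per-order section format chosen above: '=' for text, '~' for binary, '#' for ultra. A trigram model saved with text unigrams and binary higher orders would therefore begin roughly as follows (the counts are invented for illustration; WriteHeaderInfo output is omitted):

   \data\
   ngram 1=65000
   ngram 2~2143591
   ngram 3~5478470

   \1-grams:
   ...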

/*---------------------- N-gram access ---------------------- */

/* EXPORT-> GetNGramProb: generic LM access V2 */
float GetNGramProb(BackOffLM *lm, NameId *words, int nSize)
{
   int i;
   float prob;
   SMEntry *se;
   FLEntry *fe;
   AccessInfo *acs;
   LMProbType ptype;
   char *s, sbuf[256];
   static int rLev = -1;
   float prob_mult = 0.0;

   /* NGram probability lookup works like this:
      1) We see if we're looking for a unigram and if so search for an
         appropriate leaf SMEntry at the root level. If we don't find
         one then we must abort with an error at this point.
      2) For other lengths we search for the path down the tree to the
         FLEntry for the given history. If we don't find a full history
         path we reduce the context and call ourselves recursively.
      3) If we found the context then we look at the SMEntry elements
         at the FLEntry node to see if we can find our word with the
         given history. If we can then we return the stored probability
         otherwise we recursively call ourselves again with a reduced
         history, multiplying by the back-off weight associated with
         the given history (at the FLEntry node) when we return.
   */
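   /* Worked illustration (editorial note, assuming log-domain probs,
      i.e. ptype != LMP_FLOAT): asking for P(c | a b) in a trigram LM
      where (a,b,c) is not stored but the context (a,b) is gives
         log P(c | a b) = bowt(a,b) + log P(c | b)
      and if (b,c) is missing as well the recursion bottoms out at
         log P(c | a b) = bowt(a,b) + bowt(b) + log P(c).
      For a class LM the result is additionally offset by the stored
      log P(word | class) of the predicted word (prob_mult below). */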
   /* If we're using a class-based language model then we still get passed
      a word history which must be converted into a class history */
   if (lm->classLM) {
      /* Retrieve word|class probability for word we want to predict */
      prob_mult = ((WordProb*)(words[nSize-1]->ptr))->prob;
      if (trace&T_PROB) {
         if (lm->probType & LMP_FLOAT) { /* this never happens in practice */
            printf("<w|c mult=%5.2f> ", UNLOG_NATURAL(prob_mult));
         }
         else {
            printf("<w|c mult=%5.2f> ", prob_mult);
         }
      }
      /* Convert word N-gram into class N-gram */
      for (i=0; i<nSize; i++) {
         words[i] = ((WordProb*)(words[i]->ptr))->class;
      }
   }
   rLev++;
   ptype = lm->probType;
   if (nSize > lm->nSize) {
      words += nSize-lm->nSize; nSize = lm->nSize;
   }
   acs = lm->gInfo[nSize].aInfo; acs->count++;
   if (trace&T_PROB) {
      printf("[ ");
      printf("(%s",words[nSize-1]->name);
      if (nSize > 1) {
         printf(" |");
         for(i=0; i<nSize-1; i++) printf(" %s",words[i]->name);
      }
      printf(") ");
   }
   if (nSize==1) {  /* lookup unigram separately */
      if ((se = FindSE(lm->root.sea,0,lm->root.nse,LM_INDEX(words[0])))==NULL)
         HError(15490,"GetNGramProb: Unable to find %s in unigrams",words[0]->name);
#ifdef LM_COMPACT
      prob = Shrt2Prob(se->prob) * lm->gScale;
#else
      prob = se->prob;
#endif
      if (trace&T_PROB)
         printf("exact, ");
   } else {         /* generic n-gram lookup, n>1 */
      for (fe=&(lm->root), i=0; i<nSize-1; i++) {
         if ((fe=FindFE(fe->fea, 0, fe->nfe, LM_INDEX(words[i])))==NULL)
            break;
      }
      if ((fe == NULL) || (fe->nse == 0)) {
         if (lm->classLM) {
            lm->classLM = FALSE;
            prob = GetNGramProb(lm,words+1,nSize-1);
            lm->classLM = TRUE;
         }
         else prob = GetNGramProb(lm,words+1,nSize-1);
         if (trace&T_PROB)
            printf("replaced, ");
         acs->nmiss++;
         if ((trace&T_TOP) && (fe != NULL) && (fe->nse == 0)) {
            for (s = sbuf, i=0; i<nSize-1; i++) {
               sprintf(s,"%s ",words[i]->name); s+=strlen(s);
            }
            HError(-15492, "GetNGramProb: FLEntry.nse==0; original ARPA LM?\n%s",sbuf);
         }
      } else {
         if ((se = FindSE(fe->sea, 0, fe->nse, LM_INDEX(words[nSize-1])))!=NULL) {
#ifdef LM_COMPACT
            prob = Shrt2Prob(se->prob) * lm->gScale;
#else
            prob = se->prob;
#endif
            if (trace&T_PROB)
               printf("exact, ");
            acs->nhits++;
         } else {
            if (lm->classLM) {
               lm->classLM = FALSE;
               prob = GetNGramProb(lm,words+1,nSize-1);
               lm->classLM = TRUE;
            }
            else prob = GetNGramProb(lm,words+1,nSize-1);
            if (ptype==LMP_FLOAT)
               prob *= fe->bowt;
            else
               prob += fe->bowt;
            if (trace&T_PROB)
               printf("backed-off %.4f, ",fe->bowt);
            acs->nboff++;
         }
      }
   }
   if (lm->classLM) {
      if (lm->probType & LMP_FLOAT) {
         /* This looks nasty but in fact we never execute this */
         prob *= UNLOG_NATURAL(prob_mult);
      }
      else {
         prob += prob_mult;
      }
   }
   acs->prob += prob; acs->prob2 += prob*prob;
   if (trace&T_PROB)
      printf("prob %.4f ]%s",prob,(rLev==0) ? "\n" : " ");
   rLev--;
   return prob;
}
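As a usage illustration (editorial, not part of lmodel.c): a caller resolves each word of the n-gram to its NameId through the model's hash table, just as LMTrans2 does below, and passes the array history-first with the predicted word last. The wrapper name and its error message are invented for this sketch, which assumes a plain word-based (non-class) model; a class model would resolve names via lm->classH instead.

/* Hypothetical sketch only -- not part of lmodel.c */
static float TrigramProb(BackOffLM *lm, char *w1, char *w2, char *w3)
{
   NameId ng[3];   /* history first (w1 w2), predicted word last (w3) */

   ng[0] = GetNameId(lm->htab, w1, FALSE);
   ng[1] = GetNameId(lm->htab, w2, FALSE);
   ng[2] = GetNameId(lm->htab, w3, FALSE);
   if (ng[0]==NULL || ng[1]==NULL || ng[2]==NULL)
      HError(15499, "TrigramProb: word not in LM vocabulary");
   return GetNGramProb(lm, ng, 3);  /* log prob, or linear if LMP_FLOAT */
}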
"\n" : " ");   rLev--;   return prob;}/* EXPORT-> LMTrans: calls GetNGramProb, but instead of taking a full   n-gram of context we take a pointer to a context and a single word;   we also return a langage model context state */LogFloat LMTrans2(LModel *LM, LMState src, LabId word, LMState *dest){   NameId ngram[LM_NSIZE], ngramRev[LM_NSIZE];   int nSize;   float prob;   NameId nid;   LogFloat prob_mult = 0.0;   FLEntry *context, *fe;   SMEntry *se;   BackOffLM *lm;   float bo_weight;   LMProbType ptype;   int i, index;   int nShorten;  /* Amount to shorten n-gram by when searching for prob */   lm = LM->data.hlmModel;   ptype = lm->probType;   if (src) {      context = (FLEntry *) src;   }   else {      context = &(lm->root); /* No context yet */   }   /* Convert word text to NameId */   if (lm->classLM) { /* class model */      nid = GetNameId(lm->classH, word->name, FALSE);      if (!nid)         HError(15499, "LMTrans: Attempt to predict token '%s' which is not in vocabulary", word);      /* Find word-given-class probability and convert to a class */      prob_mult = ((WordProb*)(nid->ptr))->prob;      if (trace&T_PROB) {         if (ptype & LMP_FLOAT) { /* this first never happens in practice */            printf("<w|c mult=%5.2f> ", UNLOG_NATURAL(prob_mult));         }         else {            printf("<w|c mult=%5.2f> ", prob_mult);         }      }   }   else { /* not a class model */      nid = GetNameId(lm->htab, word->name, FALSE);      if (!nid)         HError(15499, "LMTrans: Attempt to predict token '%s' which is not in vocabulary", word);   }   /* We need to reconstruct the context later so do it now incase we need to back off */   fe = context;   nSize = 0;   while (fe && fe!=&(lm->root) && nSize<LM_NSIZE) {      ngramRev[nSize] = lm->binMap[fe->ndx];      fe = fe->parent;      nSize++;   }   if (nSize>=LM_NSIZE)      HError(15499, "LMTrans: Context rebuilt to longer than compiled ngram size limit of %d", LM_NSIZE);   /* And now we know the length we can reverse it */   for (i=0; i<nSize; i++) ngram[i] = ngramRev[nSize-(i+1)];   ngram[nSize] = nid;   nSize++;   /* For debugging purposes, print out the full ngram */   /*printf("nsize=%d  ", nSize);     for (i=0; i<nSize; i++) printf("%s ", ngram[i]->name); printf("\n");*/   /* Search for probability */   if (ptype & LMP_FLOAT)      bo_weight = 1;   else      bo_weight = 0;   se = FindSE(context->sea, 0, context->nse, LM_INDEX(nid));   nShorten = 0;   fe = context;   while (!se) {      /* Multiply BO weight and shorten context */      if (ptype & LMP_FLOAT)         bo_weight *= fe->bowt;      else         bo_weight += fe->bowt;      nShorten++;      if (nShorten==nSize) { /* Unigram probability */         se = FindSE(lm->root.sea, 0, lm->root.nse, LM_INDEX(nid));         if (!se)            HError(15490, "LMTrans: Unable to find %s in unigrams", nid->name);      }      else { /* n>1 */         fe = &(lm->root);         for (i=nShorten; i<nSize-1; i++) {            fe = FindFE(fe->fea, 0, fe->nfe, LM_INDEX(ngram[i]));            if (!fe) HError(15491, "LMTrans: Unable to find shortened context in LM");         }         se = FindSE(fe->sea, 0, fe->nse, LM_INDEX(ngram[i]));      }   }#ifdef LM_COMPACT   prob = Shrt2Prob(se->prob) * lm->gScale;#else   prob = se->prob;#endif   if (ptype & LMP_FLOAT) {      prob = prob * bo_weight;   }   else {      prob = prob + bo_weight;   }   /* Now look for FLEntry for new context for any further following word */   /* Decide from which point in the context we start searching */   if (nSize == 

/* EXPORT-> GetNGramAddress: same as GetNGramProb but returns the address
   of a structure, which provides a unique id for a particular context.
   This is used with the Lattice Toolkit.
   The final word in words[] (i.e. words[nSize-1]) is a dummy entry which
   is never used; its value is undefined and should not be interpreted.
   It works like this in order to parallel GetNGramProb() */
void *GetNGramAddress(BackOffLM *lm, NameId *words, int nSize)
{
   int i;
   FLEntry *fe;
   char *s, sbuf[256];
   static int rLev
