亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频

? 歡迎來到蟲蟲下載站! | ?? 資源下載 ?? 資源專輯 ?? 關于我們
? 蟲蟲下載站

?? cluster.c

?? 隱馬爾科夫模型工具箱
?? C
?? 第 1 頁 / 共 5 頁
字號:
/* NOTE(review): this chunk was recovered from a paginated web listing of
   HTK/HLM Cluster.c. The opening function's signature (bigram_add) is on a
   previous page, and the final function (main) is cut off mid-statement at
   the end of the chunk. Only comments/formatting are changed here; all code
   tokens are as found. */

/* Tail of bigram_add(): buffers successive (ng[0], ng[1], count) bigram
   entries for the current first-word, flushing the buffer into the global
   'forward' table whenever the first word changes. Relies on callers passing
   bigrams grouped by first word (last_word tracks the group in progress). */
{
   bi_count *ptr;
   int       space_used;

   if ((ng[0]>=max_words) || (ng[1]>=max_words)) {
      /* Something's gone wrong */
      HError(17093, "bigram_add: Found a word id higher than the base+number of words - all word ids are expected to be allocated in an unbroken chunk.\n[Current bigram is (%d,%d). Number of words is %d]", ng[0], ng[1], max_words);
   }
   /* Keep backward count (sized here, filled in later by bigram_added_all) */
   backward[ng[1]].size++;
   if (ng[0] == last_word) {
      /* Make sure there's room in the buffer */
      if (store_idx >= curr_bistore_size) {
         /* Expand bigram buffer store to cope */
         curr_bistore_size += bigram_buffer_grow;
         if (trace & T_MEM) {
            printf("Expanding bigram read buffer to %d entries\n", curr_bistore_size);
         }
         /* NOTE(review): 'store = realloc(store, ...)' overwrites the only
            copy of the pointer and the result is unchecked - on allocation
            failure the original buffer leaks and the next store[] write
            dereferences NULL. Should use a temporary and test it. */
         store = realloc(store, curr_bistore_size*sizeof(bi_count));
      }
      /* Store in buffer */
      store[store_idx].id = ng[1];
      store[store_idx].count = count;
      store_idx++;
      return;
   }
   /* Otherwise we must have just gone on to a new word, so keep the old
      details */
   forward[last_word].size = store_idx;
   space_used = store_idx*sizeof(bi_count);
   ptr = get_space(space_used);
   memcpy(ptr, store, space_used);
   forward[last_word].bi = ptr;
   /* And go on to the next entry */
   last_word = ng[0];
   store[0].id = ng[1];
   store[0].count = count;
   store_idx = 1;
}

/* Call when all bigrams have been passed in.
   Flushes the final buffered first-word group into 'forward', frees the read
   buffer, then builds the 'backward' (second-word indexed) lookup table from
   the forward data, accumulating sum_of_all_bigram_counts along the way. */
void bigram_added_all(void)
{
   bi_count *ptr;
   int       space_used;
   int       i, j, backward_id;

   /* Store last set of details */
   forward[last_word].size = store_idx;
   space_used = store_idx*sizeof(bi_count);
   ptr = get_space(space_used);
   memcpy(ptr, store, space_used);
   forward[last_word].bi = ptr;
   free(store);
   sum_of_all_bigram_counts = 0;
   /* Generate backward lookup table */
   if (trace & T_EXTRA) {
      printf("Building bigram backward lookup table...");
      fflush(stdout);
   }
   /* Allocate required storage space - backward[i].size was pre-counted by
      bigram_add as each bigram arrived */
   for (i=0; i<max_words; i++) {
      backward[i].bi = get_space(backward[i].size * sizeof(bi_count));
      backward[i].size = 0; /* Reset to use as counter when building data */
   }
   /* Run through all forward data, copying into backward array */
   for (i=0; i<max_words; i++) {
      for (j=0; j<forward[i].size; j++) {
         backward_id = forward[i].bi[j].id;
         backward[backward_id].bi[backward[backward_id].size].id = i;
         backward[backward_id].bi[backward[backward_id].size].count
                                            = forward[i].bi[j].count;
         backward[backward_id].size++;
         sum_of_all_bigram_counts += forward[i].bi[j].count;
      }
   }
   if (trace & T_EXTRA) {
      printf(" done\n");
   }
}

/* Must be called before almost any other function in this file will work.
   Allocates the forward/backward bigram tables for 'words' word ids and the
   initial growable read buffer used by bigram_add. */
void bigram_init(int words) /* Pass ->used field from word-map */
{
   max_words = words;
   forward = CNew(&global_stack, words * sizeof(bigrams));
   backward = CNew(&global_stack, words * sizeof(bigrams));
   if (trace & T_MEM) {
      printf("Bigram store for %d words created\n", words);
   }
   last_word = 0;
   store_idx = 0;
   curr_bistore_size = initial_bigram_buffer;
   /* NOTE(review): calloc result is unchecked before first use */
   store = calloc(initial_bigram_buffer, sizeof(bi_count));
   if (trace & T_MEM) {
      printf("Bigram read buffer of %d entries created\n", initial_bigram_buffer);
   }
}

/* Main program control function.
   Parses command-line switches, loads the word map and gram files, builds
   unigram/bigram statistics, then runs the word-class clustering loop.
   NOTE(review): truncated mid-statement at the end of this page. */
int main(int argc, char *argv[])
{
   char *s;
   float weight;   /* used when loading gram files */
   char *filename; /* used when loading gram files */
   int   iterations=1, loop;
   char *init_cmap = NULL;
   char *recover_from = NULL;
   char *write_classprobs = NULL;
   char *write_classcounts = NULL;
   Boolean read_gram_files=FALSE; /* Has the user passed any gram files? */
   Boolean set_classes = FALSE, loaded_map = FALSE; /* Check for -c and -l */
   Boolean keep_unk_sep = FALSE; /* Was -k passed? */
   Boolean passed_unk = FALSE; /* Unknown word was passed in */
   int start_word_id, end_word_id, unknown_word_id;
   int numb_classes, min_classes;
   char *ptr, *ptr2; /* temp results */

   /* Initialise HTK/HLM modules */
   InitShell(argc, argv, Cluster_version, Cluster_vc_id);
   InitMem();
   InitMath();
   InitWave();
   InitLabel();
   InitLUtil();
   InitWMap();
   InitGBase();
   SetConfParms();

   /* Default start, end and unknown words */
   strcpy(sent_start, DEF_STARTWORD);
   strcpy(sent_end, DEF_ENDWORD);
   strcpy(unknown_w, DEF_UNKNOWNNAME);

   /* Default number of classes */
   numb_classes = classes_get_default();

   /* Parse command line */
   if (!InfoPrinted() && NumArgs() == 0)
      ReportUsage();
   if (NumArgs() == 0)
      Exit(EXIT_FAILURE);

   /* Create a global stack and heap */
   CreateHeap(&global_stack, "Clusterer stack", MSTAK, 1, 0.0, 8192, 8192);
   CreateHeap(&global_heap, "Clusterer heap", MHEAP, block_grab_size, 0.0, 1, 1);

   while (NextArg() == SWITCHARG) {
      s = GetSwtArg();
      if (strlen(s) !=1 )
         HError(17019, "Cluster: Bad switch %s; must be single letter",s);
      switch(s[0]) {
         case 'c': /* number of classes to produce */
            if (NextArg()!=INTARG)
               HError(17019,"Cluster: number of categories expected for -c");
            numb_classes = GetIntArg();
            classes_set_number(numb_classes);
            set_classes = TRUE;
            break;
         case 'i': /* number of clustering iterations */
            if (NextArg()!=INTARG)
               HError(17019,"Cluster: number of iterations expected for -i");
            iterations = GetIntArg();
            break;
         case 'r': /* recovery file export frequency */
            if (NextArg()!=INTARG)
               HError(17019,"Cluster: recovery export frequency expected for -r");
            rec_freq = GetIntArg();
            break;
         case 'm':
            classes_showMLV(1);
            break;
         case 'o': /* output filename prefix */
            if (NextArg()!=STRINGARG)
               HError(17019,"Cluster: output filename prefix expected for -o");
            set_output_prefix(GetStrArg());
            break;
         case 'p': /* write word|class probabilities */
            if (NextArg()!=STRINGARG)
               HError(17019,"Cluster: output filename expected for -p");
            write_classprobs = GetStrArg();
            break;
         case 'q': /* write word|class counts */
            if (NextArg()!=STRINGARG)
               HError(17019,"Cluster: output filename expected for -q");
            write_classcounts = GetStrArg();
            break;
         case 'l': /* load an existing class map */
            if (NextArg()!=STRINGARG)
               HError(17019,"Cluster: output filename prefix expected for -l");
            init_cmap = GetStrArg();
            loaded_map = TRUE;
            break;
         case 's': /* sentence start token */
            if (NextArg()!=STRINGARG)
               HError(17019,"Cluster: sentence start word expected for -s");
            strcpy(sent_start, GetStrArg());
            break;
         case 't': /* sentence end token */
            if (NextArg()!=STRINGARG)
               HError(17019,"Cluster: sentence end word expected for -t");
            strcpy(sent_end, GetStrArg());
            break;
         case 'u': /* unknown word token */
            if (NextArg()!=STRINGARG)
               HError(17019,"Cluster: unknown word token expected for -u");
            strcpy(unknown_w, GetStrArg());
            passed_unk = TRUE;
            break;
         case 'x': /* resume from a recovery file */
            if (NextArg()!=STRINGARG)
               HError(17019,"Cluster: recovery filename expected for -x");
            recover_from = GetStrArg();
            break;
         case 'w': /* word map sort order: WMAP or FREQ */
            if (NextArg()!=STRINGARG)
               HError(17019, "Cluster: wordmap sort order expected for -w");
            strcpy(tmp, GetStrArg());
            /* upper-case the argument in place before comparing */
            for (ptr=tmp; *ptr!=0; *ptr=toupper(*ptr), ptr++);
            if (strcmp(tmp, "WMAP")==0) {
               sort_order = SORT_WMAP;
            }
            else if (strcmp(tmp, "FREQ")==0) {
               sort_order = SORT_FREQ;
            }
            else {
               HError(17019, "Cluster: -w expects either WMAP or FREQ");
            }
            break;
         case 'k': /* keep the unknown word in its own singleton class */
            classes_keep_unk_separate(TRUE);
            keep_unk_sep = TRUE;
            break;
         case 'v':
            verbose = TRUE;
            break;
         case 'n':
            write_logfile = !write_logfile;
            break;
         case 'T':
            trace = GetChkedInt(0,017,s); break;
         default:
            HError(17019,"Cluster: Unknown switch %s",s);
      }
   }
   if (NextArg()!=STRINGARG)
      HError(17019, "Cluster: word map file name expected");
   CreateWordMap(GetStrArg(), &wmap, 0);

   min_classes = 4 + (keep_unk_sep?1:0); /* Minimum number of classes */
   if (loaded_map && set_classes) {
      HError(-17019, "Ignoring -c option: when combined with -l the number of classes in the existing map must be used");
   }
   else if (numb_classes < min_classes) {
      HError(17019, "It doesn't make sense to specify less than %d classes -\n    %d classes are reserved, and you need at least 2 more", min_classes, min_classes-2);
   }

   /* See if start and end word occur in the data */
   if (!GetLabId(sent_start, FALSE)) {
      /* NOTE(review): format string contains %s but no matching argument
         (sent_start is missing) - undefined behaviour when triggered */
      HError(17051, "Sentence start token '%s' not in word list");
   }
   if (!GetLabId(sent_end, FALSE)) {
      /* NOTE(review): same missing-argument defect here (sent_end) */
      HError(17051, "Sentence end token '%s' not in word list");
   }
   /* We can't keep the unknown word in its own class if one wasn't passed */
   if (!GetLabId(unknown_w, FALSE) && keep_unk_sep) {
      HError(17051, "Unknown word token '%s' not in word list and -k passed", unknown_w);
   }
   /* And generate a sensible warning if necessary: */
   if (!GetLabId(unknown_w, FALSE) && passed_unk) {
      HError(-17051, "Unknown word token '%s' was explicitly given with -u, but does not occur in the word map", unknown_w);
   }
   start_word_id = GetMEIndex(&wmap, (((MapEntry *)(GetLabId(sent_start, FALSE)->aux))->ndx));
   end_word_id = GetMEIndex(&wmap, (((MapEntry *)(GetLabId(sent_end, FALSE)->aux))->ndx));
   if (keep_unk_sep) {
      unknown_word_id = GetMEIndex(&wmap, (((MapEntry *)(GetLabId(unknown_w, FALSE)->aux))->ndx));
   }
   else {
      unknown_word_id = 0;
   }
   set_ids(start_word_id, end_word_id, unknown_word_id);

   /* If we're doing no iterations we want to ignore the given filename
      prefix and use the one from the classmap - this way we'll write the
      correct information into the saved probabilities file header */
   if (iterations==0 && init_cmap) {
      /* strip the last two '.'-separated extensions from init_cmap (restored
         afterwards) to recover the original output prefix */
      ptr = strrchr(init_cmap, '.');
      if (ptr) {
         *ptr = '\0';
         ptr2 = strrchr(init_cmap, '.');
         if (ptr2) {
            *ptr2 = '\0';
            set_output_prefix(init_cmap);
            *ptr2 = '.';
         }
         else
            set_output_prefix(init_cmap);
         *ptr = '.';
      }
      else {
         set_output_prefix(init_cmap);
      }
   }
   if (trace & T_FILE) {
      printf("Wordmap loaded - %d words\n", wmap.used);
   }
   unigram_init(wmap.used);
   bigram_init(wmap.used);

   /* Add input gram files to input set */
   if (trace & T_TOP)
      printf("Preparing input gram set\n");
   CreateHeap(&imem, "inputset", MSTAK, 1, 0.0, 1000, 1000);
   CreateHeap(&imem2, "inputset2", MSTAK, 1, 0.0, 1000, 1000);
   CreateInputSet(&imem, &wmap, &inset);
   CreateInputSet(&imem2, &wmap, &inset2);
   weight = 1.0;
   /* Remaining args are gram files, optionally preceded by a float weight
      which persists for subsequent files until overridden */
   while (NextArg() == STRINGARG || NextArg() == FLOATARG) {
      if (NextArg() == FLOATARG) {
         weight = GetFltArg();
      }
      if (weight==0.0 || weight<-10000.0 || weight>10000.0) {
         HError(17019, "Improbable gram file weight (%.4f)", weight);
      }
      if (NextArg()!=STRINGARG) {
         HError(17019,"Gram file name expected");
      }
      filename = GetStrArg();
      AddInputGFile(&inset, filename, weight);
      AddInputGFile(&inset2, filename, weight);
      read_gram_files = TRUE;
      if (trace & T_TOP)
         printf("Input gram file %s added (weight=%f)\n", filename, weight);
   }
   if (!read_gram_files) {
      HError(17019, "No gram files passed");
   }
   LoadBiGrams();
   LoadUniGrams();
   bigram_added_all();
   DeleteHeap(&imem);
   DeleteHeap(&imem2);

   if (init_cmap) {
      import_classmap(init_cmap, wmap.used);
   }
   else if (recover_from) {
      do_recovery(recover_from, wmap.used);
   }

   /* Allocate memory and compute bigram pair arrays */
   if (!recover_from) {
      classes_init(wmap.used);
      /* Perform default initial clustering */
      if (!init_cmap) {
         initial_cluster();
      }
      /* Calculate initial counts required */
      setup_all_counts();
   }

   /* Run clustering algorithm */
   for (loop=0; loop<iterations; loop++) {
      cluster_words(1);
      export_classes(0);
   }
   if (write_classprobs) {
      write_word_probs(write_classprobs);
   }
   if (write_classcounts) {
      write_word_counts(write_classcounts);
   }
   /* NOTE(review): listing truncated here by the source web page */
   if (trace &

?? 快捷鍵說明

復制代碼 Ctrl + C
搜索代碼 Ctrl + F
全屏模式 F11
切換主題 Ctrl + Shift + D
顯示快捷鍵 ?
增大字號 Ctrl + =
減小字號 Ctrl + -
亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频
免费观看30秒视频久久| 亚洲美女屁股眼交| 欧美色图12p| 精品综合久久久久久8888| 久久99在线观看| 91高清在线观看| 欧美国产精品一区二区三区| 亚洲韩国一区二区三区| 风间由美一区二区av101| 欧美日韩国产综合草草| 国产精品美女久久久久aⅴ国产馆 国产精品美女久久久久av爽李琼 国产精品美女久久久久高潮 | 欧美日韩精品一区二区天天拍小说| 精品久久久久久久久久久久包黑料 | 日本不卡1234视频| 日本不卡视频在线| 美女一区二区三区| 久久99精品久久只有精品| 国内精品久久久久影院色| 国产毛片精品视频| 91丝袜美腿高跟国产极品老师 | 91在线国产观看| 欧美在线视频日韩| 欧美一区二区三区爱爱| 精品理论电影在线| 欧美国产一区二区在线观看 | 91精品国产综合久久久蜜臀图片| 91精品国产综合久久国产大片| 欧美日韩国产首页在线观看| 日韩视频一区二区三区在线播放 | 成人app网站| 欧美色视频在线观看| 精品入口麻豆88视频| 日本一区二区电影| 亚洲大尺度视频在线观看| 久久99国产精品久久99| 色悠久久久久综合欧美99| 欧美日韩免费高清一区色橹橹 | 欧美一区二区三区啪啪| 久久亚洲精品小早川怜子| 亚洲精品日韩综合观看成人91| 日韩高清一级片| jvid福利写真一区二区三区| 欧美日韩精品一区视频| 国产精品亲子伦对白| 蜜臀av一区二区| 91女人视频在线观看| 亚洲精品在线网站| 亚洲成人免费在线| 成人伦理片在线| 国产高清不卡一区| 欧美一区二视频| 狠狠色伊人亚洲综合成人| 日本一区二区不卡视频| 99精品1区2区| 亚洲福利视频一区| 日韩欧美在线影院| 国产aⅴ精品一区二区三区色成熟| 国产日本一区二区| 欧美高清性hdvideosex| 看国产成人h片视频| 中文字幕久久午夜不卡| 日本久久一区二区三区| 天天免费综合色| 亚洲精品在线电影| 一本大道久久a久久综合| 图片区日韩欧美亚洲| 日韩欧美国产成人一区二区| 99视频国产精品| 中文字幕第一区二区| 精品亚洲国内自在自线福利| 制服丝袜亚洲播放| 三级不卡在线观看| 欧美日韩精品欧美日韩精品一综合| 亚洲四区在线观看| 91香蕉视频在线| 中文字幕在线观看不卡| 波多野结衣91| 国产精品乱人伦中文| 国产91清纯白嫩初高中在线观看| 精品国产髙清在线看国产毛片| 午夜视频一区二区| 制服丝袜日韩国产| 免费精品视频最新在线| 日韩欧美中文字幕精品| 久久99精品久久久久婷婷| 欧美精品一区二区三区蜜臀| 国产一区二区中文字幕| 亚洲国产高清在线观看视频| 成人一区二区三区中文字幕| 国产精品久久久久久久久图文区| 北条麻妃国产九九精品视频| 亚洲精品菠萝久久久久久久| 91黄色激情网站| 男女视频一区二区| 久久久夜色精品亚洲| eeuss影院一区二区三区| 最新热久久免费视频| 欧美色综合影院| 另类小说欧美激情| 欧美国产亚洲另类动漫| 欧美亚洲图片小说| 麻豆免费精品视频| 国产精品久久久久影院老司 | 亚洲欧美aⅴ...| 7777精品伊人久久久大香线蕉完整版| 日本伊人色综合网| 国产欧美精品在线观看| 色94色欧美sute亚洲线路二| 天堂一区二区在线免费观看| 久久精品视频在线看| 91丨九色丨蝌蚪富婆spa| 日日摸夜夜添夜夜添精品视频| 亚洲精品在线免费观看视频| 99久久国产综合精品女不卡| 亚洲成人免费看| 国产精品午夜在线| 欧美一区二区不卡视频| 成人性生交大片免费看中文网站| 亚洲精品乱码久久久久久久久| 日韩一区二区三区电影| 99视频一区二区| 黑人精品欧美一区二区蜜桃| 亚洲九九爱视频| 国内精品久久久久影院薰衣草 | 日本视频一区二区| 婷婷综合在线观看| 日韩经典中文字幕一区| 五月婷婷色综合| 麻豆91免费看| 国产一区二区看久久| 成人中文字幕在线| 欧美变态口味重另类| 日韩三级视频在线观看| 久久先锋影音av| 亚洲欧洲日韩av| 亚洲一区欧美一区| 日韩中文字幕av电影| 麻豆久久久久久久| 粉嫩久久99精品久久久久久夜| 成人亚洲一区二区一| 色婷婷av久久久久久久| 日本aⅴ免费视频一区二区三区| 精品99久久久久久| 精品免费99久久| 日韩一区二区高清| 在线不卡免费欧美| 欧美日韩三级一区| 欧美日韩亚洲综合在线| 在线视频国内一区二区| 91丝袜高跟美女视频| 99精品视频在线观看免费| 成人综合在线网站| 国产成人亚洲精品青草天美| 精品一区二区免费在线观看| 蜜臀a∨国产成人精品| 日韩国产欧美视频| 日本不卡123| 老司机精品视频在线| 琪琪一区二区三区| 美女在线观看视频一区二区| 久久电影网电视剧免费观看| 欧美a一区二区| 
麻豆成人在线观看| 国产一区二区三区日韩| 国产白丝网站精品污在线入口| 国产麻豆一精品一av一免费| 高清不卡一二三区| 成人国产免费视频| 色猫猫国产区一区二在线视频| 在线免费观看日本一区| 欧美精品丝袜久久久中文字幕| 91精品国产一区二区人妖| 日韩精品专区在线影院重磅| 精品999久久久| 亚洲欧美影音先锋| 视频在线观看一区二区三区| 日韩成人午夜精品| 国产另类ts人妖一区二区| 风间由美一区二区三区在线观看| 99久久综合精品| 欧美剧情电影在线观看完整版免费励志电影 | 9191久久久久久久久久久| 777奇米成人网| 中文字幕精品三区| 亚洲第一福利视频在线| 精品一区二区久久| 色老汉av一区二区三区| 欧美变态口味重另类| 亚洲男同1069视频| 韩国女主播成人在线| 99国产精品视频免费观看| 91精品国产91久久久久久最新毛片| 久久久精品欧美丰满| 亚洲综合999| 高清在线观看日韩| 欧美一级生活片| 最近日韩中文字幕| 国产一区亚洲一区| 69堂精品视频| 一个色在线综合|