dci.cc (page 1 of 3)

This will produce the dci program in the src/ directory, or in /path/to/your/installation/bin if y…
        num_freq++;
        next_freq.add_itemset(cand, (T1) count_pair[1], cand[iter-2]);
        if (!first_order) {
          for (int i = 0; i < iter; i++)
            counters.flag_item[cand[i]] = true;
          counters.first_item_counts[cand[0]]++;
        }
      }
      else if (cand[iter-1] == key_pair[1]) { // the key pattern is the second generator
        num_freq++;
        next_freq.add_itemset(cand, (T1) count_pair[0], cand[iter-1]);
        if (!first_order) {
          for (int i = 0; i < iter; i++)
            counters.flag_item[cand[i]] = true;
          counters.first_item_counts[cand[0]]++;
        }
      }
      else { // the key pattern is another subset: we must find it
        if (key_pair[0] != (T) -1)
          key = key_pair[0];
        else
          key = key_pair[1];

        int j = 0;
        for (int i = 0; i < iter; i++)
          if (cand[i] != key)
            cand_subset[j++] = cand[i];

        T tmp_key;
        if (previous_freq.find_one_subset(cand_subset, tmp_key, count)) {
          num_freq++;
          next_freq.add_itemset(cand, (T1) count, key);
          if (!first_order) {
            for (int i = 0; i < iter; i++)
              counters.flag_item[cand[i]] = true;
            counters.first_item_counts[cand[0]]++;
          }
        }
      }
    } else {
      if (count_pair[0] < count_pair[1]) { // remember min_count and corresponding key between generators
        min_count = count_pair[0];
        min_key = cand[iter - 1];
      }
      else {
        min_count = count_pair[1];
        min_key = cand[iter - 2];
      }

      T other_key;
      bool is_key_pattern = true;
      bool pruned = false;

      for (int del = iter-3; del >= 0; del--) { // look for the subset with minimum count
        int j = 0;
        for (int i = 0; i < iter; i++)
          if (i != del)
            cand_subset[j++] = cand[i];
        if (previous_freq.find_one_subset(cand_subset, other_key, count) == 0) {
          pruned = true;
          break; // a subset is infrequent, prune the cand and take the next one
        }

        if (other_key == (T) -1) { // remember min_count and corresponding key
          if (min_count >= (T1) count) {
            min_count = count;
            min_key = cand[del];
          }
        } else { // cand is not a key pattern
          is_key_pattern = false;
          int j1 = 0;
          for (int i = 0; i < iter; i++)
            if (cand[i] != other_key)
              cand_subset[j1++] = cand[i];

          T tmp_key; // check if the associated subset is frequent
          if (previous_freq.find_one_subset(cand_subset, tmp_key, count)) {
            num_freq++;
            next_freq.add_itemset(cand, (T1) count, other_key);
            if (!first_order) {
              for (int i = 0; i < iter; i++)
                counters.flag_item[cand[i]] = true;
              counters.first_item_counts[cand[0]]++;
            }
            break;
          }
        }
      }
      if (!pruned && is_key_pattern) { // we must count candidate support!
        int prefix_len;
        for (prefix_len = 0; prefix_len < iter-1; prefix_len++) {
          if (cand[prefix_len] != CACHE_items[prefix_len])
            break;
        }
        for (int i = prefix_len; i < iter; i++) { // copy to cache
          CACHE_items[i] = cand[i];
        }
        if (DCI_dataset.candidate_is_frequent_diffuse(cand, prefix_len, iter,
                                                      counters.min_count, count,
                                                      stats, !first_order)) {
          if (count != (int) min_count)
            next_freq.add_itemset(cand, (T1) count, (T) -1);
          else
            next_freq.add_itemset(cand, (T1) count, min_key);
          num_freq++;
          if (!first_order) {
            for (int i = 0; i < iter; i++)
              counters.flag_item[cand[i]] = true;
            counters.first_item_counts[cand[0]]++;
          }
        }
      }
    }

    // generate next candidate
    cand_type = previous_freq.next_cand();
    if (cand_type == END_GEN_CAND)
      break;
    else if (cand_type == NEW_PREFIX)
      previous_freq.get_prefix(cand);
    previous_freq.get_suffix(&cand[iter-2], key_pair, count_pair);
    num_cand++;
  }

  if (first_order == false) {
    DCI_dataset.order_bits_diffuse(counters);
    first_order = true;
  }

  delete [] cand;
  delete [] cand_subset;
  delete [] CACHE_items;

  if (write_output) { // dump to file frequent itemsets
    FSout o(OUTF, iter);
    if (!o.isOpen()) {
      cerr << OUTF << " could not be opened for writing!" << endl;
      exit(1);
    }
    next_freq.dump_itemsets(counters, o);
  }

#ifdef VERBOSE
  print_statistics("DCIsk", iter, num_cand, num_freq, time.ReadChronos());
//  cout << "one search : " << one_search << " (" << ((float) one_search)/num_cand*100 << ")" << endl;
#else
  printf("%d\n", num_freq);
#endif
  return;
}

// performs the current iteration with DCI
// by using the optimizations for dense datasets
template <class T, class T1>
void DCI_iter_compact_key(int iter, dci_items& counters,
                          set_of_frequents<T,T1>& previous_freq,
                          set_of_frequents<T,T1>& next_freq,
                          DCI_vertical_dataset<T>& DCI_dataset)
{
  Chronos time;
  time.StartChronos();

  next_freq.reset(iter);

  if (!previous_freq.init_gen_cand())
    return;

  T *cand, *cand_subset;
  T *CACHE_items;
  T key_pair[2];
  T1 count_pair[2];

  cand = new T[iter];
  cand_subset = new T[iter-1];
  CACHE_items = new T[iter];
  CACHE_items[0] = counters.get_m1() - 1; // init CACHE - surely different !!!

  int num_freq = 0;
  int num_cand = 0;
  int cand_type;
  int count;

  previous_freq.get_prefix(cand);
  previous_freq.get_suffix(&cand[iter - 2], key_pair, count_pair);
  num_cand++;
  cand_type = NEW_PREFIX;

  DCI_statistics stats;
  stats.reset_stats();
  DCI_dataset.init_cache(iter);

  T key = 0;
  T1 min_count;
  T min_key;
  int one_search = 0;

  while (1) {
    if ((key_pair[0] != (T) -1) || (key_pair[1] != (T) -1)) {
      // cand is surely not a key pattern
      one_search++;

      if (cand[iter-2] == key_pair[0]) {
        // the key pattern is the first generator
        num_freq++;
        next_freq.add_itemset(cand, (T1) count_pair[1], cand[iter-2]);
      }
      else if (cand[iter-1] == key_pair[1]) {
        // the key pattern is the second generator
        num_freq++;
        next_freq.add_itemset(cand, (T1) count_pair[0], cand[iter-1]);
      }
      else {
        // the key pattern is another subset: we must find it
        if (key_pair[0] != (T) -1)
          key = key_pair[0];
        else
          key = key_pair[1];

        int j = 0;
        for (int i = 0; i < iter; i++)
          if (cand[i] != key)
            cand_subset[j++] = cand[i];

        T tmp_key;
        if (previous_freq.find_one_subset(cand_subset, tmp_key, count)) {
          num_freq++;
          next_freq.add_itemset(cand, (T1) count, key);
        }
      }
    } else {
      if (count_pair[0] < count_pair[1]) {
        // remember min_count and corresponding key between generators
        min_count = count_pair[0];
        min_key = cand[iter - 1];
      }
      else {
        min_count = count_pair[1];
        min_key = cand[iter - 2];
      }

      T other_key;
      bool is_key_pattern = true;
      bool pruned = false;

      for (int del = iter-3; del >= 0; del--) {
        // look for the subset with minimum count
        int j = 0;
        for (int i = 0; i < iter; i++)
          if (i != del)
            cand_subset[j++] = cand[i];
        if (previous_freq.find_one_subset(cand_subset, other_key, count) == 0) {
          pruned = true;
          break; // a subset is infrequent, prune the cand and take the next one
        }

        if (other_key == (T) -1) {
          // remember min_count and corresponding key
          if (min_count >= (T1) count) {
            min_count = count;
            min_key = cand[del];
          }
        } else { // cand is not a key pattern
          is_key_pattern = false;
          int j1 = 0;
          for (int i = 0; i < iter; i++)
            if (cand[i] != other_key)
              cand_subset[j1++] = cand[i];

          T tmp_key; // check if the associated subset is frequent
          if (previous_freq.find_one_subset(cand_subset, tmp_key, count)) {
            num_freq++;
            next_freq.add_itemset(cand, (T1) count, other_key);
            break;
          }
          // else
          //   cout << "ERROR\n";
        }
      }
      if (!pruned && is_key_pattern) { // we must count candidate support!
        int prefix_len;
        for (prefix_len = 0; prefix_len < iter-1; prefix_len++) {
          if (cand[prefix_len] != CACHE_items[prefix_len])
            break;
        }
        for (int i = prefix_len; i < iter; i++) { // copy to cache
          CACHE_items[i] = cand[i];
        }

        if (DCI_dataset.candidate_is_frequent_compact(cand,
                                                      prefix_len, iter,
                                                      (int) counters.min_count,
                                                      count,
                                                      stats)) {
          num_freq++;
          if (count != (int) min_count)
            next_freq.add_itemset(cand, (T1) count, (T) -1);
          else
            next_freq.add_itemset(cand, (T1) count, min_key);
        }
      }
    }

    // generate next candidate
    cand_type = previous_freq.next_cand();
    if (cand_type == END_GEN_CAND)
      break;
    else if (cand_type == NEW_PREFIX)
      previous_freq.get_prefix(cand);
    previous_freq.get_suffix(&cand[iter-2], key_pair, count_pair);
    num_cand++;
  }

  delete [] cand;
  delete [] cand_subset;
  delete [] CACHE_items;

  if (write_output) { // dump to file frequent itemsets
    FSout o(OUTF, iter);
    if (!o.isOpen()) {
      cerr << OUTF << " could not be opened for writing!" << endl;
      exit(1);
    }
    next_freq.dump_itemsets(counters, o);
  }

#ifdef VERBOSE
  print_statistics("DCIdk", iter, num_cand, num_freq, time.ReadChronos());
//  cout << "one search : " << one_search << " (" << ((float) one_search)/num_cand*100 << ")" << endl;
#else
  printf("%d\n", num_freq);
#endif
  return;
}

// performs the current iteration with DCI by using the optimizations for dense datasets
template <class T, class T1>
void DCI_iter_compact(int iter, dci_items& counters,
                      set_of_frequents<T,T1>& previous_freq,
                      set_of_frequents<T,T1>& next_freq,
                      DCI_vertical_dataset<T>& DCI_dataset)
{
  Chronos time;
  time.StartChronos();

  next_freq.reset(iter);

  if (!previous_freq.init_gen_cand())
    return;

  T *cand;
  T *CACHE_items;

  cand = new T[iter];
  CACHE_items = new T[iter];
  CACHE_items[0] = counters.get_m1() - 1; // init CACHE - surely different !!!

  int num_freq = 0;
  int num_cand = 0;
  int cand_type;
  int count;

  previous_freq.get_prefix(cand);
  previous_freq.get_suffix(&cand[iter - 2]);
  num_cand++;
  cand_type = NEW_PREFIX;

  DCI_statistics stats;
  stats.reset_stats();
  DCI_dataset.init_cache(iter);

  while (1) {
    int start;
    if (cand_type == NEW_PREFIX)
      start = 0;
    else
      start = iter - 2;

    int prefix_len;
    for (prefix_len = start; prefix_len < iter-1; prefix_len++) {
      if (cand[prefix_len] != CACHE_items[prefix_len])
        break;
    }

    for (int i = prefix_len; i < iter; i++) { // copy to cache
      CACHE_items[i] = cand[i];
    }

    // DCI_dataset.set_is_included_flags(cand, prefix_len, iter);
    if (DCI_dataset.candidate_is_frequent_compact(cand,
                                                  prefix_len, iter,
                                                  (int) counters.min_count,
                                                  count,
                                                  stats)) {
      num_freq++;
      next_freq.add_itemset(cand, (T1) count);
    }

    cand_type = previous_freq.next_cand();
    if (cand_type == END_GEN_CAND)
      break;
    else if (cand_type == NEW_SUFFIX)
      previous_freq.get_suffix(&cand[iter-2]);
    else {
      previous_freq.get_prefix(cand);
      previous_freq.get_suffix(&cand[iter-2]);
    }
    num_cand++;
  }

  delete [] cand;
  delete [] CACHE_items;

  if (write_output) { // dump to file frequent itemsets
    FSout o(OUTF, iter);
    if (!o.isOpen()) {
      cerr << OUTF << " could not be opened for writing!" << endl;
      exit(1);
    }
    next_freq.dump_itemsets(counters, o);
  }

#ifdef VERBOSE
  print_statistics("DCId", iter, num_cand, num_freq, time.ReadChronos());
#else
  printf("%d\n", num_freq);
#endif
  return;
}
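
Each of the routines above performs one level of the level-wise mining loop: it reads the (iter-1)-frequents from previous_freq, generates and checks candidates of length iter, and stores the survivors in next_freq. The driver below is only an illustrative sketch, not part of dci.cc: the function name run_dense_iterations and the fixed loop bounds are assumptions, and it relies solely on the signatures visible in the listing above. It shows the natural calling pattern, where the two set_of_frequents buffers swap roles between consecutive levels.

#include <utility>  // std::swap

// Hypothetical driver (not from dci.cc): alternate two set_of_frequents
// buffers so that the frequents produced at one level become the candidate
// generators of the next level.
template <class T, class T1>
void run_dense_iterations(int first_iter, int last_iter,
                          dci_items& counters,
                          set_of_frequents<T,T1>& freq_a,
                          set_of_frequents<T,T1>& freq_b,
                          DCI_vertical_dataset<T>& DCI_dataset)
{
  set_of_frequents<T,T1>* prev = &freq_a;  // frequents of length iter-1
  set_of_frequents<T,T1>* next = &freq_b;  // receives frequents of length iter
  for (int iter = first_iter; iter <= last_iter; iter++) {
    // DCI_iter_compact() resets *next internally and returns early
    // when *prev yields no candidate generators.
    DCI_iter_compact<T,T1>(iter, counters, *prev, *next, DCI_dataset);
    std::swap(prev, next);  // this level's frequents feed the next level
  }
}

In the real program the loop would presumably stop as soon as a level produces no frequent itemsets; the fixed last_iter bound here is only to keep the sketch short.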
