

// tddtinducer.java: the C4.5 decision-tree algorithm implemented in Java
   /** Returns the number of non-trivial leaves in the decision tree,
    * i.e. leaves that have training instances leading to them.
    */
   public int num_nontrivial_leaves()
   {
      was_trained(true);
      return decisionTreeCat.num_nontrivial_leaves();
   }

   /** Checks if this inducer has a valid decision tree.
    * @return True iff the class has a valid decisionTree categorizer.
    * @param fatalOnFalse TRUE if an error message should be displayed when the
    * inducer is not trained, FALSE otherwise.
    */
   public boolean was_trained(boolean fatalOnFalse)
   {
      if(fatalOnFalse && decisionTreeCat == null)
         Error.err("TDDTInducer.was_trained: No decision tree categorizer."
	           + " Call train() to create categorizer -->fatal_error");
      return decisionTreeCat != null;
   }

   /** Induces a decision tree in the given graph.
    * @param aCgraph                  The graph that will contain the decision tree.
    * @param tieBreakingOrder         The order used to break distribution ties.
    * @param numSubtreeErrors         Number of errors accumulated to this point.
    * @param pessimisticSubtreeErrors Error estimate if this were a leaf node.
    * @param numLeaves                The number of leaves in the decision tree.
    * @param remainingSiblings        Siblings that have not been induced yet.
    * @return The root node of the decision tree.
    */
   public Node induce_decision_tree(CGraph aCgraph, int[] tieBreakingOrder,
				    DoubleRef numSubtreeErrors,
				    DoubleRef pessimisticSubtreeErrors,
				    IntRef numLeaves, int remainingSiblings)
   {
      if (TS.no_weight())
         Error.fatalErr("TDDTInducer.induce_decision_tree: list has zero weight");
   
//      DBG(pessimisticSubtreeErrors = -1);
      // Create a decision tree object to allow building nodes in the CGraph.
      DecisionTree decisionTree = new DecisionTree(aCgraph);

      // Display training InstanceList. -JL
      logOptions.LOG(4, "Training set ="+'\n'+TS.out(false)+'\n');

      LinkedList catNames = new LinkedList();
//      catNames[0] = null;
      NodeCategorizer[] rootCat = new NodeCategorizer[1];
      rootCat[0] = best_split(catNames);
      if (rootCat[0] == null) {
         rootCat[0] = create_leaf_categorizer(TS.total_weight(),
					tieBreakingOrder, numSubtreeErrors,
					pessimisticSubtreeErrors);
         // We catch empty leaves in induce_tree_from_split. Hence, this can't be
         // a trivial leaf.
//         MLJ.ASSERT(MLJ.approx_greater(rootCat[0].total_weight(), 0),"TDDTInducer.induce_decision_tree: MLJ.approx_greater(rootCat[0].total_weight(), 0) == false");
         numLeaves.value = 1;
         decisionTree.set_root(decisionTree.create_node(rootCat, get_level()));
         MLJ.ASSERT(rootCat[0] == null,"TDDTInducer.induce_decision_tree: rootCat[0] != null"); // create_node gets ownership
//         IFDRIBBLE(dribble_level(level, "Leaf node", remainingSiblings));
      } else {
         NodeCategorizer splitCat = rootCat[0];
         decisionTree.set_root(decisionTree.create_node(rootCat, get_level()));
         MLJ.ASSERT(rootCat[0] == null,"TDDTInducer.induce_decision_tree: rootCat[0] != null"); // create_node gets ownership
         induce_tree_from_split(decisionTree, splitCat, catNames,
			     tieBreakingOrder, numSubtreeErrors,
			     pessimisticSubtreeErrors, numLeaves,
			     remainingSiblings);
      }

      catNames = null;
//      DBG(catNames = null);
      logOptions.LOG(6, "TDDT returning " + decisionTree.get_root() +'\n');

      MLJ.ASSERT(pessimisticSubtreeErrors.value >= 0,"TDDTInducer.induce_decision_tree: pessimisticSubtreeErrors.value < 0");
      return decisionTree.get_root();

//      System.out.println("Warning-->TDDTInducer.induce_decision_tree"
//         +" not implemented yet");
//      return null; 
   }
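
   // Sketch of the recursion above (descriptive note, not from the original
   // source): best_split() either returns null, in which case
   // create_leaf_categorizer() builds a leaf and numLeaves becomes 1, or it
   // returns a split categorizer, in which case induce_tree_from_split()
   // recursively invokes induce_decision_tree() on each child partition,
   // accumulating numSubtreeErrors and pessimisticSubtreeErrors so that
   // pessimistic pruning can later compare subtrees against single leaves.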

   /** Builds a decision tree categorizer for the given DecisionTree.
    * @param dTree The DecisionTree to use for creating the categorizer.
    */
   protected void build_categorizer(DecisionTree dTree)
   {
      decisionTreeCat = null;
      decisionTreeCat = new DTCategorizer(dTree, description(),
                                          TS.num_categories(),
                                          TS.get_schema());
      
      decisionTreeCat.set_leaf_dist_params(tddtOptions.leafDistType,
                                           tddtOptions.MEstimateFactor,
					   tddtOptions.evidenceFactor);
      //ASSERT(dtree==null);
   }

   /** Finds the best split at the current node and returns a categorizer
    * implementing it. The names of the resulting categories are placed in
    * catNames.
    *
    * @param catNames The list to be filled with the names of the resulting
    * categories.
    * @return The categorizer implementing the best split, or null if no
    * useful split exists.
    */
   abstract public NodeCategorizer best_split(LinkedList catNames);
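
   // A minimal override sketch (hypothetical; concrete subclasses in the
   // MLC++ lineage, e.g. an ID3-style inducer, score candidate splits by a
   // purity measure such as information gain over the training set TS):
   //
   //    public NodeCategorizer best_split(LinkedList catNames) {
   //       NodeCategorizer best = null;   // best-scoring split so far
   //       // for each attribute: build a candidate split categorizer,
   //       // score it on TS, and keep the highest-scoring one;
   //       // fill catNames with the resulting branch names.
   //       return best;                   // null => make this node a leaf
   //    }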

   /** Computes the number of errors this node would make as a leaf. If
    * totalWeight is zero, the distribution is ignored; otherwise totalWeight
    * must be the sum of the distribution counts.
    * @return The number of errors this node would make if it were a leaf
    * on the decision tree.
    * @param cat			The Categorizer for the node being checked.
    * @param predictClass	The category for which this node is being
    * checked.
    * @param totalWeight	The weight of all instances in a data set.
    */
   protected static double num_cat_errors(Categorizer cat, int predictClass, double totalWeight)
   {
      double numErrors = 0;
      if (!cat.has_distr())
         Error.fatalErr("TDDTInducer.num_cat_errors: Categorizer has no distribution");
//      DBG(
//         const double[]& dist = cat.get_distr();
//         double sum = dist.sum();
//         // if (numInstances > 0) @@ will this fail?  If yes, comment why
//         MLJ.verify_approx_equal((StoredReal)sum, (StoredReal)totalWeight,
//                                 "TDDTInducer.num_cat_errors: summation of "
//                                 "distribution fails to equal number of "
//                                 "instances", 100);
//         );

      if (totalWeight > 0) { // we're not an empty leaf
         double numPredict = cat.get_distr()[predictClass - Globals.FIRST_NOMINAL_VAL];
         double nodeErrors = totalWeight - numPredict; // error count
//         ASSERT(nodeErrors >= 0);
         numErrors = nodeErrors; // increment parent's count of errors
      }
      return numErrors;
   }
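
   // Worked example (hypothetical numbers): with a class-weight distribution
   // of {3.0, 7.0}, predictClass naming the second class, and
   // totalWeight = 10.0, numPredict is 7.0, so this node as a leaf would
   // misclassify the remaining 10.0 - 7.0 = 3.0 units of weight.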

   /** Creates a leaf categorizer (has no children). We currently create
    * a ConstCategorizer with a description and the majority category.
    * Note that the augCategory will contain the correct string,
    * but the description will contain more information which may
    * help when displaying the graph. The augCategory string must
    * be the same for CatTestResult to work properly (it compares
    * the actual string for debugging purposes).
    * @return The LeafCategorizer created.
    * @param totalWeight The total weight of the training data set.
    * @param tieBreakingOrder Order for breaking distribution ties.
    * @param numErrors The number of errors this LeafCategorizer
    * will produce.
    * @param pessimisticErrors Error estimate if this were a leaf node.
    */
public LeafCategorizer create_leaf_categorizer(double totalWeight,
				     int[] tieBreakingOrder,
				     DoubleRef numErrors, DoubleRef pessimisticErrors)
{
   return create_leaf_categorizer(totalWeight, tieBreakingOrder, numErrors,
				  pessimisticErrors, null);
}

/** Creates a leaf categorizer (has no children). We currently create
 * a ConstCategorizer with a description and the majority category.
 * If the distrArray is given, we don't reference the training set,
 * except for its schema (used in pruning).<P>
 * Note that the augCategory will contain the correct string,
 * but the description will contain more information which may
 * help when displaying the graph. The augCategory string must
 * be the same for CatTestResult to work properly (it compares
 * the actual string for debugging purposes).
 * @return The LeafCategorizer created.
 * @param totalWeight The total weight of the training data set.
 * @param tieBreakingOrder Order for breaking distribution ties.
 * @param numErrors The number of errors this LeafCategorizer
 * will produce.
 * @param pessimisticErrors Error estimate if this were a leaf node.
 * @param distrArray Distribution of weight over labels.
 */
public LeafCategorizer create_leaf_categorizer(double totalWeight,
				     int[] tieBreakingOrder,
				     DoubleRef numErrors, DoubleRef pessimisticErrors,
				     double[] distrArray)
{
   // Find tiebreaking order.
   int[] myTiebreakingOrder = null;
   double[] weightDistribution = (distrArray!=null) ? distrArray : TS.counters().label_counts();
//   ASSERT(weightDistribution.low() == Globals.UNKNOWN_CATEGORY_VAL);
   if (tddtOptions.parentTieBreaking)
      myTiebreakingOrder = CatDist.merge_tiebreaking_order(tieBreakingOrder,
                                     weightDistribution);
   else
      myTiebreakingOrder = CatDist.tiebreaking_order(weightDistribution);
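
   // E.g. (hypothetical weights): with weightDistribution {2.0, 5.0, 5.0},
   // the two weight-5.0 classes tie; tiebreaking_order() fixes some local
   // order, while parentTieBreaking merges in the parent's order so that an
   // empty or tied child predicts consistently with its parent.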
   
   ConstCategorizer leafCat = null;

   // @@ this is silly. We compute the majority category, make a ConstCat for
   // it, then turn around and predict a different category (the one that
   // produces the least loss). We use this majority to compute the number of
   // errors, even if we don't predict it!
   int majority = CatDist.majority_category(weightDistribution,
						  myTiebreakingOrder);

   if(tddtOptions.leafDistType == allOrNothing) {
      AugCategory	augMajority = new AugCategory(majority,
		     TS.get_schema().category_to_label_string(majority));
      logOptions.LOG(3, "All-or-nothing Leaf is: "); 
      leafCat = new ConstCategorizer(" ", augMajority, TS.get_schema());
      AugCategory bestPrediction = leafCat.get_category();
      logOptions.LOG(3, ""+bestPrediction.toString()+'\n');
      String myDescr = bestPrediction.description();//.read_rep();
      leafCat.set_description(myDescr);
   } else {
      double[] fCounts = weightDistribution;
      CatDist cDist = null;
      switch (tddtOptions.leafDistType) {
	 case frequencyCounts:
	    cDist = new CatDist(TS.get_schema(), fCounts, CatDist.none);
	    logOptions.LOG(3, "Frequency-count Leaf is: ");
	    break;
	 case laplaceCorrection:
	    cDist = new CatDist(TS.get_schema(), fCounts, CatDist.laplace,
				tddtOptions.MEstimateFactor);
	    logOptions.LOG(3, "Laplace Leaf is: ");
	    break;
	 case evidenceProjection:
	    cDist = new CatDist(TS.get_schema(), fCounts, CatDist.evidence,
				tddtOptions.evidenceFactor);
	    logOptions.LOG(3, "Evidence Leaf is: ");
	    break;
	 default:
	    MLJ.Abort();
      }
      logOptions.LOG(3, ""+cDist+'\n');
      cDist.set_tiebreaking_order(myTiebreakingOrder);
      AugCategory bestCategory = cDist.best_category();
      String myDescr = bestCategory.description();//.read_rep();
      leafCat = new ConstCategorizer(myDescr, cDist, TS.get_schema());
//      DBG(ASSERT(cDist == null));
   }
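
   // For reference: the classic Laplace correction estimates
   // P(c_i) = (w_i + 1) / (W + k) for k classes and total weight W;
   // CatDist.laplace with MEstimateFactor is presumably the m-estimate
   // generalization, e.g. P(c_i) = (w_i + m/k) / (W + m). (Formulas stated
   // here as an assumption about CatDist, not taken from this file.)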

   myTiebreakingOrder = null; //delete myTiebreakingOrder;
   
//   ASSERT(leafCat);

//   DBGSLOW(
//	   InstanceRC dummy(leafCat.get_schema());
//	   MStringRC predDescr = leafCat.categorize(dummy).description();
//	   if (predDescr != leafCat.description())
//	      Error.fatalErr("cat descriptions don't match: I picked "
//	          +leafCat.description()+", leafCat predicted "
//	          +predDescr+". CatDist is "+leafCat.get_cat_dist());
//	   );
   
   if (distrArray != null) {
      double[] newDistr = new double[distrArray.length - 1];//(0, distrArray.length - 1, 0);
      for (int i = 0; i < newDistr.length; i++)
	    newDistr[i] = distrArray[i];
      leafCat.set_distr(newDistr);
   } else {
      // Use coarser granularity when approx_equal invoked with floats.
      if (MLJ.approx_equal((float)totalWeight,0.0)
	 && !tddtOptions.emptyNodeParentDist) {
	 double[] disArray = new double[TS.num_categories()];// (0, TS.num_categories(), 0);
	 leafCat.set_distr(disArray);
      } else
	 leafCat.build_distr(instance_list());
   }

   // If there are no instances, we predict like the parent and
   //   the penalty for pessimistic errors comes from the other children.
   //   Note that we can't just call num_cat_errors because the distribution
   //   may be the parent's distribution.
   // Use coarser granularity when approx_equal invoked with floats.
   if (MLJ.approx_equal((float)totalWeight,0.0)) {
      numErrors.value = 0;
      pessimisticErrors.value = 0;
   } else {
      numErrors.value = num_cat_errors(leafCat, majority, totalWeight);
      pessimisticErrors.value = CatTestResult.pessimistic_error_correction(
                          numErrors.value, totalWeight, get_pruning_factor());
   }
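
   // Background note (not from this file): this mirrors C4.5-style
   // pessimistic pruning, where observed leaf errors are inflated by a
   // confidence-based penalty; here the penalty is controlled by
   // get_pruning_factor(), and the exact formula lives in
   // CatTestResult.pessimistic_error_correction.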

//   ASSERT(numErrors >= 0);
//   ASSERT(pessimisticErrors >= 0);
/*
   if (get_debug()) {
      int numChars = 128;
      char buffer[numChars];
      for (int chr = 0; chr < numChars; chr++)
	   buffer[chr] = '\0';
      MLCOStream *stream = new MLCOStream(EMPTY_STRING, buffer, numChars);
      CatDist score = leafCat.get_cat_dist();
      *stream << score.get_scores();
      String pDist = stream.mem_buf();
      stream = null; //delete stream;
      stream = new MLCOStream(EMPTY_STRING, buffer, numChars);
      *stream << leafCat.get_distr();
