// tddtinducer.java: Java data mining algorithms, top-down decision tree (TDDT) inducer (page 1 of 5)
   /** Returns the number of non-trivial leaves, i.e. leaves with instances
    * leading to them.
    * @return The number of non-trivial leaves.
    */
   public int num_nontrivial_leaves()
   {
      was_trained(true);
      return decisionTreeCat.num_nontrivial_leaves();
   }

   /** Checks if this inducer has a valid decision tree.
    * @return True iff the class has a valid decisionTree categorizer.
    * @param fatalOnFalse TRUE if an error message should be displayed if the inducer is not trained,
    * FALSE otherwise.
    */
   public boolean was_trained(boolean fatalOnFalse)
   {
      if(fatalOnFalse && decisionTreeCat == null)
         Error.err("TDDTInducer.was_trained: No decision tree categorizer. "
	           + " Call train() to create categorizer -->fatal_error");
      return decisionTreeCat != null;
   }

   /** Induces a decision tree in the given graph.
    * @param aCgraph                  The graph that will contain the decision tree.
    * @param tieBreakingOrder         The order used to break distribution ties.
    * @param numSubtreeErrors         Number of errors accumulated to this point.
    * @param pessimisticSubtreeErrors Error estimate if this were a leaf node.
    * @param numLeaves                The number of leaves in the decision tree.
    * @param remainingSiblings        Siblings that have not been induced yet.
    * @return The root node of the decision tree.
    */
   public Node induce_decision_tree(CGraph aCgraph, int[] tieBreakingOrder,
                                    DoubleRef numSubtreeErrors,
                                    DoubleRef pessimisticSubtreeErrors,
                                    IntRef numLeaves, int remainingSiblings)
   {
      if (TS.no_weight())
         Error.fatalErr("TDDTInducer.induce_decision_tree: list has zero weight");
   
//      DBG(pessimisticSubtreeErrors = -1);
      // Create a decision tree object to allow building nodes in the CGraph.
      DecisionTree decisionTree = new DecisionTree(aCgraph);

      // Display training InstanceList. -JL
      logOptions.LOG(4, "Training set ="+'\n'+TS.out(false)+'\n');

      LinkedList catNames = new LinkedList();
//      catNames[0] = null;
      NodeCategorizer[] rootCat = new NodeCategorizer[1];
      rootCat[0] = best_split(catNames);
      if (rootCat[0] == null) {
         rootCat[0] = create_leaf_categorizer(TS.total_weight(),
					tieBreakingOrder, numSubtreeErrors,
					pessimisticSubtreeErrors);
         // We catch empty leaves in induce_tree_from_split. Hence, this can't be
         // a trivial leaf.
//         MLJ.ASSERT(MLJ.approx_greater(rootCat[0].total_weight(), 0),"TDDTInducer.induce_decision_tree: MLJ.approx_greater(rootCat[0].total_weight(), 0) == false");
         numLeaves.value = 1;
         decisionTree.set_root(decisionTree.create_node(rootCat, get_level()));
         MLJ.ASSERT(rootCat[0] == null,"TDDTInducer.induce_decision_tree: rootCat[0] != null"); // create_node gets ownership
//         IFDRIBBLE(dribble_level(level, "Leaf node", remainingSiblings));
      } else {
         NodeCategorizer splitCat = rootCat[0];
         decisionTree.set_root(decisionTree.create_node(rootCat, get_level()));
         MLJ.ASSERT(rootCat[0] == null,"TDDTInducer.induce_decision_tree: rootCat[0] != null"); // create_node gets ownership
         induce_tree_from_split(decisionTree, splitCat, catNames,
			     tieBreakingOrder, numSubtreeErrors,
			     pessimisticSubtreeErrors, numLeaves,
			     remainingSiblings);
      }

      catNames = null;
//      DBG(catNames = null);
      logOptions.LOG(6, "TDDT returning " + decisionTree.get_root() +'\n');

      MLJ.ASSERT(pessimisticSubtreeErrors.value >= 0,"TDDTInducer.induce_decision_tree: pessimisticSubtreeErrors.value < 0");
      return decisionTree.get_root();

//      System.out.println("Warning-->TDDTInducer.induce_decision_tree"
//         +" not implemented yet");
//      return null; 
   }
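
   // Illustrative usage sketch (not part of the original source; the
   // DoubleRef/IntRef constructors taking an initial value are assumed):
   //
   //    DoubleRef subtreeErrors = new DoubleRef(0);
   //    DoubleRef pessimisticErrors = new DoubleRef(0);
   //    IntRef leafCount = new IntRef(0);
   //    Node root = induce_decision_tree(graph, tieBreakingOrder,
   //                   subtreeErrors, pessimisticErrors, leafCount, 0);
   //
   // On return, leafCount.value holds the number of leaves induced and the
   // two DoubleRef arguments hold the observed and pessimistic error
   // estimates for the subtree.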

   /** Builds a decision tree categorizer for the given DecisionTree.
    * @param dTree The DecisionTree to use for creating the categorizer.
    */
   protected void build_categorizer(DecisionTree dTree)
   {
      decisionTreeCat = null;
      decisionTreeCat = new DTCategorizer(dTree, description(),
                                          TS.num_categories(),
                                          TS.get_schema());
      
      decisionTreeCat.set_leaf_dist_params(tddtOptions.leafDistType,
                                           tddtOptions.MEstimateFactor,
					   tddtOptions.evidenceFactor);
      //ASSERT(dtree==null);
   }

   /** Finds the best split at this node and returns a categorizer
    * implementing it. The names of the resulting categories are stored
    * in catNames.
    *
    * @param catNames The list to be filled with the resulting category names.
    * @return The categorizer implementing the best split, or null if no
    * useful split exists.
    */
   abstract public NodeCategorizer best_split(LinkedList catNames);
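
   // Note (illustrative): a concrete subclass such as an ID3-style inducer
   // would typically score candidate attribute splits with an information
   // measure (e.g., mutual information or gain ratio), fill catNames with
   // one name per resulting branch, and return a NodeCategorizer for the
   // winning split, or null when no split improves on a leaf.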

   /** Computes the number of errors this node would make as a leaf. If
    * totalWeight is zero, the distribution is ignored, else totalWeight
    * must be the sum of the distribution counts.
    * @return The number of errors this node would make if it were a leaf
    * on the decision tree.
    * @param cat			The Categorizer for the node being checked.
    * @param predictClass	The category for which this node is being
    * checked.
    * @param totalWeight	The weight of all instances in a data set.
    */
   protected static double num_cat_errors(Categorizer cat, int predictClass,
                                          double totalWeight)
   {
      double numErrors = 0;
      if (!cat.has_distr())
         Error.fatalErr("TDDTInducer.num_cat_errors: Categorizer has no distribution");
//      DBG(
//         const double[]& dist = cat.get_distr();
//         double sum = dist.sum();
//         // if (numInstances > 0) @@ will this fail?  If yes, comment why
//         MLJ.verify_approx_equal((StoredReal)sum, (StoredReal)totalWeight,
//                                 "TDDTInducer.num_cat_errors: summation of "
//                                 "distribution fails to equal number of "
//                                 "instances", 100);
//         );

      if (totalWeight > 0) { // we're not an empty leaf
         double numPredict = cat.get_distr()[predictClass - Globals.FIRST_NOMINAL_VAL];
         double nodeErrors = totalWeight - numPredict; // error count
//         ASSERT(nodeErrors >= 0);
         numErrors = nodeErrors; // increment parent's count of errors
      }
      return numErrors;
   }
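
   // Worked example (illustrative): for a node with label distribution
   // {A: 6.0, B: 3.0, C: 1.0}, totalWeight = 10.0, and predictClass = A,
   // predicting A as a leaf misclassifies the B and C weight, so
   // num_cat_errors returns 10.0 - 6.0 = 4.0.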

   /** Creates a leaf categorizer (has no children). We currently create
    * a ConstCategorizer with a description and the majority category.
    * Note that the augCategory will contain the correct string,
    * but the description will contain more information which may
    * help when displaying the graph. The augCategory string must
    * be the same for CatTestResult to work properly (it compares
    * the actual string for debugging purposes).
    * @return The LeafCategorizer created.
    * @param tieBreakingOrder Order for breaking distribution ties.
    * @param totalWeight The total weight of the training data set.
    * @param numErrors The number of errors this LeafCategorizer
    * will produce.
    * @param pessimisticErrors Error estimate if this was a leaf node.
    */
public LeafCategorizer create_leaf_categorizer(double totalWeight,
				     int[] tieBreakingOrder,
				     DoubleRef numErrors, DoubleRef pessimisticErrors)
{
   return create_leaf_categorizer(totalWeight, tieBreakingOrder,
                                  numErrors, pessimisticErrors, null);
}

/** Creates a leaf categorizer (has no children). We currently create
 * a ConstCategorizer with a description and the majority category.
 * If the distrArray is given, we don't reference the training set,
 * except for its schema (used in pruning).<P>
 * Note that the augCategory will contain the correct string,
 * but the description will contain more information which may
 * help when displaying the graph. The augCategory string must
 * be the same for CatTestResult to work properly (it compares
 * the actual string for debugging purposes).
 * @return The LeafCategorizer created.
 * @param tieBreakingOrder Order for breaking distribution ties.
 * @param totalWeight The total weight of the training data set.
 * @param numErrors The number of errors this LeafCategorizer
 * will produce.
 * @param pessimisticErrors Error estimate if this was a leaf node.
 * @param distrArray Distribution of weight over labels.
 */
public LeafCategorizer create_leaf_categorizer(double totalWeight,
				     int[] tieBreakingOrder,
				     DoubleRef numErrors, DoubleRef pessimisticErrors,
				     double[] distrArray)
{
   // Find tiebreaking order.
   int[] myTiebreakingOrder = null;
   double[] weightDistribution = (distrArray!=null) ? distrArray : TS.counters().label_counts();
//   ASSERT(weightDistribution.low() == Globals.UNKNOWN_CATEGORY_VAL);
   if (tddtOptions.parentTieBreaking)
      myTiebreakingOrder = CatDist.merge_tiebreaking_order(tieBreakingOrder,
                                     weightDistribution);
   else
      myTiebreakingOrder = CatDist.tiebreaking_order(weightDistribution);
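
   // With parentTieBreaking on, ties in this node's distribution are broken
   // consistently with the parent's ordering (merged above); otherwise the
   // order is derived from this node's distribution alone.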
   
   ConstCategorizer leafCat = null;

   // @@ this is silly. We compute the majority category, make a ConstCat for
   // it, then turn around and predict a different category (the one that
   // produces the least loss). We use this majority to compute the number of
   // errors, even if we don't predict it!
   int majority = CatDist.majority_category(weightDistribution,
						  myTiebreakingOrder);

   if(tddtOptions.leafDistType == allOrNothing) {
      AugCategory	augMajority = new AugCategory(majority,
		     TS.get_schema().category_to_label_string(majority));
      logOptions.LOG(3, "All-or-nothing Leaf is: "); 
      leafCat = new ConstCategorizer(" ", augMajority, TS.get_schema());
      AugCategory bestPrediction = leafCat.get_category();
      logOptions.LOG(3, ""+bestPrediction.toString()+'\n');
      String myDescr = bestPrediction.description();//.read_rep();
      leafCat.set_description(myDescr);
   } else {
      double[] fCounts = weightDistribution;
      CatDist cDist = null;
      switch (tddtOptions.leafDistType) {
	 case frequencyCounts:
	    cDist = new CatDist(TS.get_schema(), fCounts, CatDist.none);
	    logOptions.LOG(3, "Frequency-count Leaf is: ");
	    break;
	 case laplaceCorrection:
	    cDist = new CatDist(TS.get_schema(), fCounts, CatDist.laplace,
				tddtOptions.MEstimateFactor);
	    logOptions.LOG(3, "Laplace Leaf is: ");
	    break;
	 case evidenceProjection:
	    cDist = new CatDist(TS.get_schema(), fCounts, CatDist.evidence,
				tddtOptions.evidenceFactor);
	    logOptions.LOG(3, "Evidence Leaf is: ");
	    break;
	 default:
	    MLJ.Abort();
      }
      logOptions.LOG(3, ""+cDist+'\n');
      cDist.set_tiebreaking_order(myTiebreakingOrder);
      AugCategory bestCategory = cDist.best_category();
      String myDescr = bestCategory.description();//.read_rep();
      leafCat = new ConstCategorizer(myDescr, cDist, TS.get_schema());
//      DBG(ASSERT(cDist == null));
   }
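
   // Note (illustrative; the exact CatDist formulas are not shown in this
   // file): a Laplace-style correction typically smooths the estimate for
   // class c toward uniform, roughly P(c) = (n_c + f) / (N + k*f), where
   // n_c is the weight of class c, N the total weight, k the number of
   // classes, and f the correction factor (MEstimateFactor above);
   // evidenceFactor plays the analogous role for the evidence projection.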

   myTiebreakingOrder = null; //delete myTiebreakingOrder;
   
//   ASSERT(leafCat);

//   DBGSLOW(
//	   InstanceRC dummy(leafCat.get_schema());
//	   MStringRC predDescr = leafCat.categorize(dummy).description();
//	   if (predDescr != leafCat.description())
//	      Error.fatalErr("cat descriptions don't match: I picked "
//	          +leafCat.description()+", leafCat predicted "
//	          +predDescr+". CatDist is "+leafCat.get_cat_dist());
//	   );
   
   if (distrArray != null) {
      // Copy all but the last element of distrArray into the leaf's distribution.
      double[] newDistr = new double[distrArray.length - 1];
      for (int i = 0; i < newDistr.length; i++)
         newDistr[i] = distrArray[i];
      leafCat.set_distr(newDistr);
   } else {
      // Use coarser granularity when approx_equal is invoked with floats.
      if (MLJ.approx_equal((float)totalWeight, 0.0)
          && !tddtOptions.emptyNodeParentDist) {
         double[] disArray = new double[TS.num_categories()];
         leafCat.set_distr(disArray);
      } else
         leafCat.build_distr(instance_list());
   }

   // If there are no instances, we predict like the parent and
   //   the penalty for pessimistic errors comes from the other children.
   //   Note that we can't just call num_cat because the distribution
   //   may be the parent's distribution
   // Use coarser granularity when approx_equal invoked with floats.
   if (MLJ.approx_equal((float)totalWeight,0.0)) {
      numErrors.value = 0;
      pessimisticErrors.value = 0;
   } else {
      numErrors.value = num_cat_errors(leafCat, majority, totalWeight);
      pessimisticErrors.value = CatTestResult.pessimistic_error_correction(
                          numErrors.value, totalWeight, get_pruning_factor());
   }
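
   // The pessimistic estimate inflates the observed leaf error via the
   // pruning factor, in the spirit of C4.5's error-based pruning: it acts
   // as an upper confidence bound on the true error rather than the raw
   // resubstitution count.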

//   ASSERT(numErrors >= 0);
//   ASSERT(pessimisticErrors >= 0);
/*
   if (get_debug()) {
      int numChars = 128;
      char buffer[numChars];
      for (int chr = 0; chr < numChars; chr++)
	   buffer[chr] = '\0';
      MLCOStream *stream = new MLCOStream(EMPTY_STRING, buffer, numChars);
      CatDist score = leafCat.get_cat_dist();
      *stream << score.get_scores();
      String pDist = stream.mem_buf();
      stream = null; //delete stream;
      stream = new MLCOStream(EMPTY_STRING, buffer, numChars);
      *stream << leafCat.get_distr();
