tddtinducer.java
Data warehouse mining and development: ID3 algorithm implementation code (Java)
Page 1 of 5
   /** Returns the number of non-trivial leaves in the decision tree,
    * i.e., leaves that have training instances leading to them.
    */
   public int num_nontrivial_leaves()
   {
      was_trained(true);
      return decisionTreeCat.num_nontrivial_leaves();
   }

   /** Checks whether this inducer has a valid decision tree.
    * @param fatalOnFalse TRUE if an error message should be displayed when
    * the inducer is not trained, FALSE otherwise.
    * @return True iff the class has a valid decisionTree categorizer.
    */
   public boolean was_trained(boolean fatalOnFalse)
   {
      if(fatalOnFalse && decisionTreeCat == null)
         Error.err("TDDTInducer.was_trained: No decision tree categorizer. "
                   + "Call train() to create categorizer -->fatal_error");
      return decisionTreeCat != null;
   }
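   /* A minimal usage sketch of the guard above (illustrative only; the
    * inducer variable and its train() call are assumed to follow the usual
    * MLJ pattern of building decisionTreeCat during training):
    *
    *    inducer.train();                    // builds decisionTreeCat
    *    if (inducer.was_trained(false))     // no fatal error on failure
    *       System.out.println("leaves: " + inducer.num_nontrivial_leaves());
    */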

   /** Induce a decision tree in the given graph.
    * @param aCgraph                  The graph that will contain the decision tree.
    * @param tieBreakingOrder The tie breaking order for breaking distribution ties.
    * @param numSubtreeErrors Number of errors to this point.
    * @param pessimisticSubtreeErrors Error estimate if this was a leaf node.
    * @param numLeaves                The number of leaves in the decision tree.
    * @param remainingSiblings Siblings that have not been induced yet.
    * @return The root node of the decision tree.
    */
   public Node induce_decision_tree(CGraph aCgraph, int[] tieBreakingOrder,
                                    DoubleRef numSubtreeErrors,
                                    DoubleRef pessimisticSubtreeErrors,
                                    IntRef numLeaves, int remainingSiblings)
   {
      if (TS.no_weight())
         Error.fatalErr("TDDTInducer.induce_decision_tree: list has zero weight");
   
//      DBG(pessimisticSubtreeErrors = -1);
      // Create a decision tree object to allow building nodes in the CGraph.
      DecisionTree decisionTree = new DecisionTree(aCgraph);

      // Display training InstanceList. -JL
      logOptions.LOG(4, "Training set ="+'\n'+TS.out(false)+'\n');

      LinkedList catNames = new LinkedList();
//      catNames[0] = null;
      NodeCategorizer[] rootCat = new NodeCategorizer[1];
      rootCat[0] = best_split(catNames);
      if (rootCat[0] == null) {
         rootCat[0] = create_leaf_categorizer(TS.total_weight(),
					tieBreakingOrder, numSubtreeErrors,
					pessimisticSubtreeErrors);
         // We catch empty leaves in induce_tree_from_split. Hence, this can't be
         // a trivial leaf.
//         MLJ.ASSERT(MLJ.approx_greater(rootCat[0].total_weight(), 0),"TDDTInducer.induce_decision_tree: MLJ.approx_greater(rootCat[0].total_weight(), 0) == false");
         numLeaves.value = 1;
         decisionTree.set_root(decisionTree.create_node(rootCat, get_level()));
         MLJ.ASSERT(rootCat[0] == null,"TDDTInducer.induce_decision_tree: rootCat[0] != null"); // create_node gets ownership
//         IFDRIBBLE(dribble_level(level, "Leaf node", remainingSiblings));
      } else {
         NodeCategorizer splitCat = rootCat[0];
         decisionTree.set_root(decisionTree.create_node(rootCat, get_level()));
         MLJ.ASSERT(rootCat[0] == null,"TDDTInducer.induce_decision_tree: rootCat[0] != null"); // create_node gets ownership
         induce_tree_from_split(decisionTree, splitCat, catNames,
			     tieBreakingOrder, numSubtreeErrors,
			     pessimisticSubtreeErrors, numLeaves,
			     remainingSiblings);
      }

      catNames = null;
//      DBG(catNames = null);
      logOptions.LOG(6, "TDDT returning " + decisionTree.get_root() +'\n');

      MLJ.ASSERT(pessimisticSubtreeErrors.value >= 0,"TDDTInducer.induce_decision_tree: pessimisticSubtreeErrors.value < 0");
      return decisionTree.get_root();

   }
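   /* A minimal sketch of how the recursion above is typically driven
    * (hedged: the DoubleRef/IntRef constructors and the null initial
    * tie-breaking order are assumptions about the MLJ utility classes):
    *
    *    CGraph graph = new CGraph();
    *    DoubleRef errors = new DoubleRef(0);
    *    DoubleRef pessimistic = new DoubleRef(0);
    *    IntRef leaves = new IntRef(0);
    *    Node root = induce_decision_tree(graph, null, errors,
    *                                     pessimistic, leaves, 0);
    *    // build_categorizer(...) then wraps the finished DecisionTree so
    *    // that was_trained(true) succeeds.
    */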

   /** Builds a decision tree categorizer for the given DecisionTree.
    * @param dTree The DecisionTree to use for creating the categorizer.
    */
   protected void build_categorizer(DecisionTree dTree)
   {
      decisionTreeCat = null;
      decisionTreeCat = new DTCategorizer(dTree, description(),
                                          TS.num_categories(),
                                          TS.get_schema());
      
      decisionTreeCat.set_leaf_dist_params(tddtOptions.leafDistType,
                                           tddtOptions.MEstimateFactor,
					   tddtOptions.evidenceFactor);
      //ASSERT(dtree==null);
   }

   /** best_split finds the best split at this node and returns a categorizer
    * implementing it. It fills catNames with the names of the resulting
    * categories.
    *
    * @param catNames The list to be filled with the categories of the split.
    * @return The Categorizer implementing the best split, or null if no
    * useful split exists.
    */
   abstract public NodeCategorizer best_split(LinkedList catNames);
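   /* A hedged sketch of what a concrete subclass's best_split computes
    * (ID3-style information gain; entropy() and the selection loop below are
    * illustrative, not the MLJ API):
    *
    *    double entropy(double[] counts) {
    *       double total = 0, h = 0;
    *       for (double c : counts) total += c;
    *       for (double c : counts)
    *          if (c > 0) {
    *             double p = c / total;
    *             h -= p * Math.log(p) / Math.log(2);   // bits
    *          }
    *       return h;
    *    }
    *    // best_split then picks the attribute whose partition of the
    *    // training weight minimizes the weighted child entropy, fills
    *    // catNames with that attribute's value names, and returns a
    *    // categorizer testing it; null signals that no attribute helps.
    */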

   /** Computes the number of errors this node would make as a leaf. If
    * totalWeight is zero, the distribution is ignored; otherwise totalWeight
    * must be the sum of the distribution counts.
    * @param cat          The Categorizer for the node being checked.
    * @param predictClass The category for which this node is being checked.
    * @param totalWeight  The weight of all instances in the data set.
    * @return The number of errors this node would make if it were a leaf
    * on the decision tree.
    */
   protected static double num_cat_errors(Categorizer cat, int predictClass, double totalWeight)
   {
      double numErrors = 0;
      if (!cat.has_distr())
         Error.fatalErr("TDDTInducer.num_cat_errors: Categorizer has no distribution");
//      DBG(
//         const double[]& dist = cat.get_distr();
//         double sum = dist.sum();
//         // if (numInstances > 0) @@ will this fail?  If yes, comment why
//         MLJ.verify_approx_equal((StoredReal)sum, (StoredReal)totalWeight,
//                                 "TDDTInducer.num_cat_errors: summation of "
//                                 "distribution fails to equal number of "
//                                 "instances", 100);
//         );

      if (totalWeight > 0) { // we're not an empty leaf
         double numPredict = cat.get_distr()[predictClass - Globals.FIRST_NOMINAL_VAL];
         double nodeErrors = totalWeight - numPredict; // error count
//         ASSERT(nodeErrors >= 0);
         numErrors = nodeErrors; // increment parent's count of errors
      }
      return numErrors;
   }
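   /* Worked example (illustrative; category indexing follows the
    * Globals.FIRST_NOMINAL_VAL offset used above): with distribution
    * counts {3.0, 7.0}, totalWeight = 10.0, and predictClass mapping to
    * the second entry, numPredict = 7.0, so the node would make
    * 10.0 - 7.0 = 3.0 errors as a leaf.
    */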

   /** Creates a leaf categorizer (has no children). We currently create
    * a ConstCategorizer with a description and the majority category.
    * Note that the augCategory will contain the correct string,
    * but the description will contain more information which may
    * help when displaying the graph. The augCategory string must
    * be the same for CatTestResult to work properly (it compares
    * the actual string for debugging purposes).
    * @param totalWeight The total weight of the training data set.
    * @param tieBreakingOrder Order for breaking distribution ties.
    * @param numErrors The number of errors this LeafCategorizer
    * will produce.
    * @param pessimisticErrors Error estimate if this was a leaf node.
    * @return The LeafCategorizer created.
    */
public LeafCategorizer create_leaf_categorizer(double totalWeight,
				     int[] tieBreakingOrder,
				     DoubleRef numErrors, DoubleRef pessimisticErrors)
{
   return create_leaf_categorizer(totalWeight, tieBreakingOrder, numErrors,
                                  pessimisticErrors, null);
}

/** Creates a leaf categorizer (has no children). We currently create
 * a ConstCategorizer with a description and the majority category.
 * If the distrArray is given, we don't reference the training set,
 * except for its schema (used in pruning).<P>
 * Note that the augCategory will contain the correct string,
 * but the description will contain more information which may
 * help when displaying the graph. The augCategory string must
 * be the same for CatTestResult to work properly (it compares
 * the actual string for debugging purposes).
 * @param totalWeight The total weight of the training data set.
 * @param tieBreakingOrder Order for breaking distribution ties.
 * @param numErrors The number of errors this LeafCategorizer
 * will produce.
 * @param pessimisticErrors Error estimate if this was a leaf node.
 * @param distrArray Distribution of weight over labels.
 * @return The LeafCategorizer created.
 */
public LeafCategorizer create_leaf_categorizer(double totalWeight,
				     int[] tieBreakingOrder,
				     DoubleRef numErrors, DoubleRef pessimisticErrors,
				     double[] distrArray)
{
   // Find tiebreaking order.
   int[] myTiebreakingOrder = null;
   double[] weightDistribution = (distrArray!=null) ? distrArray : TS.counters().label_counts();
//   ASSERT(weightDistribution.low() == Globals.UNKNOWN_CATEGORY_VAL);
   if (tddtOptions.parentTieBreaking)
      myTiebreakingOrder = CatDist.merge_tiebreaking_order(tieBreakingOrder,
                                     weightDistribution);
   else
      myTiebreakingOrder = CatDist.tiebreaking_order(weightDistribution);
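   /* Hedged note (the exact ordering rules of CatDist.tiebreaking_order and
    * merge_tiebreaking_order are assumptions): for a weightDistribution of
    * {2.0, 5.0, 2.0}, the middle category ranks first, and the two tied
    * categories are ordered by the parent's tieBreakingOrder when
    * parentTieBreaking is set, keeping predictions consistent between a
    * parent and its children.
    */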
   
   ConstCategorizer leafCat = null;

   // @@ this is silly. We compute the majority category, make a ConstCat for
   // it, then turn around and predict a different category (the one that
   // produces the least loss). We use this majority to compute the number of
   // errors, even if we don't predict it!
   int majority = CatDist.majority_category(weightDistribution,
						  myTiebreakingOrder);

   if(tddtOptions.leafDistType == allOrNothing) {
      AugCategory	augMajority = new AugCategory(majority,
		     TS.get_schema().category_to_label_string(majority));
      logOptions.LOG(3, "All-or-nothing Leaf is: "); 
      leafCat = new ConstCategorizer(" ", augMajority, TS.get_schema());
      AugCategory bestPrediction = leafCat.get_category();
      logOptions.LOG(3, ""+bestPrediction.toString()+'\n');
      String myDescr = bestPrediction.description();//.read_rep();
      leafCat.set_description(myDescr);
   } else {
      double[] fCounts = weightDistribution;
      CatDist cDist = null;
      switch (tddtOptions.leafDistType) {
	 case frequencyCounts:
	    cDist = new CatDist(TS.get_schema(), fCounts, CatDist.none);
	    logOptions.LOG(3, "Frequency-count Leaf is: ");
	    break;
	 case laplaceCorrection:
	    cDist = new CatDist(TS.get_schema(), fCounts, CatDist.laplace,
				tddtOptions.MEstimateFactor);
	    logOptions.LOG(3, "Laplace Leaf is: ");
	    break;
	 case evidenceProjection:
	    cDist = new CatDist(TS.get_schema(), fCounts, CatDist.evidence,
				tddtOptions.evidenceFactor);
	    logOptions.LOG(3, "Evidence Leaf is: ");
	    break;
	 default:
	    MLJ.Abort();
      }
      logOptions.LOG(3, ""+cDist+'\n');
      cDist.set_tiebreaking_order(myTiebreakingOrder);
      AugCategory bestCategory = cDist.best_category();
      String myDescr = bestCategory.description();//.read_rep();
      leafCat = new ConstCategorizer(myDescr, cDist, TS.get_schema());
//      DBG(ASSERT(cDist == null));
   }
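   /* Hedged numeric sketch of the leaf-distribution choices above (the exact
    * CatDist smoothing formulas are assumptions; Laplace is shown in its
    * textbook form): with frequency counts {3, 7} over 2 classes,
    *    frequencyCounts   -> {3/10, 7/10}                   = {0.30, 0.70}
    *    laplaceCorrection -> {(3+1)/(10+2), (7+1)/(10+2)}   = {0.333, 0.667}
    * Laplace pulls the estimates toward uniform, which matters most for
    * low-weight leaves.
    */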

   myTiebreakingOrder = null; //delete myTiebreakingOrder;
   
//   ASSERT(leafCat);

//   DBGSLOW(
//	   InstanceRC dummy(leafCat.get_schema());
//	   MStringRC predDescr = leafCat.categorize(dummy).description();
//	   if (predDescr != leafCat.description())
//	      Error.fatalErr("cat descriptions don't match: I picked "
//	          +leafCat.description()+", leafCat predicted "
//	          +predDescr+". CatDist is "+leafCat.get_cat_dist());
//	   );
   
   if (distrArray != null) {
      double[] newDistr = new double[distrArray.length - 1];//(0, distrArray.length - 1, 0);
      for (int i = 0; i < newDistr.length; i++)
	    newDistr[i] = distrArray[i];
      leafCat.set_distr(newDistr);
   } else {
      // Use coarser granularity when approx_equal invoked with floats.
      if (MLJ.approx_equal((float)totalWeight,0.0)
	 && !tddtOptions.emptyNodeParentDist) {
	 double[] disArray = new double[TS.num_categories()];// (0, TS.num_categories(), 0);
	 leafCat.set_distr(disArray);
      } else
	 leafCat.build_distr(instance_list());
   }

   // If there are no instances, we predict like the parent and
   //   the penalty for pessimistic errors comes from the other children.
   //   Note that we can't just call num_cat because the distribution
   //   may be the parent's distribution
   // Use coarser granularity when approx_equal invoked with floats.
   if (MLJ.approx_equal((float)totalWeight,0.0)) {
      numErrors.value = 0;
      pessimisticErrors.value = 0;
   } else {
      numErrors.value = num_cat_errors(leafCat, majority, totalWeight);
      pessimisticErrors.value = CatTestResult.pessimistic_error_correction(
                          numErrors.value, totalWeight, get_pruning_factor());
   }
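   /* Hedged note: the formula inside CatTestResult.pessimistic_error_correction
    * is not shown on this page; C4.5-style corrections add a confidence-based
    * penalty per leaf, so with numErrors = 3.0 and totalWeight = 10.0 the
    * pessimistic estimate comes out above 3.0. This biases pruning toward
    * collapsing subtrees whose observed advantage over a single leaf is small.
    */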

//   ASSERT(numErrors >= 0);
//   ASSERT(pessimisticErrors >= 0);
/*
   if (get_debug()) {
      int numChars = 128;
      char buffer[numChars];
      for (int chr = 0; chr < numChars; chr++)
	   buffer[chr] = '\0';
      MLCOStream *stream = new MLCOStream(EMPTY_STRING, buffer, numChars);
      CatDist score = leafCat.get_cat_dist();
      *stream << score.get_scores();
      String pDist = stream.mem_buf();
      stream = null; //delete stream;
      stream = new MLCOStream(EMPTY_STRING, buffer, numChars);
      *stream << leafCat.get_distr();
