tddtinducer.java
ID3 algorithm implementation for data warehouse mining and development (Java)
Page 1 of 5
      String wDist = stream.mem_buf();
      stream = null; //delete stream;
      MString& newDescr = leafCat.description();
      String dbgDescr = newDescr + " (#=" + MString(totalWeight,0) +
	 " Err=" + MString(numErrors, 0) + "/" +
	String(pessimisticErrors, 2) + ")\\npDist=" + pDist +
	 "\\nwDist=" + wDist;
      leafCat.set_description(dbgDescr);
   }
*/
   Categorizer cat = leafCat;
   
   LeafCategorizer leafCategorizer = new LeafCategorizer(cat);
//   DBG(ASSERT(cat == null));
   return leafCategorizer;
}

/** Induces a decision tree from a given split. The split is provided
 * in the form of a categorizer, which picks which subtree a given
 * instance will follow (a conceptual sketch of this partitioning
 * follows the method body).
 * @param decisionTree Decision tree induced.
 * @param splitCat The categorizer for this split.
 * @param catNames List of category names.
 * @param tieBreakingOrder Order for breaking distribution ties.
 * @param numSubtreeErrors Number of errors this subtree makes when categorizing the instances.
 * @param pessimisticSubtreeErrors Error estimate if this node were a leaf.
 * @param numLeaves Number of leaves in this subtree.
 * @param remainingSiblings Number of siblings that have not been induced yet.
 */
protected void induce_tree_from_split(DecisionTree decisionTree, NodeCategorizer splitCat, LinkedList catNames, int[] tieBreakingOrder, DoubleRef numSubtreeErrors, DoubleRef pessimisticSubtreeErrors, IntRef numLeaves, int remainingSiblings)
{
   int[] myTiebreakingOrder =
      CatDist.merge_tiebreaking_order(tieBreakingOrder,
				       TS.counters().label_counts());
   InstanceList[] instLists =
      splitCat.split_instance_list(instance_list());
   // Add one if we have unknown instances
//   IFDRIBBLE(dribble_level(level, splitCat.description(), remainingSiblings));
   numSubtreeErrors.value = 0;
   pessimisticSubtreeErrors.value = 0;
   numLeaves.value = 0;
   DoubleRef numChildErrors = new DoubleRef(0);
   DoubleRef childPessimisticErrors = new DoubleRef(0);
   Node largestChild = null; // with the most instances (weight)
   DoubleRef maxChildWeight = new DoubleRef(-1);   
   for (int cat = 0; cat < instLists.length; cat++) {
      if (instLists[cat].num_instances() >= instance_list().num_instances())
	 Error.fatalErr("TDDTInducer.induce_tree_from_split: the most recent split "
	     +splitCat.description()+" resulted in no reduction of the "
	     +"instance list total weight (from "
	     +instance_list().total_weight()+" to "
	     +instLists[cat].total_weight()+")");
      int remainingChildren = instLists.length - cat;
      Node child;
      if (instLists[cat].no_weight()) {
		// No weight of instances with this value.  Make it a leaf (majority),
		//   unless category unknown.
//		if (cat != UNKNOWN_CATEGORY_VAL)
//			IFDRIBBLE(dribble_level(level+1, "Leaf node",
//				remainingChildren));
		if (get_unknown_edges() || cat != Globals.UNKNOWN_CATEGORY_VAL) { 
			logOptions.LOG(3, "Category: " + (cat - 1)//-1 added to match MLC output -JL
				+" empty.  Assigning majority"+'\n');
			NodeCategorizer[] constCat = new NodeCategorizer[1];
			constCat[0] = create_leaf_categorizer(0, myTiebreakingOrder,
				numChildErrors, childPessimisticErrors);
			if (cat != Globals.UNKNOWN_CATEGORY_VAL)
				++numLeaves.value;  // don't count trivial leaves
			MLJ.ASSERT(numChildErrors.value == 0,"TDDTInducer.induce_tree_from_split: numChildErrors.value != 0");
			MLJ.ASSERT(childPessimisticErrors.value == 0,"TDDTInducer.induce_tree_from_split: childPessimisticErrors.value != 0");
			child = decisionTree.create_node(constCat, get_level() + 1);
			MLJ.ASSERT(constCat[0] == null,"TDDTInducer.induce_tree_from_split: constCat != null");
			//create_node gets ownership
			logOptions.LOG(6, "Created child leaf "+child+'\n');
			logOptions.LOG(6, "Connecting root "+decisionTree.get_root()
				+" to child "+child
				+" with string '"+(String)catNames.get(cat)+"'"+'\n');
			connect(decisionTree, decisionTree.get_root(), child,
				cat, (String)catNames.get(cat));
		}
      } else { // Solve the problem recursively.
		CGraph aCgraph = decisionTree.get_graph();
		logOptions.LOG(3, "Recursive call"+'\n');
		double totalChildWeight = instLists[cat].total_weight();
		TDDTInducer childInducer =
			create_subinducer(name_sub_inducer(splitCat.description(), cat),
				aCgraph);
		childInducer.set_total_inst_weight(get_total_inst_weight());
		childInducer.assign_data(instLists[cat]);
		IntRef numChildLeaves = new IntRef(0);
		child = childInducer.induce_decision_tree(aCgraph,
						    myTiebreakingOrder,
						    numChildErrors,
						    childPessimisticErrors,
						    numChildLeaves,
						    remainingChildren);
		numSubtreeErrors.value += numChildErrors.value;
		pessimisticSubtreeErrors.value += childPessimisticErrors.value;
		numLeaves.value += numChildLeaves.value;
		if (totalChildWeight > maxChildWeight.value) {
			maxChildWeight.value = totalChildWeight;
			largestChild = child;
		}
		childInducer = null; //delete childInducer;
		Node root = decisionTree.get_root();
		logOptions.LOG(6, "Connecting child "+child+" to root "
			+root+", using "+cat
			+" with string '"+(String)catNames.get(cat)+"'"+'\n');
		connect(decisionTree, decisionTree.get_root(), child,
		cat, (String)catNames.get(cat));
	}
   }

   MLJ.clamp_above(maxChildWeight, 0, "TDDTInducer.induce_tree_from_split: "
   		   +"maximum child's weight must be non-negative");

	MLJ.ASSERT(largestChild != null,"TDDTInducer.induce_tree_from_split: largestChild == null");
//   DBGSLOW(decisionTree.OK(1));
   
   instLists = null; //delete &instLists;
/*   prune_subtree(decisionTree, myTiebreakingOrder,
		 largestChild, numSubtreeErrors, pessimisticSubtreeErrors,
		 numLeaves);
*/
   myTiebreakingOrder = null; //delete myTiebreakingOrder;
/*   
   if (get_debug()) {
      // Cast away constness for modifying the name.
      Categorizer splitC = (Categorizer)decisionTree.
	         get_categorizer(decisionTree.get_root());
      String name = splitC.description();
      double[] distribution = splitC.get_distr();
      int numChars = 128;
      char buffer[numChars];
      for (int chr = 0; chr < numChars; chr++)
	         buffer[chr] = '\0';
      MLCOStream stream(EMPTY_STRING, buffer, numChars);
      stream << distribution;
      String distDescrip = stream.mem_buf();
      String newName = name + "\\nErr=" + String(numSubtreeErrors, 3) +
	 "/" + String(pessimisticSubtreeErrors, 3);
      if (splitC.class_id() != CLASS_CONST_CATEGORIZER)
	 newName += "\\nwDist=" + distDescrip;
      splitC.set_description(newName);
   }
*/
//   if (get_level() == 0)
//      DRIBBLE(endl);
}
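
/* Illustrative sketch (not part of the original source): conceptually,
 * splitCat.split_instance_list() partitions the training set by the category
 * that the split categorizer assigns to each instance, along the lines of the
 * hypothetical helper below.  The names partition(), instance(), categorize()
 * and add() are assumptions for illustration only, not the actual MLC++/MLJ API.
 *
 *    InstanceList[] partition(NodeCategorizer splitCat, InstanceList data,
 *                             int numCategories) {
 *       InstanceList[] lists = new InstanceList[numCategories];
 *       for (int c = 0; c < numCategories; c++)
 *          lists[c] = new InstanceList();                 // one bucket per branch
 *       for (int i = 0; i < data.num_instances(); i++) {
 *          Instance inst = data.instance(i);              // hypothetical accessor
 *          lists[splitCat.categorize(inst)].add(inst);    // route by branch value
 *       }
 *       return lists;
 *    }
 */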

/** Connects two nodes in the specified CatGraph.
 * @param catGraph The CatGraph containing these nodes.
 * @param from     The node from which the edge originates.
 * @param to       The node to which the edge connects.
 * @param edgeVal  The value of the AugCategory associated with that edge.
 * @param edgeName The name of the edge.
 */
protected void connect(CatGraph catGraph, Node from, Node to, int edgeVal, String edgeName)
{
   AugCategory edge = new AugCategory(edgeVal, edgeName);
   logOptions.GLOBLOG(6, "TDDTInducer's connect(), given string '" +edgeName
	   +"', is using '" + edge.description()
	   +"' as an edge description\n");
   catGraph.connect(from, to, edge);
//   ASSERT(edge == NULL); // connect() gets ownership
//   catGraph.OK(1);
}

/** Create a string to name the subinducer. We just append some basic info.
 * @return The name of the subinducer.
 * @param catDescr	The description of the split categorizer that spawned this subinducer.
 * @param catNum	The category (branch) number for which this subinducer
 * is inducing.
 */
public String name_sub_inducer(String catDescr, int catNum)
{
   String CAT_EQUAL = " Cat=";
   String CHILD_EQUAL = " child =";
   
   return description() + CAT_EQUAL + catDescr + CHILD_EQUAL + catNum;
}

/** Creates the inducer used for the recursive call. Since this is an
 * abstract class, it cannot simply copy itself; concrete subclasses must
 * construct an instance of their own type (a sketch of a typical
 * implementation follows this declaration).
 *
 * @param dscr The description for the subinducer.
 * @param aCgraph The categorizer graph to use for the subinducer.
 * @return The new subinducer.
 */
abstract public TDDTInducer create_subinducer(String dscr, CGraph aCgraph);
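
/* Illustrative sketch (not part of the original source): a concrete subclass,
 * say a hypothetical Id3Inducer, would typically implement create_subinducer()
 * by constructing another instance of its own type on the shared CGraph and
 * carrying its options over.  The constructor and the set_level()/copy_options()
 * calls below are assumptions for illustration only.
 *
 *    public TDDTInducer create_subinducer(String dscr, CGraph aCgraph) {
 *       Id3Inducer sub = new Id3Inducer(dscr, aCgraph);   // hypothetical constructor
 *       sub.set_level(get_level() + 1);                   // child sits one level deeper
 *       sub.copy_options(this);                           // hypothetical option transfer
 *       return sub;
 *    }
 */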

/** When the subtree rooted from the current node does not improve
 * the error, the subtree may be replaced by a leaf or by its largest
 * child. This serves as a collapsing mechanism if the pruning factor
 * is 0, i.e., we collapse the subtree if it has the same number of
 * errors as all children.<P>
 * "Confidence" pruning is based on C4.5's pruning method. "Penalty"
 * pruning is based on "Pessimistic Decision tree pruning based on tree
 * size" by Yishay Mansour, ICML-97. "Linear" pruning is used to implement
 * cost-complexity pruning as described in CART.  Its use is not
 * recommended otherwise. "KLdistance" pruning uses the Kullback-Leibler
 * distance metric to determine whether to prune.<P>
 * This function is divided into three main parts. First, initial
 * checks are performed and values are set. Second, the test specific
 * to each pruning method is performed. Last, if pruning is
 * necessary, it is carried out. (An illustrative sketch of a
 * confidence-style pessimistic error correction follows the method body.)
 * @param decisionTree Tree to be pruned.
 * @param tieBreakingOrder Order for breaking distribution ties.
 * @param largestChild The largest child node.
 * @param numSubtreeErrors Number of errors this subtree produces in categorization of Instances.
 * @param pessimisticSubtreeErrors Error estimate if this was a leaf node.
 * @param numLeaves Number of leaves on a subtree.
 */
public void prune_subtree(DecisionTree decisionTree,
				int[] tieBreakingOrder,
				Node largestChild,
				DoubleRef numSubtreeErrors,
				DoubleRef pessimisticSubtreeErrors,
				IntRef numLeaves)
{
   logOptions.LOG(0, "Pruning is taking place.\n");
   MLJ.ASSERT(numSubtreeErrors.value >= 0,"TDDTInducer:prune_subtree:"
			+" numSubtreeErrors < 0");
   MLJ.ASSERT(pessimisticSubtreeErrors.value >= 0,"TDDTInducer:prune_subtree:"
			+" pessimisticSubtreeErrors < 0");
   Node treeRoot = decisionTree.get_root(true);

   // @@ CatDTInducers can't prune, but we don't want to check
   // get_prune_tree() here because even if we're not doing pruning, this code
   // does some useful safety checks. The checks aren't valid on
   // CatDTInducers, because they do not compute pessimisticSubtreeErrors.
//   if (this instanceof CatDTInducer) return;
//   if (class_id() == CatDT_INDUCER)
//      return;

//   DBGSLOW(if (numLeaves != decisionTree.num_nontrivial_leaves())
//	      Error.fatalErr("TDDTInducer.prune_subtree: number of leaves given "
//	          +numLeaves+" is not the same as the number counted "
//	          +decisionTree.num_nontrivial_leaves()));

//   DBGSLOW(
//       // We don't want any side effect logging only in debug level
//       logOptions logOpt(logOptions.get_log_options());
//       logOpt.set_log_level(0);
//       double pess_err =
//         pessimistic_subtree_errors(logOpt, decisionTree, treeRoot, *TS,
//				    get_pruning_factor(), tieBreakingOrder);
//       MLJ.verify_approx_equal(pess_err, pessimisticSubtreeErrors,
//			       "TDDTInducer.prune_subtree: pessimistic error"
//			       " differs from expected value");
//          );
   // How many errors (weighted) would we make with a leaf here?
   int myMajority = TS.majority_category(tieBreakingOrder);
   double numMajority = TS.counters().label_count(myMajority);
   double totalWeight = TS.total_weight();
   double myErrors = totalWeight - numMajority;
   if (!(MLJ.approx_greater(myErrors, numSubtreeErrors.value) ||
	MLJ.approx_equal(myErrors, numSubtreeErrors.value)))
      Error.fatalErr("TDDTInducer.prune_subtree: myErrors is not >= numSubtreeErrors"
	 +": myErrors - numSubtreeErrors = "+(myErrors - numSubtreeErrors.value));
   int numChildren = decisionTree.num_children(treeRoot);

   // test if a leaf; if so, we can exit immediately
   if (numChildren == 0) {
      numSubtreeErrors.value = totalWeight - numMajority;
      numLeaves.value = 1;
      return;
   }
   
   logOptions.LOG(3, "Testing at "
      +decisionTree.get_categorizer(treeRoot).description()
      +" (weight "+decisionTree.get_categorizer(treeRoot).total_weight()
      +')'+'\n');

   boolean pruneSubtree = false;
   boolean pruneChild = false;
   // We need to declare these here, as we use them during pruning
   double myPessimisticErrors = CatTestResult.pessimistic_error_correction(
                    myErrors, TS.total_weight(), get_pruning_factor());
   DoubleRef childPessimisticErrors = new DoubleRef(0);
   if (get_pruning_factor() == 0)  
      MLJ.verify_approx_equal(myPessimisticErrors, myErrors,
			      "TDDTInducer.prune_subtree:pessimistic error "
			      +"when computed for leaf, "
			      +"differs from expected value");

   switch (get_pruning_method()) {
      case confidence:
	 //@@ replace "100 * MLC.real_epsilon()" with "0.1" for
	 //@@   C4.5 functionality 
	 if (myPessimisticErrors - pessimis
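
/* Illustrative sketch (not part of the original source): the confidence-based
 * ("C4.5-style") test compares the pessimistic error estimate of a would-be
 * leaf with the pessimistic errors summed over the subtree.  One common way to
 * make an observed error count pessimistic is a normal-approximation upper
 * bound on the binomial error rate; the exact formula inside
 * CatTestResult.pessimistic_error_correction() may differ, so treat the helper
 * below as an assumption.
 *
 *    // errors observed, total instance weight, and confidence factor z
 *    static double pessimisticErrors(double errors, double weight, double z) {
 *       if (weight <= 0) return 0;
 *       double p = errors / weight;                             // observed error rate
 *       double upper = p + z * Math.sqrt(p * (1 - p) / weight); // upper bound on the rate
 *       return Math.min(weight, upper * weight);                // back to an error count
 *    }
 *
 * With z == 0 this reduces to the observed errors, which is consistent with
 * the verify_approx_equal() check above when get_pruning_factor() == 0.
 */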
