tddtinducer.java (JAVA)
Data-mining algorithms (archive description: written in C)
Page 1 of 5
字號(hào):
      String wDist = stream.mem_buf();
      stream = null; //delete stream;
      MString& newDescr = leafCat.description();
      String dbgDescr = newDescr + " (#=" + MString(totalWeight,0) +
	 " Err=" + MString(numErrors, 0) + "/" +
	String(pessimisticErrors, 2) + ")\\npDist=" + pDist +
	 "\\nwDist=" + wDist;
      leafCat.set_description(dbgDescr);
   }
*/
   Categorizer cat = leafCat;
   
   LeafCategorizer leafCategorizer = new LeafCategorizer(cat);
//   DBG(ASSERT(cat == null));
   return leafCategorizer;
}

/** Induce a decision tree from a given split. The split is provided
 * in the form of a categorizer, which picks which subtree a given
 * instance will follow.
 * @param decisionTree The decision tree being induced.
 * @param splitCat The categorizer for this split.
 * @param catNames List of category names.
 * @param tieBreakingOrder Order for breaking distribution ties.
 * @param numSubtreeErrors Number of errors this subtree produces when categorizing Instances.
 * @param pessimisticSubtreeErrors Error estimate if this were a leaf node.
 * @param numLeaves Number of leaves in the subtree.
 * @param remainingSiblings Siblings that have not been induced yet.
 */
protected void induce_tree_from_split(DecisionTree decisionTree,
				NodeCategorizer splitCat,
				LinkedList catNames,
				int[] tieBreakingOrder,
				DoubleRef numSubtreeErrors,
				DoubleRef pessimisticSubtreeErrors,
				IntRef numLeaves,
				int remainingSiblings)
{
   int[] myTiebreakingOrder =
      CatDist.merge_tiebreaking_order(tieBreakingOrder,
				       TS.counters().label_counts());
   InstanceList[] instLists =
      splitCat.split_instance_list(instance_list());
   // Add one if we have unknown instances
//   IFDRIBBLE(dribble_level(level, splitCat.description(), remainingSiblings));
   numSubtreeErrors.value = 0;
   pessimisticSubtreeErrors.value = 0;
   numLeaves.value = 0;
   DoubleRef numChildErrors = new DoubleRef(0);
   DoubleRef childPessimisticErrors = new DoubleRef(0);
   Node largestChild = null; // with the most instances (weight)
   DoubleRef maxChildWeight = new DoubleRef(-1);   
   for (int cat = 0; cat < instLists.length; cat++) {
      if (instLists[cat].num_instances() >= instance_list().num_instances())
	 Error.fatalErr("TDDTInducer.induce_tree_from_split: the most recent split "
	     +splitCat.description()+" resulted in no reduction of the "
	     +"instance list total weight (from "
	     +instance_list().total_weight()+" to "
	     +instLists[cat].total_weight()+")");
      int remainingChildren = instLists.length - cat;
      Node child;
      if (instLists[cat].no_weight()) {
		// No weight of instances with this value.  Make it a leaf (majority),
		//   unless category unknown.
//		if (cat != UNKNOWN_CATEGORY_VAL)
//			IFDRIBBLE(dribble_level(level+1, "Leaf node",
//				remainingChildren));
		if (get_unknown_edges() || cat != Globals.UNKNOWN_CATEGORY_VAL) { 
			logOptions.LOG(3, "Category: " + (cat - 1)//-1 added to match MLC output -JL
				+" empty.  Assigning majority"+'\n');
			NodeCategorizer[] constCat = new NodeCategorizer[1];
			constCat[0] = create_leaf_categorizer(0, myTiebreakingOrder,
				numChildErrors, childPessimisticErrors);
			if (cat != Globals.UNKNOWN_CATEGORY_VAL)
				++numLeaves.value;  // don't count trivial leaves
			MLJ.ASSERT(numChildErrors.value == 0,"TDDTInducer.induce_tree_from_split: numChildErrors.value != 0");
			MLJ.ASSERT(childPessimisticErrors.value == 0,"TDDTInducer.induce_tree_from_split: childPessimisticErrors.value != 0");
			child = decisionTree.create_node(constCat, get_level() + 1);
			MLJ.ASSERT(constCat[0] == null,"TDDTInducer.induce_tree_from_split: constCat != null");
			//create_node gets ownership
			logOptions.LOG(6, "Created child leaf "+child+'\n');
			logOptions.LOG(6, "Connecting root "+decisionTree.get_root()
				+" to child "+child
				+" with string '"+(String)catNames.get(cat)+"'"+'\n');
			connect(decisionTree, decisionTree.get_root(), child,
				cat, (String)catNames.get(cat));
		}
      } else { // Solve the problem recursively.
		CGraph aCgraph = decisionTree.get_graph();
		logOptions.LOG(3, "Recursive call"+'\n');
		double totalChildWeight = instLists[cat].total_weight();
		TDDTInducer childInducer =
		create_subinducer(name_sub_inducer(splitCat.description(), cat),
			      aCgraph);
		childInducer.set_total_inst_weight(get_total_inst_weight());
		childInducer.assign_data(instLists[cat]);
		IntRef numChildLeaves = new IntRef(0);
		child = childInducer.induce_decision_tree(aCgraph,
						    myTiebreakingOrder,
						    numChildErrors,
						    childPessimisticErrors,
						    numChildLeaves,
						    remainingChildren);
		numSubtreeErrors.value += numChildErrors.value;
		pessimisticSubtreeErrors.value += childPessimisticErrors.value;
		numLeaves.value += numChildLeaves.value;
		if (totalChildWeight > maxChildWeight.value) {
			maxChildWeight.value = totalChildWeight;
			largestChild = child;
		}
		childInducer = null; //delete childInducer;
		Node root = decisionTree.get_root();
		logOptions.LOG(6, "Connecting child "+child+" to root "
			+root+", using "+cat
			+" with string '"+(String)catNames.get(cat)+"'"+'\n');
		connect(decisionTree, decisionTree.get_root(), child,
		cat, (String)catNames.get(cat));
	}
   }

   MLJ.clamp_above(maxChildWeight, 0, "TDDTInducer.induce_tree_from_split: "
   		   +"maximum child's weight must be non-negative");

	MLJ.ASSERT(largestChild != null,"TDDTInducer.induce_tree_from_split: largestChild == null");
//   DBGSLOW(decisionTree.OK(1));
   
   instLists = null; //delete &instLists;
/*   prune_subtree(decisionTree, myTiebreakingOrder,
		 largestChild, numSubtreeErrors, pessimisticSubtreeErrors,
		 numLeaves);
*/
   myTiebreakingOrder = null; //delete myTiebreakingOrder;
/*   
   if (get_debug()) {
      // Cast away constness for modifying the name.
      Categorizer splitC = (Categorizer)decisionTree.
	         get_categorizer(decisionTree.get_root());
      String name = splitC.description();
      double[] distribution = splitC.get_distr();
      int numChars = 128;
      char buffer[numChars];
      for (int chr = 0; chr < numChars; chr++)
	         buffer[chr] = '\0';
      MLCOStream stream(EMPTY_STRING, buffer, numChars);
      stream << distribution;
      String distDescrip = stream.mem_buf();
      String newName = name + "\\nErr=" + String(numSubtreeErrors, 3) +
	 "/" + String(pessimisticSubtreeErrors, 3);
      if (splitC.class_id() != CLASS_CONST_CATEGORIZER)
	 newName += "\\nwDist=" + distDescrip;
      splitC.set_description(newName);
   }
*/
//   if (get_level() == 0)
//      DRIBBLE(endl);
}
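
/* Note (not part of the original source): DoubleRef and IntRef are assumed to be simple
 * mutable holders used as out-parameters, since Java passes object references by value
 * and has no pass-by-reference. A minimal sketch of what they are assumed to look like:
 *
 *    public class DoubleRef { public double value; public DoubleRef(double v) { value = v; } }
 *    public class IntRef    { public int value;    public IntRef(int v)       { value = v; } }
 *
 * induce_tree_from_split accumulates error and leaf counts into these holders so that
 * the caller can read the totals back after the recursive calls return.
 */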

/** Connects two nodes in the specified CatGraph.
 * @param catGraph The CatGraph containing these nodes.
 * @param from     The node from which the edge originates.
 * @param to       The node to which the edge connects.
 * @param edgeVal  The value of the AugCategory associated with that edge.
 * @param edgeName The name of the edge.
 */
protected void connect(CatGraph catGraph, Node from, Node to, int edgeVal, String edgeName)
{
   AugCategory edge = new AugCategory(edgeVal, edgeName);
   logOptions.GLOBLOG(6, "TDDTInducer's connect(), given string '" +edgeName
	   +"', is using '" + edge.description()
	   +"' as an edge description\n");
   catGraph.connect(from, to, edge);
//   ASSERT(edge == NULL); // connect() gets ownership
//   catGraph.OK(1);
}
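
/* Usage sketch (hypothetical edge value and name, mirroring the calls made in
 * induce_tree_from_split above): connecting the root test node to a child for
 * branch value 1 labelled "outlook=overcast" would be
 *
 *    connect(decisionTree, decisionTree.get_root(), child, 1, "outlook=overcast");
 *
 * connect() wraps the (value, name) pair in an AugCategory and hands it to the graph,
 * which takes ownership of the edge object.
 */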

/** Create a string to name the subinducer. We just append some basic info.
 * @return The name of the subinducer.
 * @param catDescr	The description of the split categorizer under which this
 * subinducer is created.
 * @param catNum	The category number for which this subinducer is
 * inducing.
 */
public String name_sub_inducer(String catDescr, int catNum)
{
   String CAT_EQUAL = " Cat=";
   String CHILD_EQUAL = " child =";
   
   return description() + CAT_EQUAL + catDescr + CHILD_EQUAL + catNum;
}
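
/* Example (hypothetical values): if description() returns "TDDT" and the split
 * categorizer is described as "outlook", then name_sub_inducer("outlook", 2)
 * yields "TDDT Cat=outlook child =2".
 */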

/** create_subinducer creates the inducer used for the recursive call. Note that since
 * this is an abstract class, it cannot create a copy of itself; concrete subclasses must.
 *
 * @param dscr The description for the subinducer.
 * @param aCgraph The categorizer graph to use for the subinducer.
 * @return The new subinducer.
 */
abstract public TDDTInducer create_subinducer(String dscr, CGraph aCgraph);
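
/* Sketch (hypothetical subclass, not part of this file): a concrete inducer typically
 * returns a fresh instance of its own class that shares the supplied graph, e.g.
 *
 *    public TDDTInducer create_subinducer(String dscr, CGraph aCgraph) {
 *       HypotheticalTDDTInducer sub = new HypotheticalTDDTInducer(dscr, aCgraph);
 *       sub.set_log_options(get_log_options());   // hypothetical option propagation
 *       return sub;
 *    }
 *
 * The exact constructor and option-copying calls depend on the concrete subclass.
 */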

/** When the subtree rooted from the current node does not improve
 * the error, the subtree may be replaced by a leaf or by its largest
 * child. This serves as a collapsing mechanism if the pruning factor
 * is 0, i.e., we collapse the subtree if it has the same number of
 * errors as all children.<P>
 * "Confidence" pruning is based on C4.5's pruning method. "Penalty"
 * pruning is based on "Pessimistic Decision tree pruning based on tree
 * size" by Yishay Mansour, ICML-97. "Linear" pruning is used to implement
 * cost-complexity pruning as described in CART.  Its use is not
 * recommended otherwise. "KLdistance" pruning uses the Kullback-Leibler
 * distance metric to determine whether to prune.<P>
 * This function is divided into three main parts. First, initial
 * checks are performed and values are set. Second, the test specific
 * to each pruning method is performed. Last, if pruning is
 * necessary, it is carried out.
 * @param decisionTree Tree to be pruned.
 * @param tieBreakingOrder Order for breaking distribution ties.
 * @param largestChild The largest child node.
 * @param numSubtreeErrors Number of errors this subtree produces in categorization of Instances.
 * @param pessimisticSubtreeErrors Error estimate if this was a leaf node.
 * @param numLeaves Number of leaves on a subtree.
 */
public void prune_subtree(DecisionTree decisionTree,
				int[] tieBreakingOrder,
				Node largestChild,
				DoubleRef numSubtreeErrors,
				DoubleRef pessimisticSubtreeErrors,
				IntRef numLeaves)
{
   logOptions.LOG(0, "Pruning is taking place.\n");
   MLJ.ASSERT(numSubtreeErrors.value >= 0,"TDDTInducer.prune_subtree:"
			+" numSubtreeErrors < 0");
   MLJ.ASSERT(pessimisticSubtreeErrors.value >= 0,"TDDTInducer.prune_subtree:"
			+" pessimisticSubtreeErrors < 0");
   Node treeRoot = decisionTree.get_root(true);

   // @@ CatDTInducers can't prune, but we don't want to check
   // get_prune_tree() here because even if we're not doing pruning, this code
   // does some useful safety checks. The checks aren't valid on
   // CatDTInducers, because they do not compute pessimisticSubtreeErrors.
//   if (this instanceof CatDTInducer) return;
//   if (class_id() == CatDT_INDUCER)
//      return;

//   DBGSLOW(if (numLeaves != decisionTree.num_nontrivial_leaves())
//	      Error.fatalErr("TDDTInducer.prune_subtree: number of leaves given "
//	          +numLeaves+" is not the same as the number counted "
//	          +decisionTree.num_nontrivial_leaves()));

//   DBGSLOW(
//       // We don't want any side effect logging only in debug level
//       logOptions logOpt(logOptions.get_log_options());
//       logOpt.set_log_level(0);
//       double pess_err =
//         pessimistic_subtree_errors(logOpt, decisionTree, treeRoot, *TS,
//				    get_pruning_factor(), tieBreakingOrder);
//       MLJ.verify_approx_equal(pess_err, pessimisticSubtreeErrors,
//			       "TDDTInducer.prune_subtree: pessimistic error"
//			       " differs from expected value");
//          );
   // How many errors (weighted) would we make with a leaf here?
   int myMajority = TS.majority_category(tieBreakingOrder);
   double numMajority = TS.counters().label_count(myMajority);
   double totalWeight = TS.total_weight();
   double myErrors = totalWeight - numMajority;
   if (!(MLJ.approx_greater(myErrors, numSubtreeErrors.value) ||
	MLJ.approx_equal(myErrors, numSubtreeErrors.value)))
      Error.fatalErr("TDDTInducer.prune_subtree: myErrors is not >= numSubtreeErrors"
	 +": myErrors - numSubtreeErrors = "+(myErrors - numSubtreeErrors.value));
   int numChildren = decisionTree.num_children(treeRoot);

   // test if a leaf; if so, we can exit immediately
   if (numChildren == 0) {
      numSubtreeErrors.value = totalWeight - numMajority;
      numLeaves.value = 1;
      return;
   }
   
   logOptions.LOG(3, "Testing at "
      +decisionTree.get_categorizer(treeRoot).description()
      +" (weight "+decisionTree.get_categorizer(treeRoot).total_weight()
      +')'+'\n');

   boolean pruneSubtree = false;
   boolean pruneChild = false;
   // We need to declare these here, as we use them during pruning
   double myPessimisticErrors = CatTestResult.pessimistic_error_correction(
                    myErrors, TS.total_weight(), get_pruning_factor());
   DoubleRef childPessimisticErrors = new DoubleRef(0);
   if (get_pruning_factor() == 0)  
      MLJ.verify_approx_equal(myPessimisticErrors, myErrors,
			      "TDDTInducer.prune_subtree:pessimistic error "
			      +"when computed for leaf, "
			      +"differs from expected value");

   switch (get_pruning_method()) {
      case confidence:
	 //@@ replace "100 * MLC.real_epsilon()" with "0.1" for
	 //@@   C4.5 functionality 
	 if (myPessimisticErrors - pessimis
