
mlknn.java
Multi-label classification, integrated with Weka
Language: Java
package mulan.classifier;

import weka.core.Instance;
import weka.core.Instances;
import weka.core.TechnicalInformation;
import weka.core.Utils;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformation.Type;
import weka.core.neighboursearch.LinearNNSearch;

/**
 * 
 * <!-- globalinfo-start -->
 * 
 * <pre>
 * Class implementing the ML-kNN (Multi-Label k Nearest Neighbours) algorithm.
 * The class is based on the pseudo-code made available by the authors,
 * except for the option to use <i>normalized</i> Euclidean distance as a
 * distance function.
 * </pre>
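 * 
 * <pre>
 * In summary (using the notation of Zhang and Zhou, 2007): for a test
 * instance x and label l, let C_l(x) be the number of x's k nearest
 * neighbours that carry label l. The confidence for label l is obtained by
 * combining a smoothed prior with a smoothed neighbour-count likelihood via
 * Bayes' rule:
 * 
 *   confidence_l(x) =            P(H_1^l) P(E_{C_l(x)}^l | H_1^l)
 *                     -------------------------------------------------------
 *                     P(H_1^l) P(E_{C_l(x)}^l | H_1^l) + P(H_0^l) P(E_{C_l(x)}^l | H_0^l)
 * 
 * where H_1^l / H_0^l are the events "x has / does not have label l" and
 * E_j^l is the event "exactly j of the k neighbours have label l".
 * </pre>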
 * 
 * For more information:
 * 
 * <pre>
 * Zhang, M. and Zhou, Z. 2007. ML-KNN: A lazy learning approach to multi-label learning.
 * Pattern Recogn. 40, 7 (Jul. 2007), 2038-2048. DOI=http://dx.doi.org/10.1016/j.patcog.2006.12.019
 * </pre>
 * 
 * <!-- globalinfo-end -->
 * 
 * <!-- technical-bibtex-start --> BibTeX:
 * 
 * <pre>
 * &#064;article{zhang+zhou:2007,
 *    author = {Min-Ling Zhang and Zhi-Hua Zhou},
 *    title = {ML-KNN: A lazy learning approach to multi-label learning},
 *    journal = {Pattern Recogn.},
 *    volume = {40},
 *    number = {7},
 *    year = {2007},
 *    issn = {0031-3203},
 *    pages = {2038--2048},
 *    doi = {http://dx.doi.org/10.1016/j.patcog.2006.12.019},
 *    publisher = {Elsevier Science Inc.},
 *    address = {New York, NY, USA},
 * }
 * </pre>
 * 
 * <p/> <!-- technical-bibtex-end -->
 *
 * @author Eleftherios Spyromitros-Xioufis ( espyromi@csd.auth.gr )
 * @version $Revision: 1.1 $ 
 */
@SuppressWarnings("serial")
public class MLkNN extends MultiLabelKNN {
	/**
	 * Smoothing parameter controlling the strength of uniform prior <br>
	 * (Default value is set to 1 which yields the Laplace smoothing).
	 */
	private double smooth;
	/**
	 * A table holding the prior probability for an instance to belong in each
	 * class
	 */
	private double[] PriorProbabilities;
	/**
	 * A table holding the prior probability for an instance not to belong in
	 * each class
	 */
	private double[] PriorNProbabilities;
	/**
	 * A table holding the probability for an instance to belong in each class<br>
	 * given that i:0..k of its neighbors belong to that class
	 */
	private double[][] CondProbabilities;
	/**
	 * A table holding the probability for an instance not to belong in each
	 * class<br>
	 * given that i:0..k of its neighbors belong to that class
	 */
	private double[][] CondNProbabilities;

	/**
	 * An empty constructor
	 */
	public MLkNN() {
	}

	/**
	 * @param numLabels the number of labels of the dataset
	 * @param numOfNeighbors the number of neighbors
	 * @param smooth the smoothing factor
	 */
	public MLkNN(int numLabels, int numOfNeighbors, double smooth) {
		super(numLabels, numOfNeighbors);
		this.smooth = smooth;
		dontNormalize = true;
		PriorProbabilities = new double[numLabels];
		PriorNProbabilities = new double[numLabels];
		CondProbabilities = new double[numLabels][numOfNeighbors + 1];
		CondNProbabilities = new double[numLabels][numOfNeighbors + 1];
	}

	/**
	 * Returns an instance of a TechnicalInformation object, containing detailed
	 * information about the technical background of this class, e.g., paper
	 * reference or book this class is based on.
	 * 
	 * @return the technical information about this class
	 */
    @Override
	public TechnicalInformation getTechnicalInformation() {
		TechnicalInformation result;

		result = new TechnicalInformation(Type.ARTICLE);
		result.setValue(Field.AUTHOR, "Min-Ling Zhang and Zhi-Hua Zhou");
		result.setValue(Field.TITLE, "ML-KNN: A lazy learning approach to multi-label learning");
		result.setValue(Field.JOURNAL, "Pattern Recogn.");
		result.setValue(Field.VOLUME, "40");
		result.setValue(Field.NUMBER, "7");
		result.setValue(Field.YEAR, "2007");
		result.setValue(Field.ISSN, "0031-3203");
		result.setValue(Field.PAGES, "2038--2048");
		result.setValue(Field.PUBLISHER, "Elsevier Science Inc.");
		result.setValue(Field.ADDRESS, "New York, NY, USA");

		return result;
	}

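	/**
	 * Builds the classifier: performs the superclass initialisation and then
	 * estimates the prior and conditional probabilities from the training set.
	 * 
	 * @param train the training dataset
	 * @throws Exception if the nearest neighbour search fails
	 */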
	public void buildClassifier(Instances train) throws Exception {
		super.buildClassifier(train);
		
		ComputePrior(train);
		ComputeCond(train);

	}

	/**
	 * Computes the prior probabilities (and their complements) for each label
	 * of the training set.
	 * 
	 * @param train the training dataset
	 */
	private void ComputePrior(Instances train) {
		for (int i = 0; i < numLabels; i++) {
			int temp_Ci = 0;
			for (int j = 0; j < train.numInstances(); j++) {
				double value = Double.parseDouble(train.attribute(predictors + i).value(
						(int) train.instance(j).value(predictors + i)));
				if (Utils.eq(value, 1.0)) {
					temp_Ci++;
				}
			}
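			// Laplace-smoothed estimate of P(H_1^l); for example, with
			// smooth = 1 and 5 positive instances out of 20, the prior is
			// (1 + 5) / (2 + 20) = 0.2727...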
			PriorProbabilities[i] = (smooth + temp_Ci) / (smooth * 2 + train.numInstances());
			PriorNProbabilities[i] = 1 - PriorProbabilities[i];
		}
	}

	/**
	 * Computes the conditional probabilities (and their complements) for each
	 * label of the training set.
	 * 
	 * @param train the training dataset
	 * @throws Exception if the nearest neighbour search fails
	 */
	private void ComputeCond(Instances train) throws Exception {

		lnn = new LinearNNSearch();
		lnn.setDistanceFunction(dfunc);
		lnn.setInstances(train);
		lnn.setMeasurePerformance(false);
		
		// this implementation doesn't need it 
		// lnn.setSkipIdentical(true); 

		// temp_Ci[i][k] counts the training instances that have label i and
		// whose numOfNeighbors nearest neighbours contain exactly k instances
		// with label i; temp_NCi[i][k] is the analogous count for instances
		// that do not have label i
		int[][] temp_Ci = new int[numLabels][numOfNeighbors + 1];
		int[][] temp_NCi = new int[numLabels][numOfNeighbors + 1];

		for (int i = 0; i < train.numInstances(); i++) {

			Instances knn = new Instances(lnn
					.kNearestNeighbours(train.instance(i), numOfNeighbors));

			// now compute values of temp_Ci and temp_NCi for every class label
			for (int j = 0; j < numLabels; j++) {

				int aces = 0; // number of neighbours that have label j
				for (int k = 0; k < numOfNeighbors; k++) {
					double value = Double.parseDouble(train.attribute(predictors + j).value(
							(int) knn.instance(k).value(predictors + j)));
					if (Utils.eq(value, 1.0)) {
						aces++;
					}
				}
				// increment temp_Ci[j][aces] or temp_NCi[j][aces], depending
				// on whether instance i itself has label j
				if (Utils.eq(Double.parseDouble(train.attribute(predictors + j).value(
						(int) train.instance(i).value(predictors + j))), 1.0)) {
					temp_Ci[j][aces]++;
				} else {
					temp_NCi[j][aces]++;
				}
			}
		}

		// compute CondProbabilities[i][..] and CondNProbabilities[i][..] for
		// each label from the counts in temp_Ci and temp_NCi
		for (int i = 0; i < numLabels; i++) {
			int temp1 = 0;
			int temp2 = 0;
			for (int j = 0; j < numOfNeighbors + 1; j++) {
				temp1 += temp_Ci[i][j];
				temp2 += temp_NCi[i][j];
			}
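			// temp1 is the number of training instances that have label i,
			// temp2 the number that do not; the conditional probabilities are
			// Laplace-smoothed over the numOfNeighbors + 1 possible counts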
			for (int j = 0; j < numOfNeighbors + 1; j++) {
				CondProbabilities[i][j] = (smooth + temp_Ci[i][j])
						/ (smooth * (numOfNeighbors + 1) + temp1);
				CondNProbabilities[i][j] = (smooth + temp_NCi[i][j])
						/ (smooth * (numOfNeighbors + 1) + temp2);
			}
		}
	}

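	/**
	 * Predicts the labels of the given instance from the labels of its k
	 * nearest neighbours, using the stored prior and conditional
	 * probabilities.
	 * 
	 * @param instance the instance to be classified
	 * @return the predicted labels together with the corresponding confidences
	 * @throws Exception if the nearest neighbour search fails
	 */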
	public Prediction makePrediction(Instance instance) throws Exception {

		double[] confidences = new double[numLabels];

		//setThreshold(0.5);
		//in cross-validation test-train instances does not belong to the same data set
		//Instance instance2 = new Instance(instance);

		Instances knn = new Instances(lnn.kNearestNeighbours(instance, numOfNeighbors));

		for (int i = 0; i < numLabels; i++) {
			// count how many of the k nearest neighbours have label i
			int aces = 0;
			for (int k = 0; k < numOfNeighbors; k++) {
				double value = Double.parseDouble(train.attribute(predictors + i).value(
						(int) knn.instance(k).value(predictors + i)));
				if (Utils.eq(value, 1.0)) {
					aces++;
				}
			}
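			// Bayes' rule: combine the prior with the likelihood of observing
			// 'aces' neighbours with label i, for both the positive and the
			// negative hypothesis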
			double Prob_in = PriorProbabilities[i] * CondProbabilities[i][aces];
			double Prob_out = PriorNProbabilities[i] * CondNProbabilities[i][aces];
			confidences[i] = Prob_in / (Prob_in + Prob_out); // ranking function
		}
		
		double[] predictions = labelsFromConfidences(confidences);
		return new Prediction(predictions, confidences);
	}

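	/**
	 * Prints the computed prior and conditional probabilities to standard
	 * output, mainly for inspection and debugging.
	 */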
	public void output() {
		System.out.println("Computed Prior Probabilities");
		for (int i = 0; i < numLabels; i++) {
			System.out.println("Label " + (i + 1) + ": " + PriorProbabilities[i]);
		}
		System.out.println("Computed Posterior Probabilities");
		for (int i = 0; i < numLabels; i++) {
			System.out.println("Label " + (i + 1));
			for (int j = 0; j < numOfNeighbors + 1; j++) {
				System.out.println(j + " neighbours: " + CondProbabilities[i][j]);
				System.out.println(j + " neighbours: " + CondNProbabilities[i][j]);
			}
		}
	}
}
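
A minimal usage sketch follows (not part of the original listing). It assumes the Mulan and Weka jars are on the classpath, that the training data is an ARFF file whose trailing attributes are the binary labels (as this version of Mulan expects), and that the Prediction class lives in the same mulan.classifier package as MLkNN; the file name, the label count of 6, the neighbour count, and the smoothing value are illustrative placeholders only.

import weka.core.Instance;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

import mulan.classifier.MLkNN;
import mulan.classifier.Prediction;

public class MLkNNExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical multi-label ARFF file with 6 label attributes at the end
        Instances train = DataSource.read("emotions.arff");

        // 6 labels, 10 neighbours, Laplace smoothing (smooth = 1.0)
        MLkNN classifier = new MLkNN(6, 10, 1.0);
        classifier.buildClassifier(train);

        // Print the estimated prior and conditional probabilities
        classifier.output();

        // Predict the labels of the first training instance
        Instance example = train.instance(0);
        Prediction prediction = classifier.makePrediction(example);
        // Inspecting 'prediction' depends on the Prediction class,
        // which is not shown in this file.
    }
}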
