
knnpartialdistance.java

KNN algorithm developed in the Weka environment; the detailed notes you need are included in the file package.
Language: Java
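A minimal usage sketch (not part of the original package): it assumes the older Weka 3.4/3.5 API that the class itself uses, plus placeholder settings — an ARFF file named "iris.arff", k = 3, and leave-out attribute indices {0, 1} — which you would replace with your own data and parameters.

package cap5638;

import weka.classifiers.Evaluation;
import weka.core.Instances;
import java.io.BufferedReader;
import java.io.FileReader;
import java.util.Random;

public class KnnPartialDistanceDemo {
    public static void main(String[] args) throws Exception {
        // Load a dataset in ARFF format (the path is a placeholder).
        Instances data = new Instances(new BufferedReader(new FileReader("iris.arff")));
        data.setClassIndex(data.numAttributes() - 1);

        // k = 3; attribute indices 0 and 1 are omitted from the partial distance.
        KnnPartialDistance knn = new KnnPartialDistance(3, new int[] {0, 1});

        // Evaluate with 10-fold cross-validation using Weka's Evaluation class.
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(knn, data, 10, new Random(1));
        System.out.println(eval.toSummaryString());
    }
}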
/** K-nearest neighbor with partial distance.
 * This works by omitting certain specified attributes in each partial
 * distance calculation. If the partial distance is already greater than the
 * distance of the current k-th (farthest) full-dimensional nearest neighbor,
 * the sample is dismissed with fewer computations. Otherwise, the full
 * d-dimensional distance is computed and tested against the current
 * nearest neighbors.
 * @author JohnChap
 */
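
/* Illustration of the pruning idea (the numbers are made up for this example):
 * with k = 1 and a current nearest full-dimensional distance of 2.0, a training
 * sample whose partial distance over the included attributes already reaches 2.5
 * can be dismissed without evaluating the remaining attributes, because adding
 * more squared differences can only increase the distance.
 */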

package cap5638;

import weka.classifiers.Classifier;
import weka.classifiers.Evaluation;
import weka.core.*;
import java.util.LinkedList;
import java.util.Hashtable;
import java.util.Enumeration;

public class KnnPartialDistance extends Classifier
{
    Instances trainingSet;
    Instance[] moddedTrainingSet;
    int[] leaveOutAttributes;
    boolean[] isIncluded;
    int k;

    // inner class to hold a pair of doubles 
    // (for maintaining a running list of nearest neighbors)
    class DoublePair 
    {
        double _class;
        double distance;

        DoublePair(double a, double b) {
            _class = a;
            distance = b;
        }
    }


    /** Constructor setting k and the attributes to leave out of the
     * partial-distance computation.
     * @param kValue  the number of nearest neighbors to use.
     * @param leaveOut  indices of the attributes omitted from the partial distance.
     */
    KnnPartialDistance(int kValue, int[] leaveOut) {
        k = kValue;
        leaveOutAttributes = leaveOut;
    }
 
        
    /** Returns the description of the classifier. 
     * @return description of the KNN class.
     */
    public String globalInfo()
    {
        return "Class to apply the k-nearest-neighbor rule on a given" +
                "test set, by giving each test sample the label most" +
                "frequently represented among the k nearest training" +
                "samples.";
    }

    /** sets k 
     * @param new_k  new k value.
     */
    public void set_k(int new_k) {
        k = new_k;
    }

    /**gets k
     * @return  k
     */
    public int get_k() {
        return k;
    }
        
       
    /** Builds the classifier from the given training set.
     * @param data  the training instances.
     */
    public void buildClassifier(Instances data) throws Exception
    {
        Enumeration en = data.enumerateInstances();
        while (en.hasMoreElements()) {
            if (((Instance)en.nextElement()).hasMissingValue()) {
                throw new NoSupportForMissingValuesException("KNN: no support for missing values.");
            }
        }
        trainingSet = new Instances(data);
        trainingSet.deleteWithMissingClass();
        
        // mark each attribute as included unless it appears in the leave-out list
        isIncluded = new boolean[trainingSet.numAttributes()];
        for (int i = 0; i < trainingSet.numAttributes(); i++) {
            isIncluded[i] = true;
            for (int j = 0; j < leaveOutAttributes.length; j++) {
                if (leaveOutAttributes[j] == i) {
                    isIncluded[i] = false;
                    break;
                }
            }
        }
        
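        // Build reduced copies of the training instances that contain only
        // the included attributes, preserving their original order.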
        moddedTrainingSet = new Instance[trainingSet.numInstances()];
        Instance tempInstance;
        int blankIndex;
        int modSize = trainingSet.numAttributes() - leaveOutAttributes.length;
        for (int i = 0; i < trainingSet.numInstances(); i++) {
            tempInstance = new Instance(modSize);
            blankIndex = 0;
            for (int j = 0; j < trainingSet.numAttributes(); j++) {
                if (isIncluded[j]) {
                    tempInstance.setValue(blankIndex, trainingSet.instance(i).value(j));
                    blankIndex++;
                }
            }
            moddedTrainingSet[i] = tempInstance;
        }
    }

    /** Classifies a single test instance by comparing distances
     * between the instance and the points in the training set.
     * @param instance  instance to classify.
     * @return  predicted most likely class for the instance or Instance.missingValue() if no prediction is made.
     */
    public double classifyInstance(Instance instance) throws Exception
    {
        Double greatestClass = new Double(0.0);
        if (instance.hasMissingValue()) {
            throw new NoSupportForMissingValuesException("KNN: no support for missing values.");
        }
        
        Instance moddedInstance = new Instance(instance.numAttributes() - leaveOutAttributes.length);
     
        // pre-make the modified instance
        int blankIndex = 0;
        for (int j = 0; j < instance.numAttributes(); j++) {
            if (isIncluded[j]) {
                moddedInstance.setValue(blankIndex, instance.value(j));
                blankIndex++;
            }
        }
        
        // Running list of the k nearest neighbors found so far,
        // kept sorted in ascending order of full distance.
        LinkedList knnList = new LinkedList();

        double partial;
        double full;

        for (int p = 0; p < trainingSet.numInstances(); p++) {
            // Partial (reduced-dimension) Euclidean distance over the included
            // attributes; the last attribute of the reduced instance is assumed
            // to be the class and is skipped.
            partial = 0;
            for (int q = 0; q < moddedInstance.numAttributes() - 1; q++) {
                partial += Math.pow(moddedTrainingSet[p].value(q) - moddedInstance.value(q), 2.0);
            }
            partial = Math.sqrt(partial);

            // If we already hold k neighbors and the partial distance is not
            // smaller than the current k-th full distance, the full distance
            // cannot be smaller either, so the sample is dismissed early.
            if (knnList.size() >= k
                    && partial >= ((DoublePair) knnList.getLast()).distance) {
                continue;
            }

            // Otherwise compute the full d-dimensional Euclidean distance
            // (class attribute excluded) and test it against the current list.
            full = 0;
            for (int j = 0; j < instance.numAttributes(); j++) {
                if (j != instance.classIndex()) {
                    full += Math.pow(trainingSet.instance(p).value(j) - instance.value(j), 2.0);
                }
            }
            full = Math.sqrt(full);

            // Insert a fresh DoublePair (so list entries stay independent objects)
            // at the correct sorted position and trim the list back to k entries.
            int insertAt = knnList.size();
            for (int j = 0; j < knnList.size(); j++) {
                if (full < ((DoublePair) knnList.get(j)).distance) {
                    insertAt = j;
                    break;
                }
            }
            if (insertAt < k) {
                knnList.add(insertAt, new DoublePair(trainingSet.instance(p).classValue(), full));
                if (knnList.size() > k) {
                    knnList.remove(k);
                }
            }
        }

        // The hash table counts, for each class value (key), the number
        // of occurrences (value) among the k nearest neighbors.
        Hashtable hashT = new Hashtable((trainingSet.numClasses() * 2));

        DoublePair dubPair;
        Integer counter;
        while (knnList.size() > 0) {
            dubPair = (DoublePair)knnList.removeFirst();
            counter = (Integer)(hashT.get(new Double(dubPair._class)));
            if (counter == null) {
                // the key is not yet in the hash table, so start its count at zero
                counter = new Integer(0);
            }
            hashT.put(new Double(dubPair._class), new Integer(counter.intValue() + 1));
        }	

        Integer mostFrequent = new Integer(0);
        Double currentClass;
        Integer currentValue;

        // Note: if two classes occur equally often among the k neighbors,
        // the tie is broken by whichever class the enumeration returns first.
        Enumeration enumClass = hashT.keys();
        while (enumClass.hasMoreElements()) {
            currentClass = (Double)(enumClass.nextElement());
            currentValue = (Integer)(hashT.get(currentClass));
            if (currentValue.compareTo(mostFrequent) > 0) {
                greatestClass = currentClass;
                mostFrequent = currentValue;
            }
        }
        return greatestClass.doubleValue();

    }


    /*
     * Main method (kept commented out): runs Weka's standard command-line
     * evaluation on this classifier.
     */
    /*public static void main(String[] args)
    {
        try {
            // defaults: k = 1, no attributes left out (adjust as needed)
            KnnPartialDistance knn = new KnnPartialDistance(1, new int[0]);
            for (int i = 0; i < args.length; i++) {
                if (args[i].compareTo("-kvalue") == 0) {
                    knn.k = Integer.parseInt(args[i + 1]);
                }
            }
            System.out.println(Evaluation.evaluateModel(knn, args));
        } catch (Exception e) {
            System.out.println(e.getMessage());
        }
    }*/
}

人人精品人人爱| 亚洲欧美日韩在线不卡| 另类小说一区二区三区| 欧美一区二区三区免费大片| 五月天一区二区三区| 制服丝袜一区二区三区| 蜜臀a∨国产成人精品| 精品国产乱码久久久久久1区2区 | 国产日韩欧美综合在线| 成人综合婷婷国产精品久久免费| 国产精品免费观看视频| 91小视频免费观看| 调教+趴+乳夹+国产+精品| 91麻豆精品国产91久久久久久| 久久精品国产亚洲一区二区三区| 精品久久久久久久久久久久久久久久久 | 亚洲人成在线播放网站岛国| 色综合久久66| 日韩精品一级中文字幕精品视频免费观看 | 美国毛片一区二区| 国产欧美日韩三级| 色婷婷激情久久| 日本欧美一区二区| 久久久久久久性| 91精品办公室少妇高潮对白| 水野朝阳av一区二区三区| 精品国产免费视频| 91老司机福利 在线| 日日欢夜夜爽一区| 欧美激情艳妇裸体舞| 在线看国产日韩| 激情五月激情综合网| 综合网在线视频| 日韩免费福利电影在线观看| 不卡av在线网| 日本亚洲三级在线| 亚洲欧洲一区二区在线播放| 欧美一区二区三区四区视频| 波多野结衣91| 久久电影网站中文字幕| 亚洲黄色免费网站| 久久先锋资源网| 欧美性色欧美a在线播放| 国产精品一区二区久久不卡| 一区二区三区色| 麻豆精品新av中文字幕| 日韩久久一区二区| 精品日韩成人av| 717成人午夜免费福利电影| 成人永久aaa| 精品亚洲成a人| 香蕉乱码成人久久天堂爱免费| 中文字幕第一区综合| 日韩一二三四区| 欧美日韩一区二区三区在线看| 成人深夜福利app| 国内成人免费视频| 蜜臀av在线播放一区二区三区| 亚洲综合在线视频| 国产精品久久久久一区| 久久精品一区蜜桃臀影院| 日韩欧美成人午夜| 9191成人精品久久| 欧美日韩免费观看一区三区| bt7086福利一区国产| 国产999精品久久| 国产一区二区三区在线观看免费 | 国产成人午夜视频| 久久99精品久久久久久久久久久久| 亚洲一级电影视频| 亚洲精品乱码久久久久久久久 | 成人欧美一区二区三区| 中文字幕欧美日韩一区| 久久久久久久久久久电影| 精品欧美乱码久久久久久 | hitomi一区二区三区精品| 国产激情视频一区二区三区欧美| 久久精品国产秦先生| 麻豆国产精品一区二区三区 | 99精品久久久久久| www.欧美精品一二区| 丰满白嫩尤物一区二区| 成人av在线资源| 91在线无精精品入口| 99久久精品一区| av在线不卡免费看| 色综合天天综合网天天看片 | 欧美午夜免费电影| 色天使色偷偷av一区二区| 91精品福利视频| 538在线一区二区精品国产| 欧美高清一级片在线| 日韩欧美综合在线| 久久久噜噜噜久久人人看| 国产欧美久久久精品影院| 综合自拍亚洲综合图不卡区| 亚洲日本中文字幕区| 一二三区精品福利视频| 视频一区二区三区中文字幕| 蜜臀91精品一区二区三区| 欧美顶级少妇做爰| 日韩亚洲欧美中文三级| 欧美精品一区二区三区一线天视频 | 久久精品国产第一区二区三区| 精品一区二区三区在线播放视频 | 国产不卡在线播放| 色综合久久88色综合天天| 777a∨成人精品桃花网| 久久久久久久久久久久电影 | 精品婷婷伊人一区三区三| 91精品国产综合久久婷婷香蕉| 26uuu另类欧美| 亚洲人成精品久久久久| 日韩国产高清影视| 国产乱码精品一区二区三区av | 久久九九99视频| 亚洲欧美日韩国产手机在线| 日韩中文字幕亚洲一区二区va在线 | 色婷婷久久久亚洲一区二区三区| 欧美精品一级二级| 国产婷婷色一区二区三区 | 久久久不卡网国产精品二区| 亚洲日穴在线视频| 开心九九激情九九欧美日韩精美视频电影| 国产精品主播直播| 欧美视频中文一区二区三区在线观看| 日韩精品最新网址| 亚洲精品成人a在线观看| 麻豆精品在线看| 在线精品观看国产| 久久久久国色av免费看影院| 亚洲超碰97人人做人人爱| 国产成人小视频| 日韩视频一区在线观看| 亚洲欧美日韩国产一区二区三区| 久久99久久久久| 欧美日韩在线精品一区二区三区激情| 26uuu久久天堂性欧美| 午夜精品福利久久久| 色综合天天综合色综合av| 国产亚洲制服色| 蜜桃av一区二区在线观看| 色美美综合视频| 欧美国产激情二区三区| 美国欧美日韩国产在线播放| 欧美三级中文字幕在线观看| 国产精品女同一区二区三区| 久久99国产精品尤物| 欧美日韩国产色站一区二区三区| 国产精品无人区| 国产精品18久久久久| 日韩欧美国产一区二区三区| 亚洲国产精品综合小说图片区| jiyouzz国产精品久久| 国产人久久人人人人爽| 极品美女销魂一区二区三区| 欧美一级一区二区| 天天操天天干天天综合网| 在线中文字幕一区二区| 亚洲视频网在线直播| 99精品视频一区| 国产精品毛片大码女人| 国产成人综合自拍| 国产亚洲欧美日韩在线一区| 激情偷乱视频一区二区三区| 日韩精品中午字幕| 麻豆国产91在线播放| 日韩一区二区三区在线观看| 日韩成人一级大片| 在线播放日韩导航| 日本午夜精品视频在线观看| 欧美群妇大交群的观看方式| 午夜不卡av免费| 日韩一区二区三区免费看 | 久久99精品国产麻豆婷婷| 欧美大黄免费观看| 国产一区二区三区国产| 久久精品一区蜜桃臀影院| 粉嫩在线一区二区三区视频| 国产精品久久久久一区二区三区 | 国产日韩一级二级三级| 国产成人在线视频免费播放| 国产日产欧美一区二区视频| 成人免费三级在线| 亚洲欧洲性图库| 在线观看视频91| 午夜精品一区二区三区免费视频 | 麻豆精品视频在线| 精品国产青草久久久久福利| 国内成+人亚洲+欧美+综合在线| 国产亚洲欧美激情| 色悠久久久久综合欧美99| 亚洲大型综合色站| 日韩欧美精品在线视频| 夫妻av一区二区| 亚洲精品国产第一综合99久久 | 中文字幕亚洲不卡| 欧美性感一区二区三区| 老司机一区二区| 中文av字幕一区|