smo.java

MacroWeka extends the well-known data mining tool Weka
JAVA
Page 1 of 5
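      // Keep the index sets in step with the new value of alpha2. Following
      // the index sets of Keerthi's paper (referenced further below): I2 holds
      // examples with y = -1 at the upper bound, I3 examples with y = +1 at
      // the upper bound, and I4 examples with y = -1 at 0, which are exactly
      // the conditions tested here.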
      if ((y2 == -1) && (a2 == C2)) {
	m_I2.insert(i2);
      } else {
	m_I2.delete(i2);
      }
      if ((y2 == 1) && (a2 == C2)) {
	m_I3.insert(i2);
      } else {
	m_I3.delete(i2);
      }
      if ((y2 == -1) && (a2 == 0)) {
	m_I4.insert(i2);
      } else {
	m_I4.delete(i2);
      }
      
      // Update weight vector to reflect the change in a1 and a2, if linear SVM
      if (!m_useRBF && m_exponent == 1.0) {
	Instance inst1 = m_data.instance(i1);
	for (int p1 = 0; p1 < inst1.numValues(); p1++) {
	  if (inst1.index(p1) != m_data.classIndex()) {
	    m_weights[inst1.index(p1)] += 
	      y1 * (a1 - alph1) * inst1.valueSparse(p1);
	  }
	}
	Instance inst2 = m_data.instance(i2);
	for (int p2 = 0; p2 < inst2.numValues(); p2++) {
	  if (inst2.index(p2) != m_data.classIndex()) {
	    m_weights[inst2.index(p2)] += 
	      y2 * (a2 - alph2) * inst2.valueSparse(p2);
	  }
	}
      }
      
      // Update error cache using new Lagrange multipliers
      for (int j = m_I0.getNext(-1); j != -1; j = m_I0.getNext(j)) {
	if ((j != i1) && (j != i2)) {
	  m_errors[j] += 
	    y1 * (a1 - alph1) * m_kernel.eval(i1, j, m_data.instance(i1)) + 
	    y2 * (a2 - alph2) * m_kernel.eval(i2, j, m_data.instance(i2));
	}
      }
      
      // Update error cache for i1 and i2
      m_errors[i1] += y1 * (a1 - alph1) * k11 + y2 * (a2 - alph2) * k12;
      m_errors[i2] += y1 * (a1 - alph1) * k12 + y2 * (a2 - alph2) * k22;
      
      // Update array with Lagrange multipliers
      m_alpha[i1] = a1;
      m_alpha[i2] = a2;
      
      // Update thresholds
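      // bUp/iUp track the smallest cached error over the set I0 (plus i1/i2
      // when they fall outside I0), and bLow/iLow the largest; these bounds
      // drive the choice of the next pair of multipliers to examine.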
      m_bLow = -Double.MAX_VALUE; m_bUp = Double.MAX_VALUE;
      m_iLow = -1; m_iUp = -1;
      for (int j = m_I0.getNext(-1); j != -1; j = m_I0.getNext(j)) {
	if (m_errors[j] < m_bUp) {
	  m_bUp = m_errors[j]; m_iUp = j;
	}
	if (m_errors[j] > m_bLow) {
	  m_bLow = m_errors[j]; m_iLow = j;
	}
      }
      if (!m_I0.contains(i1)) {
	if (m_I3.contains(i1) || m_I4.contains(i1)) {
	  if (m_errors[i1] > m_bLow) {
	    m_bLow = m_errors[i1]; m_iLow = i1;
	  } 
	} else {
	  if (m_errors[i1] < m_bUp) {
	    m_bUp = m_errors[i1]; m_iUp = i1;
	  }
	}
      }
      if (!m_I0.contains(i2)) {
	if (m_I3.contains(i2) || m_I4.contains(i2)) {
	  if (m_errors[i2] > m_bLow) {
	    m_bLow = m_errors[i2]; m_iLow = i2;
	  }
	} else {
	  if (m_errors[i2] < m_bUp) {
	    m_bUp = m_errors[i2]; m_iUp = i2;
	  }
	}
      }
      if ((m_iLow == -1) || (m_iUp == -1)) {
	throw new Exception("This should never happen!");
      }

      // Made some progress.
      return true;
    }
  
    /**
     * Quick and dirty check whether the quadratic programming problem is solved.
     */
    protected void checkClassifier() throws Exception {

      double sum = 0;
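      // The dual equality constraint of the SVM requires sum_i y_i * alpha_i = 0,
      // so the sum computed below should be numerically close to zero.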
      for (int i = 0; i < m_alpha.length; i++) {
	if (m_alpha[i] > 0) {
	  sum += m_class[i] * m_alpha[i];
	}
      }
      System.err.println("Sum of y(i) * alpha(i): " + sum);
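      // Now check the KKT conditions for every example:
      //   alpha == 0      =>  y * f(x) >= 1   (condition 1)
      //   0 < alpha < C   =>  y * f(x) == 1   (condition 2)
      //   alpha == C      =>  y * f(x) <= 1   (condition 3)
      // where C is scaled by the instance weight, matching the tests below.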

      for (int i = 0; i < m_alpha.length; i++) {
	double output = SVMOutput(i, m_data.instance(i));
	if (Utils.eq(m_alpha[i], 0)) {
	  if (Utils.sm(m_class[i] * output, 1)) {
	    System.err.println("KKT condition 1 violated: " + m_class[i] * output);
	  }
	} 
	if (Utils.gr(m_alpha[i], 0) && 
	    Utils.sm(m_alpha[i], m_C * m_data.instance(i).weight())) {
	  if (!Utils.eq(m_class[i] * output, 1)) {
	    System.err.println("KKT condition 2 violated: " + m_class[i] * output);
	  }
	} 
	if (Utils.eq(m_alpha[i], m_C * m_data.instance(i).weight())) {
	  if (Utils.gr(m_class[i] * output, 1)) {
	    System.err.println("KKT condition 3 violated: " + m_class[i] * output);
	  }
	} 
      }
    }  
  }

  /** The filter to apply to the training data */
  public static final int FILTER_NORMALIZE = 0;
  public static final int FILTER_STANDARDIZE = 1;
  public static final int FILTER_NONE = 2;
  public static final Tag [] TAGS_FILTER = {
    new Tag(FILTER_NORMALIZE, "Normalize training data"),
    new Tag(FILTER_STANDARDIZE, "Standardize training data"),
    new Tag(FILTER_NONE, "No normalization/standardization"),
  };

  /** The binary classifier(s) */
  protected BinarySMO[][] m_classifiers = null;

  /** The exponent for the polynomial kernel. */
  protected double m_exponent = 1.0;
 
  /** Use lower-order terms? */
  protected boolean m_lowerOrder = false;
  
  /** Gamma for the RBF kernel. */
  protected double m_gamma = 0.01;
  
  /** The complexity parameter. */
  protected double m_C = 1.0;
  
  /** Epsilon for rounding. */
  protected double m_eps = 1.0e-12;
  
  /** Tolerance for accuracy of result. */
  protected double m_tol = 1.0e-3;

  /** Whether to normalize/standardize/neither */
  protected int m_filterType = FILTER_NORMALIZE;
  
  /** Feature-space normalization? */
  protected boolean m_featureSpaceNormalization = false;
  
  /** Use RBF kernel? (default: poly) */
  protected boolean m_useRBF = false;
  
  /** The size of the cache (a prime number) */
  protected int m_cacheSize = 250007;

  /** The filter used to make attributes numeric. */
  protected NominalToBinary m_NominalToBinary;

  /** The filter used to standardize/normalize all values. */
  protected Filter m_Filter = null;

  /** The filter used to get rid of missing values. */
  protected ReplaceMissingValues m_Missing;

  /** Only numeric attributes in the dataset? */
  protected boolean m_onlyNumeric;

  /** The class index from the training data */
  protected int m_classIndex = -1;

  /** The class attribute */
  protected Attribute m_classAttribute;

  /** Turn off all checks and conversions? Turning them off assumes
      that data is purely numeric, doesn't contain any missing values,
      and has a nominal class. Turning them off also means that
      no header information will be stored if the machine is linear. 
      Finally, it also assumes that no instance has a weight equal to 0.*/
  protected boolean m_checksTurnedOff;

  /** Precision constant for updating sets */
  protected static double m_Del = 1000 * Double.MIN_VALUE;

  /** Whether logistic models are to be fit */
  protected boolean m_fitLogisticModels = false;

  /** The number of folds for the internal cross-validation */
  protected int m_numFolds = -1;

  /** The random number seed  */
  protected int m_randomSeed = 1;

  /**
   * Turns off checks for missing values, etc. Use with caution.
   */
  public void turnChecksOff() {

    m_checksTurnedOff = true;
  }

  /**
   * Turns on checks for missing values, etc.
   */
  public void turnChecksOn() {

    m_checksTurnedOff = false;
  }

  /**
   * Method for building the classifier. Implements a one-against-one
   * wrapper for multi-class problems.
   *
   * @param insts the set of training instances
   * @exception Exception if the classifier can't be built successfully
   */
  public void buildClassifier(Instances insts) throws Exception {

    if (!m_checksTurnedOff) {
      if (insts.checkForStringAttributes()) {
	throw new UnsupportedAttributeTypeException("Cannot handle string attributes!");
      }
      if (insts.classAttribute().isNumeric()) {
	throw new UnsupportedClassTypeException("SMO can't handle a numeric class! Use "
						+ "SMOreg for performing regression.");
      }
      insts = new Instances(insts);
      insts.deleteWithMissingClass();
      if (insts.numInstances() == 0) {
	throw new Exception("No training instances without a missing class!");
      }

      
      /* Remove all instances with a weight of 0.
	 This MUST be done, since condition (8) of Keerthi's paper
	 assumes Ci > 0 (see equation (3a)). */
      Instances data = new Instances(insts, insts.numInstances());
      for(int i = 0; i < insts.numInstances(); i++){
	if(insts.instance(i).weight() > 0)
	  data.add(insts.instance(i));
      }
      if (data.numInstances() == 0) {
	throw new Exception("No training instances left after removing " + 
			    "instances with either a zero weight or a missing class!");
      }
      insts = data;
      
    }

    m_onlyNumeric = true;
    if (!m_checksTurnedOff) {
      for (int i = 0; i < insts.numAttributes(); i++) {
	if (i != insts.classIndex()) {
	  if (!insts.attribute(i).isNumeric()) {
	    m_onlyNumeric = false;
	    break;
	  }
	}
      }
    }

    if (!m_checksTurnedOff) {
      m_Missing = new ReplaceMissingValues();
      m_Missing.setInputFormat(insts);
      insts = Filter.useFilter(insts, m_Missing); 
    } else {
      m_Missing = null;
    }

    if (!m_onlyNumeric) {
      m_NominalToBinary = new NominalToBinary();
      m_NominalToBinary.setInputFormat(insts);
      insts = Filter.useFilter(insts, m_NominalToBinary);
    } else {
      m_NominalToBinary = null;
    }

    if (m_filterType == FILTER_STANDARDIZE) {
      m_Filter = new Standardize();
      m_Filter.setInputFormat(insts);
      insts = Filter.useFilter(insts, m_Filter); 
    } else if (m_filterType == FILTER_NORMALIZE) {
      m_Filter = new Normalize();
      m_Filter.setInputFormat(insts);
      insts = Filter.useFilter(insts, m_Filter); 
    } else {
      m_Filter = null;
    }

    m_classIndex = insts.classIndex();
    m_classAttribute = insts.classAttribute();

    // Generate subsets representing each class
    Instances[] subsets = new Instances[insts.numClasses()];
    for (int i = 0; i < insts.numClasses(); i++) {
      subsets[i] = new Instances(insts, insts.numInstances());
    }
    for (int j = 0; j < insts.numInstances(); j++) {
      Instance inst = insts.instance(j);
      subsets[(int)inst.classValue()].add(inst);
    }
    for (int i = 0; i < insts.numClasses(); i++) {
      subsets[i].compactify();
    }

    // Build the binary classifiers
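    // One BinarySMO is trained for every pair of classes (one-against-one);
    // only the upper triangle of m_classifiers is filled in.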
    Random rand = new Random(m_randomSeed);
    m_classifiers = new BinarySMO[insts.numClasses()][insts.numClasses()];
    for (int i = 0; i < insts.numClasses(); i++) {
      for (int j = i + 1; j < insts.numClasses(); j++) {
	m_classifiers[i][j] = new BinarySMO();
	Instances data = new Instances(insts, insts.numInstances());
	for (int k = 0; k < subsets[i].numInstances(); k++) {
	  data.add(subsets[i].instance(k));
	}
	for (int k = 0; k < subsets[j].numInstances(); k++) {
	  data.add(subsets[j].instance(k));
	}
	data.compactify();
	data.randomize(rand);
	m_classifiers[i][j].buildClassifier(data, i, j, 
					    m_fitLogisticModels,
					    m_numFolds, m_randomSeed);
      }
    }
  }

  /**
   * Estimates class probabilities for a given instance.
   */
  public double[] distributionForInstance(Instance inst) throws Exception {

    // Filter instance
    if (!m_checksTurnedOff) {
      m_Missing.input(inst);
      m_Missing.batchFinished();
      inst = m_Missing.output();
    }

    if (!m_onlyNumeric) {
      m_NominalToBinary.input(inst);
      m_NominalToBinary.batchFinished();
      inst = m_NominalToBinary.output();
    }
    
    if (m_Filter != null) {
      m_Filter.input(inst);
      m_Filter.batchFinished();
      inst = m_Filter.output();
    }
    
    if (!m_fitLogisticModels) {
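      // Without logistic models, each pairwise classifier casts one vote for
      // the class it favours; the vote counts are then normalized into a
      // probability distribution.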
      double[] result = new double[inst.numClasses()];
      for (int i = 0; i < inst.numClasses(); i++) {
	for (int j = i + 1; j < inst.numClasses(); j++) {
	  if ((m_classifiers[i][j].m_alpha != null) || 
	      (m_classifiers[i][j].m_sparseWeights != null)) {
	    double output = m_classifiers[i][j].SVMOutput(-1, inst);
	    if (output > 0) {
	      result[j] += 1;
	    } else {
	      result[i] += 1;
	    }
	  }
	} 
      }
      Utils.normalize(result);
      return result;
    } else {

      // We only need to do pairwise coupling if there are more
      // than two classes.
      if (inst.numClasses() == 2) {
	double[] newInst = new double[2];
	newInst[0] = m_classifiers[0][1].SVMOutput(-1, inst);
	newInst[1] = Instance.missingValue();
	return m_classifiers[0][1].m_logistic.
	  distributionForInstance(new Instance(1, newInst));
      }
      double[][] r = new double[inst.numClasses()][inst.numClasses()];
      double[][] n = new double[inst.numClasses()][inst.numClasses()];
      for (int i = 0; i < inst.numClasses(); i++) {
	for (int j = i + 1; j < inst.numClasses(); j++) {
	  if ((m_classifiers[i][j].m_alpha != null) || 
	      (m_classifiers[i][j].m_sparseWeights != null)) {
	    double[] newInst = new double[2];
	    newInst[0] = m_classifiers[i][j].SVMOutput(-1, inst);
	    newInst[1] = Instance.missingValue();
	    r[i][j] = m_classifiers[i][j].m_logistic.
	      distributionForInstance(new Instance(1, newInst))[0];
	    n[i][j] = m_classifiers[i][j].m_sumOfWeights;
	  }
	}
      }
      return pairwiseCoupling(n, r);
    }
  }

  /**
   * Implements pairwise coupling.
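   * The pairwise probability estimates r[i][j], weighted by the sums of
   * training weights n[i][j], are combined into a single class distribution
   * (presumably along the lines of Hastie and Tibshirani's pairwise
   * coupling procedure).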
   *
   * @param n the sum of weights used to train each model
   * @param r the probability estimate from each model
   * @return the coupled estimates
   */
  public double[] pairwiseCoupling(double[][] n, double[][] r) {

    // Initialize p and u array
    double[] p = new double[r.length];
    for (int i = 0; i < p.length; i++) {
