亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频

? 歡迎來到蟲蟲下載站! | ?? 資源下載 ?? 資源專輯 ?? 關于我們
? 蟲蟲下載站

?? linearregression.java

?? Weka
?? JAVA
?? 第 1 頁 / 共 2 頁
字號:
    /**
     * Sets the ridge parameter used when computing the regression.
     *
     * @param newRidge the ridge value to use.
     */
    public void setRidge(double newRidge) {
        m_Ridge = newRidge;
    }

    /**
     * Returns the tip text for the colinear-attribute-elimination property,
     * suitable for display in the explorer/experimenter GUI.
     *
     * @return the tip text for this property
     */
    public String eliminateColinearAttributesTipText() {
        return "Eliminate colinear attributes.";
    }

    /**
     * Reports whether colinear attributes are eliminated during training.
     *
     * @return true if colinear attributes are eliminated.
     */
    public boolean getEliminateColinearAttributes() {
        return m_EliminateColinearAttributes;
    }

    /**
     * Controls whether colinear attributes are eliminated during training.
     *
     * @param newEliminateColinearAttributes true to eliminate colinear attributes.
     */
    public void setEliminateColinearAttributes(boolean newEliminateColinearAttributes) {
        m_EliminateColinearAttributes = newEliminateColinearAttributes;
    }

    /**
     * Returns the number of coefficients used in the model, excluding the
     * intercept (which occupies the last slot of m_Coefficients).
     *
     * @return the number of coefficients
     */
    public int numParameters() {
        return m_Coefficients.length - 1;
    }

    /**
     * Returns the tip text for the attribute-selection-method property,
     * suitable for display in the explorer/experimenter GUI.
     *
     * @return the tip text for this property
     */
    public String attributeSelectionMethodTipText() {
        return "Set the method used to select attributes for use in the linear "
            + "regression. Available methods are: no attribute selection, attribute "
            + "selection using M5's method (step through the attributes removing the one "
            + "with the smallest standardised coefficient until no improvement is observed "
            + "in the estimate of the error given by the Akaike "
            + "information criterion), and a greedy selection using the Akaike information "
            + "metric.";
    }

    /**
     * Sets the method used to select attributes for use in the
     * linear regression. Silently ignored unless the tag comes from
     * the TAGS_SELECTION group.
     *
     * @param method the attribute selection method to use.
     */
    public void setAttributeSelectionMethod(SelectedTag method) {
        if (method.getTags() == TAGS_SELECTION) {
            m_AttributeSelection = method.getSelectedTag().getID();
        }
    }

    /**
     * Gets the method used to select attributes for use in the
     * linear regression.
     *
     * @return the method in use, wrapped in a SelectedTag.
     */
    public SelectedTag getAttributeSelectionMethod() {
        return new SelectedTag(m_AttributeSelection, TAGS_SELECTION);
    }

    /**
     * Returns the tip text for the debug property, suitable for
     * display in the explorer/experimenter GUI.
     *
     * @return the tip text for this property
     */
    public String debugTipText() {
        return "Outputs debug information to the console.";
    }

    /**
     * Controls whether debugging output will be printed.
     *
     * @param debug true if debugging output should be printed
     */
    public void setDebug(boolean debug) {
        b_Debug = debug;
    }

    /**
     * Reports whether debugging output will be printed.
     *
     * @return true if debugging output is printed
     */
    public boolean getDebug() {
        return b_Debug;
    }

    /**
     * Removes the attribute with the highest standardised coefficient
     * greater than 1.5 from the selected attributes.
 *
 * @param selectedAttributes an array of flags indicating which
 * attributes are included in the regression model
 * @param coefficients an array of coefficients for the regression
 * model (one entry per selected attribute, intercept last)
 * @return true if an attribute was removed
 */
  private boolean deselectColinearAttributes(boolean [] selectedAttributes,
                                             double [] coefficients) {

    // Only standardised coefficients above this threshold are candidates
    double maxSC = 1.5;
    int maxAttr = -1, coeff = 0;
    // coeff walks the packed coefficient array, which has entries only for
    // attributes whose selectedAttributes flag is true
    for (int i = 0; i < selectedAttributes.length; i++) {
      if (selectedAttributes[i]) {
        double SC = Math.abs(coefficients[coeff] * m_StdDevs[i]
                             / m_ClassStdDev);
        if (SC > maxSC) {
          maxSC = SC;
          maxAttr = i;
        }
        coeff++;
      }
    }
    // Deselect the single worst offender, if any exceeded the threshold
    if (maxAttr >= 0) {
      selectedAttributes[maxAttr] = false;
      if (b_Debug) {
        System.out.println("Deselected colinear attribute:" + (maxAttr + 1)
                           + " with standardised coefficient: " + maxSC);
      }
      return true;
    }
    return false;
  }

  /**
   * Performs a greedy search for the best regression model using
   * Akaike's criterion. Mutates m_SelectedAttributes and m_Coefficients
   * in place.
   *
   * @throws Exception if regression can't be done
   */
  private void findBestModel() throws Exception {

    // For the weighted case we still use numInstances in
    // the calculation of the Akaike criterion.
    int numInstances = m_TransformedData.numInstances();

    if (b_Debug) {
      System.out.println((new Instances(m_TransformedData, 0)).toString());
    }

    // Perform a regression for the full model, and remove colinear attributes
    // one at a time until none remain above the threshold
    do {
      m_Coefficients = doRegression(m_SelectedAttributes);
    } while (m_EliminateColinearAttributes &&
             deselectColinearAttributes(m_SelectedAttributes, m_Coefficients));

    // Figure out current number of attributes + 1. (We treat this model
    // as the full model for the Akaike-based methods.)
    int numAttributes = 1;
    for (int i = 0; i < m_SelectedAttributes.length; i++) {
      if (m_SelectedAttributes[i]) {
        numAttributes++;
      }
    }

    double fullMSE = calculateSE(m_SelectedAttributes, m_Coefficients);
    // Akaike score of the full model: (n - k) + 2k. Candidate models below
    // are scored with their error expressed as a ratio to fullMSE, so the
    // full model's error term here is simply (n - k).
    double akaike = (numInstances - numAttributes) + 2 * numAttributes;
    if (b_Debug) {
      System.out.println("Initial Akaike value: " + akaike);
    }

    boolean improved;
    int currentNumAttributes = numAttributes;
    switch (m_AttributeSelection) {
    case SELECTION_GREEDY:
      // Greedy attribute removal: in each pass, try dropping every selected
      // attribute individually and keep any removal that lowers the Akaike
      // score; repeat until a full pass yields no improvement
      do {
        boolean [] currentSelected = (boolean []) m_SelectedAttributes.clone();
        improved = false;
        currentNumAttributes--;

        for (int i = 0; i < m_SelectedAttributes.length; i++) {
          if (currentSelected[i]) {

            // Calculate the akaike rating without this attribute
            currentSelected[i] = false;
            double [] currentCoeffs = doRegression(currentSelected);
            double currentMSE = calculateSE(currentSelected, currentCoeffs);
            double currentAkaike = currentMSE / fullMSE
              * (numInstances - numAttributes)
              + 2 * currentNumAttributes;
            if (b_Debug) {
              System.out.println("(akaike: " + currentAkaike);
            }

            // If it is better than the current best
            if (currentAkaike < akaike) {
              if (b_Debug) {
                System.err.println("Removing attribute " + (i + 1)
                                   + " improved Akaike: " + currentAkaike);
              }
              improved = true;
              akaike = currentAkaike;
              System.arraycopy(currentSelected, 0,
                               m_SelectedAttributes, 0,
                               m_SelectedAttributes.length);
              m_Coefficients = currentCoeffs;
            }
            // Restore the flag so the next candidate removal is independent
            currentSelected[i] = true;
          }
        }
      } while (improved);
      break;
    case SELECTION_M5:
      // Step through the attributes removing the one with the smallest
      // standardised coefficient until no improvement in Akaike
      do {
        improved = false;
        currentNumAttributes--;

        // Find attribute with smallest SC
        double minSC = 0;
        int minAttr = -1, coeff = 0;
        for (int i = 0; i < m_SelectedAttributes.length; i++) {
          if (m_SelectedAttributes[i]) {
            double SC = Math.abs(m_Coefficients[coeff] * m_StdDevs[i]
                                 / m_ClassStdDev);
            // (coeff == 0) seeds the minimum with the first selected attribute
            if ((coeff == 0) || (SC < minSC)) {
              minSC = SC;
              minAttr = i;
            }
            coeff++;
          }
        }

        // See whether removing it improves the Akaike score
        if (minAttr >= 0) {
          m_SelectedAttributes[minAttr] = false;
          double [] currentCoeffs = doRegression(m_SelectedAttributes);
          double currentMSE = calculateSE(m_SelectedAttributes, currentCoeffs);
          double currentAkaike = currentMSE / fullMSE
            * (numInstances - numAttributes)
            + 2 * currentNumAttributes;
          if (b_Debug) {
            System.out.println("(akaike: " + currentAkaike);
          }

          // If it is better than the current best
          if (currentAkaike < akaike) {
            if (b_Debug) {
              System.err.println("Removing attribute " + (minAttr + 1)
                                 + " improved Akaike: " + currentAkaike);
            }
            improved = true;
            akaike = currentAkaike;
            m_Coefficients = currentCoeffs;
          } else {
            // Removal hurt the score: put the attribute back and stop
            m_SelectedAttributes[minAttr] = true;
          }
        }
      } while (improved);
      break;
    case SELECTION_NONE:
      break;
    }
  }

  /**
   * Calculate the squared error of a regression model on the
   * training data.
   *
   * @param selectedAttributes an array of flags indicating which
   * attributes are included in the regression model
   * @param coefficients an array of coefficients for the regression
   * model
   * @return the sum of squared errors on the training data
   * (NOTE(review): this is the unaveraged sum, not the mean the original
   * javadoc claimed; callers only use it in ratios, so the constant
   * factor cancels)
   * @throws Exception if there is a missing class value in the training
   * data
   */
  private double calculateSE(boolean [] selectedAttributes,
                              double [] coefficients) throws Exception {

    double mse = 0;
    for (int i = 0; i < m_TransformedData.numInstances(); i++) {
      double prediction = regressionPrediction(m_TransformedData.instance(i),
                                               selectedAttributes,
                                               coefficients);
      double error = prediction - m_TransformedData.instance(i).classValue();
      mse += error * error;
    }
    return mse;
  }

  /**
   * 
Calculate the dependent value for a given instance for a
   * given regression model.
   *
   * @param transformedInstance the input instance
   * @param selectedAttributes an array of flags indicating which
   * attributes are included in the regression model
   * @param coefficients an array of coefficients for the regression
   * model (one entry per selected attribute, followed by the intercept)
   * @return the regression value for the instance.
   * @throws Exception if the class attribute of the input instance
   * is not assigned
   */
  private double regressionPrediction(Instance transformedInstance,
                                      boolean [] selectedAttributes,
                                      double [] coefficients)
    throws Exception {

    double result = 0;
    // column indexes the packed coefficient array (selected attributes only)
    int column = 0;
    for (int j = 0; j < transformedInstance.numAttributes(); j++) {
      if ((m_ClassIndex != j)
          && (selectedAttributes[j])) {
        result += coefficients[column] * transformedInstance.value(j);
        column++;
      }
    }
    // The last coefficient is the intercept
    result += coefficients[column];

    return result;
  }

  /**
   * Calculate a linear regression using the selected attributes.
   *
   * @param selectedAttributes an array of booleans where each element
   * is true if the corresponding attribute should be included in the
   * regression.
   * @return an array of coefficients for the linear regression model,
   * expressed in the original (uncentred, unscaled) attribute space,
   * with the intercept in the last position.
   * @throws Exception if an error occurred during the regression.
   */
  private double [] doRegression(boolean [] selectedAttributes)
    throws Exception {

    if (b_Debug) {
      System.out.print("doRegression(");
      for (int i = 0; i < selectedAttributes.length; i++) {
        System.out.print(" " + selectedAttributes[i]);
      }
      System.out.println(" )");
    }

    // Count how many attributes remain selected
    int numAttributes = 0;
    for (int i = 0; i < selectedAttributes.length; i++) {
      if (selectedAttributes[i]) {
        numAttributes++;
      }
    }

    // Check whether there are still attributes left
    Matrix independent = null, dependent = null;
    double[] weights = null;
    if (numAttributes > 0) {
      // Build the design matrix (centred on m_Means, and divided by
      // m_StdDevs unless checks are turned off) and the target vector
      independent = new Matrix(m_TransformedData.numInstances(),
                               numAttributes);
      dependent = new Matrix(m_TransformedData.numInstances(), 1);
      for (int i = 0; i < m_TransformedData.numInstances(); i ++) {
        Instance inst = m_TransformedData.instance(i);
        int column = 0;
        for (int j = 0; j < m_TransformedData.numAttributes(); j++) {
          if (j == m_ClassIndex) {
            dependent.setElement(i, 0, inst.classValue());
          } else {
            if (selectedAttributes[j]) {
              double value = inst.value(j) - m_Means[j];

              // We only need to do this if we want to
              // scale the input
              if (!m_checksTurnedOff) {
                value /= m_StdDevs[j];
              }
              independent.setElement(i, column, value);
              column++;
            }
          }
        }
      }

      // Grab instance weights
      weights = new double [m_TransformedData.numInstances()];
      for (int i = 0; i < weights.length; i++) {
        weights[i] = m_TransformedData.instance(i).weight();
      }
    }

    // Compute coefficients (note that we have to treat the
    // intercept separately so that it doesn't get affected
    // by the ridge constant.)
    double[] coefficients = new double[numAttributes + 1];
    if (numAttributes > 0) {
      double[] coeffsWithoutIntercept  =
        independent.regression(dependent, weights, m_Ridge);
      System.arraycopy(coeffsWithoutIntercept, 0, coefficients, 0,
                       numAttributes);
    }
    // With centred inputs, the class mean is the intercept of the
    // centred model; it is adjusted back to original scale below
    coefficients[numAttributes] = m_ClassMean;

    // Convert coefficients into original scale
    int column = 0;
    for(int i = 0; i < m_TransformedData.numAttributes(); i++) {
      if ((i != m_TransformedData.classIndex()) &&
          (selectedAttributes[i])) {

        // We only need to do this if we have scaled the
        // input.
        if (!m_checksTurnedOff) {
          coefficients[column] /= m_StdDevs[i];
        }

        // We have centred the input
        coefficients[coefficients.length - 1] -=
          coefficients[column] * m_Means[i];
        column++;
      }
    }

    return coefficients;
  }

  /**
   * Generates a linear regression function predictor.
   *
   * @param argv the options
   */
  public static void main(String argv[]) {
    runClassifier(new LinearRegression(), argv);
  }
}

?? 快捷鍵說明

復制代碼 Ctrl + C
搜索代碼 Ctrl + F
全屏模式 F11
切換主題 Ctrl + Shift + D
顯示快捷鍵 ?
增大字號 Ctrl + =
減小字號 Ctrl + -
亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频
日韩福利视频导航| av男人天堂一区| 成人免费观看视频| 91精品国产高清一区二区三区| 亚洲精品在线观| 亚洲精品va在线观看| 国产美女在线精品| 制服丝袜国产精品| 亚洲麻豆国产自偷在线| 久久99国产精品久久99 | 国产风韵犹存在线视精品| 欧美日韩情趣电影| 国产精品丝袜一区| 六月婷婷色综合| 欧美日韩精品一区二区三区| 亚洲欧洲精品一区二区精品久久久| 久久成人羞羞网站| 51午夜精品国产| 亚洲五码中文字幕| 日本韩国欧美三级| 一区精品在线播放| 国产精品一二二区| 26uuu精品一区二区在线观看| 石原莉奈在线亚洲三区| 欧美色综合影院| 亚洲免费观看高清在线观看| 国产乱对白刺激视频不卡| 91精品婷婷国产综合久久| 亚洲综合色视频| 91福利在线播放| 亚洲最新在线观看| 欧美日韩在线三区| 亚洲国产精品自拍| 91.麻豆视频| 亚洲va欧美va人人爽午夜| 欧美唯美清纯偷拍| 日韩中文字幕麻豆| 91精品国产美女浴室洗澡无遮挡| 亚洲风情在线资源站| 在线观看91精品国产入口| 亚洲国产精品久久久男人的天堂 | 欧美猛男超大videosgay| 亚洲美女偷拍久久| 欧美日韩免费在线视频| 亚洲大片免费看| 91精品国产欧美日韩| 精品一区中文字幕| 久久精品亚洲精品国产欧美| 国产91精品一区二区麻豆网站| 国产蜜臀97一区二区三区| 成人夜色视频网站在线观看| 日韩一区在线播放| 欧美精品一二三区| 久久99国产精品久久99| 国产精品五月天| 欧美日韩情趣电影| 国产精品亚洲综合一区在线观看| 欧美国产在线观看| 欧美视频精品在线| 九九九久久久精品| 亚洲视频 欧洲视频| 91精品国产免费| www.色综合.com| 亚洲v中文字幕| 国产日韩综合av| 在线精品视频免费播放| 精品无码三级在线观看视频| 国产精品拍天天在线| 欧美另类高清zo欧美| 国产精一区二区三区| 一区二区三区91| 久久久久久久av麻豆果冻| 91精品福利视频| 国产精品一二三区| 水野朝阳av一区二区三区| 中文一区一区三区高中清不卡| 欧美日韩国产乱码电影| 成人精品免费看| 免费人成精品欧美精品| 最新热久久免费视频| 欧美成人综合网站| 在线国产亚洲欧美| 国产91精品一区二区麻豆网站| 天天影视色香欲综合网老头| 国产亚洲一二三区| 欧美久久免费观看| 在线视频国内自拍亚洲视频| 国产精品资源在线观看| 秋霞电影网一区二区| 亚洲精品乱码久久久久久黑人| wwww国产精品欧美| 欧美男人的天堂一二区| 国产成人综合视频| 毛片av一区二区三区| 亚洲国产日产av| 中文字幕中文字幕一区二区| 久久久亚洲欧洲日产国码αv| 777午夜精品视频在线播放| 91丨九色丨蝌蚪丨老版| 成人午夜激情影院| 国产老女人精品毛片久久| 亚洲一区二区成人在线观看| 国产精品久久久久久亚洲毛片| 精品久久久久久久久久久久久久久久久 | 怡红院av一区二区三区| 国产亚洲欧美中文| 精品免费日韩av| 欧美肥胖老妇做爰| 欧美日韩国产一区| 欧美日韩在线免费视频| 91极品视觉盛宴| 在线日韩一区二区| 欧美在线小视频| 欧美综合一区二区三区| 97超碰欧美中文字幕| 99久久er热在这里只有精品66| 国产二区国产一区在线观看| 国产盗摄女厕一区二区三区 | 婷婷一区二区三区| 午夜精品一区在线观看| 日韩专区在线视频| 秋霞电影网一区二区| 黄网站免费久久| 国产91在线观看| 色一区在线观看| 欧美午夜在线一二页| 欧美日韩国产成人在线免费| 欧美巨大另类极品videosbest | 午夜在线成人av| 婷婷国产在线综合| 久久精品国产网站| 国产精品综合一区二区| 99麻豆久久久国产精品免费优播| 99久久99久久久精品齐齐| 日本韩国欧美一区| 欧美一区二区三区四区视频| www成人在线观看| 成人免费一区二区三区视频 | 日韩av二区在线播放| 麻豆精品蜜桃视频网站| 国产精品一区在线观看你懂的| 丁香激情综合五月| 欧美性猛交xxxx乱大交退制版| 欧美视频一区二| 欧美大胆人体bbbb| 一区精品在线播放| 日韩国产欧美三级| 福利一区二区在线| 欧美日韩卡一卡二| 久久免费精品国产久精品久久久久| 国产精品成人免费| 日韩成人午夜精品| av资源网一区| 日韩一卡二卡三卡| 亚洲欧美另类图片小说| 另类的小说在线视频另类成人小视频在线 | 精品久久久久久久久久久久包黑料 | 亚洲国产综合色| 国产一区二区三区电影在线观看| 99久久免费精品高清特色大片| 欧美精品一二三| 国产精品久99| 国内偷窥港台综合视频在线播放| 99精品欧美一区二区三区综合在线| 欧美一区二区三级| 亚洲欧美日韩在线播放| 
久久99精品网久久| 欧美亚洲动漫另类| 亚洲欧洲日韩综合一区二区| 日韩黄色免费网站| 色偷偷88欧美精品久久久| 国产视频一区二区在线| 青青草精品视频| 91黄色免费版| 中文字幕五月欧美| 狠狠狠色丁香婷婷综合久久五月| 在线观看av一区| 国产精品成人在线观看| 精品一区二区三区免费| 91麻豆精品国产91久久久久久久久| 中文字幕一区二区三区乱码在线| 国产一区中文字幕| 欧美一区三区二区| 亚洲综合色丁香婷婷六月图片| 成人激情黄色小说| 国产日韩在线不卡| 国内精品国产成人国产三级粉色| 欧美日韩国产色站一区二区三区| 亚洲免费av观看| 91影视在线播放| 亚洲色图一区二区| eeuss国产一区二区三区| 中文字幕精品综合| 成人听书哪个软件好| 国产三级一区二区| 国产不卡免费视频| 国产欧美一区二区精品秋霞影院| 狠狠色狠狠色综合日日91app| 日韩欧美色综合| 美国十次综合导航|