

lbfgsbasictrainer.java

dragontoolkit, for machine learning (Java)
package dragon.ml.seqmodel.crf;

import dragon.matrix.*;
import dragon.ml.seqmodel.data.DataSequence;
import dragon.ml.seqmodel.data.Dataset;
import dragon.ml.seqmodel.feature.Feature;
import dragon.ml.seqmodel.feature.FeatureGenerator;
import dragon.ml.seqmodel.model.ModelGraph;
import dragon.util.MathUtil;

/**
 * <p>LBFGS basic trainer for conditional random field (CRF) models</p>
 * <p>Copyright: Copyright (c) 2005</p>
 * <p>Company: IST, Drexel University</p>
 * @author Davis Zhou
 * @version 1.0
 */
public class LBFGSBasicTrainer extends AbstractTrainer {
    protected int mForHessian;
    protected double epsForConvergence, invSigmaSquare;

    public LBFGSBasicTrainer(ModelGraph model, FeatureGenerator featureGenerator) {
        super(model, featureGenerator);
        mForHessian=7;
        epsForConvergence=0.001;
        invSigmaSquare=0.01;
    }

    //number of past gradient/update pairs kept for the L-BFGS Hessian approximation
    public void setGradientHistory(int history){
        mForHessian=history;
    }

    //convergence tolerance for L-BFGS; defaults to 0.001
    public void setAccuracy(double eps){
        this.epsForConvergence=eps;
    }

    //strength of the spherical Gaussian prior (1/sigma^2); defaults to 0.01
    public void setInvSigmaSquare(double invSigmaSquare){
        this.invSigmaSquare=invSigmaSquare;
    }

    public boolean train(Dataset dataset) {
        double gradLogli[], diag[], f;
        int iprint[], iflag[], icall, featureNum;

        //convert labels to states
        dataset.startScan();
        while(dataset.hasNext())
            model.mapLabelToState(dataset.next());

        //train features
        if(!featureGenerator.train(dataset))
            return false;
        featureNum=featureGenerator.getFeatureNum();
        lambda=new double[featureNum];
        gradLogli = new double[featureNum];
        diag = new double [featureNum];
        iprint= new int [2];
        iflag= new int[1];
        icall=0;
        iprint[0]=-1; //a negative value suppresses diagnostic output from LBFGS
        iprint[1]=0;
        iflag[0]=0;

        for (int j = 0 ; j < lambda.length ; j ++) {
            lambda[j] =0; //initialize the parameters
        }

        do {
            f = computeFunctionGradient(dataset, lambda,gradLogli);
            System.out.println((new java.util.Date()).toString()+ " Iteration: " + icall + " log likelihood "+f + " norm(grad logli) " + norm(gradLogli) + " norm(x) "+ norm(lambda));

            // since the routine below minimizes and we want to maximize log likelihood
            f = -1*f;
            for (int j = 0 ; j < lambda.length ; j ++) {
                gradLogli[j] *= -1;
            }

            try {
                //featureNum: number of variables, i.e. features
                //mForHessian: number of past gradients and updates kept (a value between 3 and 7 is recommended)
                //lambda: the variables, holding the current solution
                //f: value of the objective function (here the negated log likelihood)
                //gradLogli: the gradient vector of the current iteration
                //false: the caller does not supply the diagonal of the initial Hessian approximation
                //diag: workspace for that diagonal; LBFGS manages it when the flag above is false
                //iprint: controls diagnostic output (suppressed here)
                //epsForConvergence: the accuracy with which the solution is to be found
                //xtol: estimate of machine precision (field inherited from AbstractTrainer)
                //iflag: must be 0 on entry; the solution has been found when it returns 0
                LBFGS.lbfgs(featureNum, mForHessian, lambda, f, gradLogli, false, diag, iprint, epsForConvergence, xtol, iflag);
            }
            }
            catch (LBFGS.ExceptionWithIflag e)  {
                System.err.println( "CRF: lbfgs failed.\n"+e );
                if (e.iflag == -1) {
                    System.err.println("Possible reasons could be: \n \t 1. Bug in the feature generation or data handling code\n\t 2. Not enough features to make observed feature value==expected value\n");
                }
                return false;
            }
            icall += 1;
        } while (( iflag[0] != 0) && (icall <= maxIteration));
        return true;
    }

    //Euclidean (L2) norm of a vector
    protected double norm(double ar[]) {
        double v = 0;
        for (int f = 0; f < ar.length; f++)
            v += ar[f] * ar[f];
        return Math.sqrt(v);
    }

    //computes the gradient of the objective function (stored in grad[]) and returns the objective value (logli)
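    // In the notation of this method, the penalized log likelihood maximized
    // over the training sequences (x_k, y_k) is
    //   L(lambda) = sum_k [ sum_f lambda[f]*F_f(y_k, x_k) - log(Zx_k) ]
    //               - invSigmaSquare * sum_f lambda[f]^2 / 2
    // where F_f(y, x) counts feature f over the positions of a sequence and Zx
    // is the partition function. Its gradient, accumulated in grad[], is
    //   dL/dlambda[f] = sum_k [ F_f(y_k, x_k) - expF[f]/Zx_k ] - invSigmaSquare*lambda[f]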
    protected double computeFunctionGradient(Dataset diter, double lambda[], double grad[]) {
        DataSequence dataSeq;
        DoubleDenseMatrix Mi_YY;
        Feature feature;
        double[] alpha_Y, newAlpha_Y;
        double[][] beta_Y;
        double expF[], scale[];
        double val, thisSeqLogli, logli, Zx;
        int stateNum, markovOrder;
        int i, f, yp, yprev;


        logli=0;
        markovOrder=model.getMarkovOrder();
        stateNum=model.getStateNum();
        alpha_Y=new double[stateNum];
        newAlpha_Y=new double[stateNum];
        beta_Y=null;
        scale=null;
        expF = new double[featureGenerator.getFeatureNum()];
        Mi_YY=new DoubleFlatDenseMatrix(stateNum,stateNum);

        try {
            // apply the spherical Gaussian weight prior to avoid overfitting
            for (f = 0; f < lambda.length; f++) {
                grad[f] = -1*lambda[f]*invSigmaSquare;
                logli -= ((lambda[f]*lambda[f])*invSigmaSquare)/2;
            }

            diter.startScan();
            while(diter.hasNext()) {
                dataSeq = (DataSequence)diter.next();
                MathUtil.initArray(alpha_Y,1); // initialize forward state-cost vector
                for (f = 0; f < lambda.length; f++)
                    expF[f] = 0; //store the expectation of F(Y, x) for current data sequence

                if ((beta_Y == null) || (beta_Y.length < dataSeq.length())) {
                    beta_Y = new double[2*dataSeq.length()][];
                    for (i = 0; i < beta_Y.length; i++)
                        beta_Y[i] = new double[stateNum];
                    scale = new double[2*dataSeq.length()];
                }

                // compute beta values in a backward scan.
                // also scale beta-values to 1 to avoid numerical problems.
                scale[dataSeq.length()-1] = (doScaling)?stateNum:1;
                MathUtil.initArray(beta_Y[dataSeq.length()-1],1.0/scale[dataSeq.length()-1]);
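                // backward recurrence: beta[i-1] = M_i * beta[i], then divide by
                // scale[i-1] (the element sum) so the entries stay bounded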
                for (i = dataSeq.length()-1; i >markovOrder-1; i--) {
                    // compute the Mi matrix and Beta(i-1)
                    computeTransMatrix(lambda,dataSeq,i,i,Mi_YY,true);
                    MathUtil.initArray(beta_Y[i-1],0);
                    genStateVector(Mi_YY, beta_Y[i], beta_Y[i-1],false); //beta_Y[i-1]=Mi_YY*beta_Y[i]

                    // need to scale the beta-s to avoid overflow
                    scale[i-1] = doScaling ? MathUtil.sumArray(beta_Y[i-1]):1;
                    if ((scale[i-1] < 1) && (scale[i-1] > -1))
                        scale[i-1] = 1; //avoid dividing by a near-zero sum
                    MathUtil.multiArray(beta_Y[i-1], 1.0/scale[i-1]);
                }

                //calculate F(yk, xk) and expF(Y, xk)
                thisSeqLogli = 0;
                for (i = markovOrder-1; i < dataSeq.length(); i++) {
                    // compute the Mi matrix and new alpha (forward)
                    computeTransMatrix(lambda,dataSeq,i,i,Mi_YY,true);
                    MathUtil.initArray(newAlpha_Y,0);
                    genStateVector(Mi_YY, alpha_Y, newAlpha_Y,true); //newAlpha_Y=transpose(alpha_Y*Mi_YY)

                    featureGenerator.startScanFeaturesAt(dataSeq, i,i);
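                    // each scanned feature fires for a (label, prevLabel) pair at
                    // position i; the alpha*Mi*beta products below are the
                    // unnormalized marginals needed for the feature expectations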
                    while (featureGenerator.hasNext()) {
                        feature = featureGenerator.next();
                        f = feature.getIndex();
                        yp = feature.getLabel();
                        yprev = feature.getPrevLabel();
                        val = feature.getValue();

                        if ((dataSeq.getLabel(i) == yp) && (((i-1 >= 0) && (yprev == dataSeq.getLabel(i-1))) || (yprev < 0))) {
                            grad[f] += val; //accumulate F(yk,xk)
                            thisSeqLogli += val*lambda[f];
                        }
                        if (yprev < 0)
                            expF[f] += val*newAlpha_Y[yp]*beta_Y[i][yp]; //state feature
                        else
                            expF[f] += val*alpha_Y[yprev]*Mi_YY.getDouble(yprev,yp)*beta_Y[i][yp]; //transition feature
                    }
                    MathUtil.copyArray(newAlpha_Y, alpha_Y);

                    // now scale the alpha-s to avoid overflow problems.
                    MathUtil.multiArray(alpha_Y, 1.0/scale[i]);
                }

                Zx = MathUtil.sumArray(alpha_Y);
                thisSeqLogli -= Math.log(Zx);

                // correct for the fact that alpha-s were scaled.
                for (i = markovOrder-1; i < dataSeq.length(); i++) {
                    thisSeqLogli -= Math.log(scale[i]);
                }
                logli += thisSeqLogli;

                // update the gradient.
                for (f = 0; f < grad.length; f++)
                    grad[f] -= expF[f]/Zx;
            }
        }
        catch (Exception e) {
            e.printStackTrace();
            System.exit(1); //abort with a nonzero status on unexpected errors
        }
        return logli;
    }
}
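
Usage

A minimal sketch of driving this trainer, assuming concrete ModelGraph,
FeatureGenerator and Dataset instances are already available (constructing
them is toolkit-specific and not shown in this file). The TrainerDemo class
and trainCrf method below are hypothetical names, not part of the Dragon
Toolkit:

import dragon.ml.seqmodel.crf.LBFGSBasicTrainer;
import dragon.ml.seqmodel.data.Dataset;
import dragon.ml.seqmodel.feature.FeatureGenerator;
import dragon.ml.seqmodel.model.ModelGraph;

public class TrainerDemo {
    //hypothetical driver: the caller supplies the model, feature generator and data
    public static boolean trainCrf(ModelGraph model, FeatureGenerator fg, Dataset data) {
        LBFGSBasicTrainer trainer = new LBFGSBasicTrainer(model, fg);
        trainer.setGradientHistory(7);   //past gradient/update pairs for the Hessian approximation
        trainer.setAccuracy(0.001);      //L-BFGS convergence tolerance
        trainer.setInvSigmaSquare(0.01); //Gaussian prior strength (1/sigma^2)
        return trainer.train(data);      //false if feature training or L-BFGS fails
    }
}

The setter calls above simply restate the constructor defaults; in practice one
would tune invSigmaSquare against held-out data, since it trades fit against
the strength of the Gaussian prior.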
