bfgs.cc
//============================================================
// COOOL version 1.1 --- Nov, 1995
// Center for Wave Phenomena, Colorado School of Mines
//============================================================
//
// This code is part of a preliminary release of COOOL (CWP
// Object-Oriented Optimization Library) and associated class
// libraries.
//
// The COOOL library is free software. You can do anything you want
// with it, including make a fortune. However, neither the authors,
// the Center for Wave Phenomena, nor anyone else you can think of
// makes any guarantees about anything in this package or any aspect
// of its functionality.
//
// Since you've got the source code, you can also modify the
// library to suit your own purposes. We would appreciate it
// if the headers that identify the authors are kept in the
// source code.
//
//=============================
// author: H. Lydia Deng, 06/17/96
//=============================

#include <defs.hh>
#include <BFGS.hh>

static const char* myNameIs = "the BFGS search";

const char* BFGS::className() const
{
    return (myNameIs);
}

BFGS::BFGS(LineSearch* p, int it, double eps)
    : LineSearchOptima(p)
{
    iterMax = it;
    tol = eps;
    iterNum = 0;
}

BFGS::BFGS(LineSearch* p, int it, double eps, int verb)
    : LineSearchOptima(p, verb)
{
    iterMax = it;
    tol = eps;
    iterNum = 0;
}

Model<double> BFGS::optimizer(Model<double>& model0)
{
    // reset the residue history for every new optimization
    iterNum = 0;
    if (residue != NULL) {
        delete residue;
        residue = new List<double>;
    }

    // initial settings for some parameters
    int n = model0.modSize();
    Vector<double> g0 = ls->gradient(model0);
    double lambda = 0.025;
    double descent = 0.;

    // check the gradient, in case the initial model is already optimal
    double err = (double)sqrt(g0*g0);
//  if (isVerbose) cerr << "Initial residue : " << err << endl;
    Optima::appendResidue(err);                   // residual

    if (err < tol) {
        if (isVerbose)
            cerr << "Initial guess was great! \n";
        isSuccess = 1;
        return model0;
    }

    // initial identity matrix for estimating the inverse of the Hessian
    Vector<double> diag(n);
    diag = 1.;
    DensMatrix<double> H(diag);

    double a, d, scale;
    Model<double> model1(model0);
    Vector<double> s(n), gamma(n), delta(n), g1(n);

    // search direction
    s = -H*g0;
    descent = s*g0;

    // line search for a new model
    model1 = ls->search(model0, s, descent, lambda);
    g1 = ls->gradient(model1);
    err = (double)sqrt(g1*g1);
    if (isVerbose)
        cerr << "Iteration (" << iterNum << ") : "
             << "current value of the objective function: "
             << ls->currentValue() << "\t current residue: " << err << endl;
    Optima::appendResidue(err);                   // residual
    iterNum++;

    DensMatrix<double> B(n, n);
    while (residue->last() > tol && iterNum < iterMax) {
        gamma = g1 - g0;
        delta = model1.modParam() - model0.modParam();

        // reuse the search-direction vector as temporary storage
        s = H*gamma;

        // factor in the denominator
        d = delta*gamma;
        if (d < 0.00000001) {
            // curvature too small: re-initialize the Hessian estimate
            H = 0;
            H = diag;
        } else {
            d = 1./(delta*gamma);
            scale = d*(gamma*s);
            scale += 1;
            scale *= d;
            // update the first term
            H += scale*outProduct(delta, delta);
            // update the second term
            B = outProduct(s, delta);
            H -= d*(B.adjoint() + B);
            // store the current model and gradient
            g0 = g1;
            model0 = model1;
        }

        s = -H*g0;
        descent = s*g0;
        model1 = ls->search(model0, s, descent, lambda);
        g1 = ls->gradient(model1);
        err = (double)sqrt(g1*g1);
        if (isVerbose)
            cerr << "Iteration (" << iterNum << ") : "
                 << "current value of the objective function: "
                 << ls->currentValue() << "\t current residue: " << err << endl;
        Optima::appendResidue(err);               // residual
        iterNum++;
    }
    return (model1);
}

Model<long> BFGS::optimizer(Model<long>& model0)
{
    Model<double> temp(model0);
    temp = optimizer(temp);
    Model<long> m(temp);
    return m;
}