% swarm.m
function [sol, fval, evals] = swarm(varargin)
%SWARM Particle Swarm Optimization
%
% Usage:
% sol = SWARM(PROBLEM)
% sol = SWARM(func, swarmsize, lb, ub)
% sol = SWARM(func, swarmsize, lb, ub, 'option1', value1, ...)
%
% [sol, fval] = SWARM(...)
% [sol, fval, evals] = SWARM(...)
%
% SWARM(func, popsize, lb, ub) tries to find the global minimum of the
% fitness-function [func], using a particle swarm optimization strategy.
% The number of particles is set by [swarmsize], and the boundaries for
% each dimension are set by the vectors [lb] and [ub], respectively.
%
% [sol, fval, evals] = SWARM(...) returns the trial vector found to yield the
% global minimum in [sol], and the corresponding function value by [fval].
% The total amount of function evaluations that the algorithm performed is
% returned in [evals].
%
% The function [func] must accept a vector argument of length [N], equal to
% the number of elements in the vectors [lb] or [ub]. Also, the function
% must be vectorized so that inserting a matrix of [popsize]x[N] will return
% a vector of size [popsize]x[1] containing the corresponding function values
% for the [N] trial vectors.
%
% The default parameters used by SWARM are
%
% eta1 = 2 social factor
%
% eta2 = 2 cooperative factor
%
% eta3 = 0.5 nostalgia factor
%
% omega = 0.5 inertial constant
%
% numneighbors = 5 amount of neighbors for each particle
%
% convvalue = 150 maximum number of iterations without
% improvement
%
% These values, as well as additional options, may be given manually in
% any extra input variables. Additionally, SWARM may be called as
% SWARM(PROBLEM), where [PROBLEM] is a complete problem-structure
% constructed by HEURSET. Type 'help HEURSET' for more details on how to
% use it.
%
% Some optimizations require repeated evaluation of a function that takes
% in the order of hours per evaluation. In those cases, it might be
% convenient to interrupt the optimization after a few days and use the
% results thus far obtained. Unfortunately, usually after you interrupt a
% function, its results are lost. For that reason, SWARM creates two
% global variables, [SWARM_bestind] and [SWARM_bestfval], which store
% the best individual and function value thus far found. When the
% optimization is interrupted, the intermediate results from the
% optimization are still available. Once an optimization has successfully
% converged, these variables are deleted from the workspace again.
%
% See also heurset, swarm, simanneal, diffevolve, genetic.
% Author: Rody P.S. Oldenhuis
% Delft University of Technology
% E-mail: oldenhuis@dds.nl
% Last edited: 28/Feb/2009
%% initialize & parse input
% initial values
skippop = false;
problem = heurset;
[pop, func, swarmsize, lb, ub, grace, display, maxfevals, convmethod, convvalue, ...
eta1, eta2, eta3, omega, numneighbors] = parseprob(problem);
% define temporary globals
global SWARM_bestfval SWARM_bestind
% common inputs
if (nargin >= 4)
func = varargin{1};
swarmsize = varargin{2};
lb = varargin{3};
ub = varargin{4};
end
% with additional options
if (nargin >= 5)
if ~isstruct(varargin{5})
options = heurset(varargin{5:end});
else
options = varargin{5};
end
[dum1, dum2, dum3, dum4, dum5, grace, display, maxfevals, convmethod, ...
convvalue, eta1, eta2, eta3, omega, numneighbors] = parseprob(options);
end
% if called from GODLIKE
if (nargin == 2)
problem = varargin{2};
% errortrap
if ~isstruct(problem)
error('PROBLEM should be a structure. Type `help heurset` for details.')
end
[pop, func, swarmsize, lb, ub, grace, display, maxfevals, convmethod, ...
convvalue, eta1, eta2, eta3, omega, numneighbors] = parseprob(problem);
% make sure the options are correct
convmethod = 'maxiters';
grace = 0;
display = false;
skippop = true;
end
% if given a full problem structure
if (nargin == 1)
problem = varargin{1};
% errortrap
if ~isstruct(problem)
error('PROBLEM should be a structure. Type `help heurset` for details.')
end
[pop, func, swarmsize, lb, ub, grace, display, maxfevals, convmethod, convvalue,...
eta1, eta2, eta3, omega, numneighbors] = parseprob(problem);
end
% initialize convergence method and adaptive control parameter, setting
% the default values if they are not given
if strcmpi(convmethod, 'exhaustive')
convergence = 1;
maxiterinpop = convvalue;
maxiters = inf;
elseif strcmpi(convmethod, 'maxiters')
convergence = 2;
maxiterinpop = inf;
maxiters = convvalue;
elseif strcmpi(convmethod, 'achieveFval')
convergence = 3;
%errortrap
if isempty(convvalue)
error('Please define function value to achieve.')
end
maxiterinpop = inf;
maxiters = inf;
else
convergence = 1;
maxiterinpop = convvalue;
end
% problem dimensions
dims = size(lb, 2);
% errortraps
if ( (size(lb, 2) ~= 1 || size(ub, 2) ~= 1) &&...
(size(lb, 2) ~= dims || size(ub, 2) ~= dims) )
error('Upper- and lower boundaries must be the same size.')
end
if (swarmsize < numneighbors+1)
error('SWARM needs a population size at least equal to the number of neighbors, plus one.')
end
%% initial values
% iteration-zero values
improvement = maxiterinpop;
previousbestfit = inf;
iters = 0;
evals = 0;
firsttime = true;
converging1 = false;
converging2 = false;
% boundaries on hyperspace
if (numel(lb) == 1)
mins = repmat(lb, swarmsize, dims);
maxs = repmat(ub, swarmsize, dims);
end
if (numel(lb) == numel(ub) && size(lb, 2) == dims)
mins = repmat(lb, swarmsize, 1);
maxs = repmat(ub, swarmsize, 1);
end
% boundaries on hypervelocities
velUb = (maxs - mins)/2;
velLb = (maxs - mins)/1e6;
% initialize swarm
if (~skippop)
pop = repmat(ub, swarmsize, 1) - repmat(ub-lb, swarmsize, 1).* rand(swarmsize, dims);
end
velocity = repmat( abs(ub-lb)/2, swarmsize, 1) - ...
repmat(-abs(ub-lb)/2, swarmsize, 1).* rand(swarmsize, dims);
localbest = pop;
prevpop = pop;
% initialize neighbors
neighbors = round( swarmsize * rand(swarmsize, numneighbors));
selfcentered = true;
while selfcentered
selfs = repmat( (1:swarmsize)', 1, numneighbors);
selfindices = (selfs == neighbors);
zeroindices = (neighbors == 0);
neighbors(selfindices) = round(swarmsize*rand(sum(selfindices(:)), 1));
neighbors(zeroindices) = round(swarmsize*rand(sum(zeroindices(:)), 1));
selfindices = (selfs == neighbors);
zeroindices = (neighbors == 0);
if ~any(selfindices(:)) && ~any(zeroindices(:))
selfcentered = false;
else
selfcentered = true;
end
end
% initial fitnesses are worst possible
localbestfit = inf(swarmsize, 1);
% Display strings
overfevals1 = '\n\nCould not find better solution within given maximum';
overfevals2 = '\nof function evaluations. Increase MaxFevals option.\n\n';
fitbackspace = '\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b';
converge1bck = [fitbackspace, '\b\b\b\b\b\b\b\b\b\b\b'];
converge2bck = [fitbackspace, '\b\b\b\b\b\b\b'];
convergestr = ', possibly converging: ';
itersstr = ', iterations left: ';
if display
fprintf(1, ['\nNote that when SWARM is interrupted, the best solution and function\n',...
'value are saved in global variables SWARM_bestind and SWARM_bestfval.\n\n']);
fprintf(1, 'Optimizing with ');
switch convergence
case 1
fprintf(1, 'exhaustive convergence criterion...\n');
case 2
fprintf(1, 'maximum iterations convergence criterion...\n');
case 3
fprintf(1, 'goal-attain convergence criterion...\n');
end
fprintf(1, 'Current best solution has cost of ------------- ');
end
%% Particle Swarm optimization loop
% replication matrices
dimsrep = ones(1, dims);
swarmrep = ones(swarmsize, 1);
neighinds = (0:swarmsize-1)';
% loop while none of the convergence criteria is met
while (improvement > 0 && iters <= maxiters)
    % NOTE(review): the remainder of this file is missing. The body of the
    % particle-swarm optimization loop opened above (velocity/position
    % updates, neighbor-best selection, convergence checks) and the closing
    % `end` statements were lost during extraction and replaced by web-viewer
    % UI residue (keyboard-shortcut help text). The file is NOT runnable
    % as-is -- recover the loop body from the original source before use.