亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频

? 歡迎來到蟲蟲下載站! | ?? 資源下載 ?? 資源專輯 ?? 關于我們
? 蟲蟲下載站

?? ffnc.m

?? 這是模式識別方面的電子書籍
?? M
字號:
%FFNC Feed-forward neural net classifier back-end
%
% 	[W,HIST] = FFNC (ALG,A,UNITS,ITER,W_INI,T,FID)
%
% INPUT
% 	ALG   Training algorithm: 'bpxnc' for back-propagation (default), 'lmnc' 
%         for Levenberg-Marquardt
% 	A     Training dataset
% 	UNITS Array indicating number of units in each hidden layer (default: [5])
% 	ITER  Number of iterations to train (default: inf)
% 	W_INI Weight initialisation network mapping (default: [], meaning 
%         initialisation by Matlab's neural network toolbox)
% 	T     Tuning set (default: [], meaning use A)
%   FID   File ID to write progress to (default [], see PRPROGRESS)
%
% OUTPUT
% 	W     Trained feed-forward neural network mapping
% 	HIST  Progress report (see below)
%
% DESCRIPTION 
% This function should not be called directly, but through one of its 
% front-ends, BPXNC or LMNC. Uses the Mathworks' Neural Network toolbox.
% 
% SEE ALSO
% MAPPINGS, DATASETS, BPXNC, LMNC, NEURC, RNNC, RBNC, PRPROGRESS

% Copyright: R.P.W. Duin, duin@ph.tn.tudelft.nl
% Faculty of Applied Physics, Delft University of Technology
% P.O. Box 5046, 2600 GA Delft, The Netherlands

% $Id: ffnc.m,v 1.8 2008/07/03 09:08:43 duin Exp $

function [w,hist] = ffnc(alg,a,units,max_iter,w_ini,t,fid)

	prtrace(mfilename);

	% The (legacy) Neural Network toolbox is required; EXIST returns 7 for a
	% directory on the path, which is how its presence is detected here.
	if exist('nnet') ~= 7
		error('Neural network toolbox not found')
	end

	% Select a human-readable mapping name for the chosen training algorithm.
	if (strcmp(alg,'bpxnc'))
		mapname = 'BP Neural Classf';	
	elseif (strcmp(alg,'lmnc'))
		mapname = 'LM Neural Classf';
	else
		error('illegal training algorithm specified');
	end;

	% Check arguments: fill in defaults for trailing arguments not supplied.
	if (nargin < 7), fid = []; end;
	if (nargin < 6) | (isempty(t))
		prwarning(2,'no tuning set supplied, using training set for tuning (risk of overfit)');
		if (nargin < 2), t = []; else, t = a; end;
	end
	if (nargin < 5) | (isempty(w_ini))
		prwarning(3,'no initialisation supplied, using Nguyen-Widrow random initialisation');
		w_ini = []; 
	end
	if (nargin < 4) | (isempty(max_iter))
		prwarning(3,'no maximum number of iterations supplied, assuming infinite');
		max_iter = inf; 
	end
	if (nargin < 3) | (isempty(units))
		prwarning(2,'no network architecture specified, assuming one hidden layer of 5 units');
		units = 5; 
	end

	% No training data given: return an untrained mapping that stores the
	% parameters, to be trained later by applying it to a dataset.
	if (nargin < 2) | (isempty(a))
		w = mapping(alg,{units,max_iter,w_ini,t,fid});
		w = setname(w,mapname);
		hist = [];
		return
  end

  if isnan(units) % optimize complexity parameter: number of neurons
		% REGOPTC searches UNITS in [1,30], minimising the soft test error.
		defs = {5,[],[],[],[]};
		parmin_max = [1,30;0,0;0,0;0,0;0,0];
		[w,hist] = regoptc(a,alg,{units,max_iter,w_ini,t,fid},defs,[1],parmin_max,testc([],'soft'),0);
		return
  end
  
	% Training target values (place coding uses 0.9 for the true class and
	% 0.1 for the others, instead of hard 1/0 targets).
	prwarning (4, 'using training targets 0.9/0.1');
	target_high	= 0.9;
	target_low	= 0.1;

	% Check whether the dataset is valid.
	islabtype(a,'crisp');
	isvaldfile(a,1,2); 							% At least 1 object per class, 2 classes
	a = testdatasize(a);
	t = testdatasize(t);
	iscomdset(a,t);   							% Check whether training and tuning set match
	%a = setprior(a,getprior(a));
	%t = setprior(a,getprior(t));
	
	[m,k,c] = getsize(a); 
	lablist = getlablist(a); 

	% Standard training parameters.
	disp_freq   = inf; 
	err_goal 		= 0.02/m;						% Mean-squared error goal, stop if reached
	trnsf_fn	  = 'logsig';					% Transfer function
  perf_fn     = 'mse';            % Performance function

	% Settings for the different training algorithms.	
	tp.show   = disp_freq;
	tp.time   = inf;
	tp.goal   = err_goal;
	if (strcmp(alg,'bpxnc'))
		trnalg 					= 'traingdx'; 
		lrnalg 					= 'learngdm';
		burnin       		= 500;			% Never stop training before this many iters
		tp.epochs    		= min(50,max_iter); 	% Iteration unit
		tp.lr        		= 0.01;			% BP, initial value for adaptive learning rate
		tp.lr_inc    		= 1.05;			% BP, multiplier for increasing learning rate
		tp.lr_dec    		= 0.7;			% BP, multiplier for decreasing learning rate
		tp.mc        		= 0.95;			% BP, momentum
		tp.max_perf_inc = 1.04;			% BP, error ratio
		tp.min_grad  		= 1e-6;			% BP, minimum performance gradient
		tp.max_fail  		= 5;				% BP, maximum validation failures
		speed           = 10000;    % waitbar speed
	elseif (strcmp(alg,'lmnc'))
		trnalg 					= 'trainlm';  
		lrnalg 					= 'learngdm';
		burnin       		= 50;				% Never stop training before this many iters
		tp.epochs 	 		= min(1,max_iter); 		% Iteration unit
		tp.mem_reduc 		= 1;				% Trade-off between memory & speed
		tp.max_fail  		= 1; 				% LM, maximum validation failures
		tp.min_grad  		= 1e-6;			% LM, minimum gradient, stop if reached
		tp.mu  	    		= 0.001;		% LM, initial value for adaptive learning rate
		tp.mu_inc    		= 10;				% LM, multiplier for increasing learning rate
		tp.mu_dec    		= 0.1;			% LM, multiplier for decreasing learning rate
		tp.mu_max    		= 1e10;			% LM, maximum learning rate
		speed           = 100;      % waitbar speed
	end;
	
	% Scale each feature to the range [0,1].
	prwarning(3,'scaling such that training set features have range [0,1]');
	ws = scalem(a,'domain'); a_scaled = a*ws; t_scaled = t*ws;

	% Set number of network outputs: 1 for 2 classes, c for c > 2 classes.
	if (c == 2), cout = 1; else cout = c; end

	% Create target matrix: row C contains a high value at position C,
	% the correct class, and a low one for the incorrect ones (place coding).
	if (cout > 1)
    target = target_low * ones(c,c) + (target_high - target_low) * eye(c);
	else
		% Two-class case: single output, high for class 1, low for class 2.
		target = [target_high; target_low];
	end

	% Create the target arrays for both datasets.
	target_a = target(getnlab(a),:)';
	target_t = target(getnlab(t),:)';

	% Create the network layout: K inputs, N(:) hidden units, COUT outputs.
	numlayers = length(units)+1; numunits = [k,units(:)',cout];
	transfer_fn = cellstr(char(ones(numlayers,1)*trnsf_fn));

	% Create network and set training parameters. The network is initialised
	% by the Nguyen-Widrow rule by default. The input ranges are [0,1] per
	% feature, matching the SCALEM scaling above.
	net = newff(ones(numunits(1),1)*[0 1],numunits(2:end),...
							transfer_fn,trnalg,lrnalg,perf_fn);
	net.trainParam = tp;

	% If an initial network is specified, use its weights and biases.
	if (~isempty(w_ini))
		% Use given initialisation. If W_INI is a sequential mapping, its first
		% stage replaces the scaling and its second stage supplies the network.
		[data,lab,type_w] = get(w_ini,'data','labels','mapping_file');
		if (strcmp(type_w,'sequential'))
			a_scaled = a*data{1}; t_scaled = t*data{1}; ws = data{1};
			[data,lab,type_w] = get(data{2},'data','labels','mapping_file');
		end

		% Check whether the mapping's dimensions are the same as the network's.
		[kw,cw] = size(w_ini); net_ini = data{1};
		if (~strcmp(type_w,'neurc')) | (kw ~= k) | (cw ~= c) | ...
				(net.numInputs  ~= net_ini.numInputs) | ...
				(net.numLayers  ~= net_ini.numLayers) | ...
				(net.numOutputs ~= net_ini.numOutputs) | ...
				any(net.biasConnect ~= net_ini.biasConnect) | ...
				any(net.inputConnect ~= net_ini.inputConnect) | ...
				any(net.outputConnect ~= net_ini.outputConnect)
			error('incorrect size initialisation network supplied')
		end

		% Check whether the initialisation network was trained on the same data.
		[dummy1,nlab,dummy2] = renumlab(lablist,lab);
		if (max(nlab) > c)
			error('initialisation network should be trained on same classes')
		end

		net.IW = net_ini.IW; net.LW = net_ini.LW; net.b = net_ini.b;
	end

	% Initialize loop 
	opt_err = inf; opt_iter = inf; opt_net = net; 
	iter = 0; this_iter = 1; hist = []; 
	
	% Loop while
	% - training has not gone on for longer than 50 iterations or 2 times the 
	%   number of iterations for which the error was minimal, and
	% - the number of iterations does not exceed the maximum
	% - the actual training function still performed some iterations
	% (early stopping on the tuning-set error E_T; the best network so far is
	% kept in OPT_NET and returned at the end).
  
	prprogress(fid,'%s: neural net, %i units: \n',alg,units);		
	prprogress(fid,'%i %5.3f %5.3f\n',0,1,1);
	s = sprintf('%s: neural net, %i units',alg,units);
	prwaitbar(100,s);
  while ((iter <= 2*opt_iter) | (iter < burnin)) & ...
				(iter < max_iter) & (this_iter > 0) & (opt_err > 0)

		% Waitbar advances asymptotically since total iterations are unknown.
		prwaitbar(100,100-100*exp(-iter/speed));
		% Call TRAIN, from Matlab's NN toolbox. Each call runs TP.EPOCHS epochs;
		% THIS_ITER counts the epochs actually performed this call.
		prwarning(4,'[%d] calling NNETs train', iter);
		%net.trainParam.mu = min(net.trainParam.mu_max*0.9999,net.trainParam.mu);
		[net,tr] = train(net,+a_scaled',target_a);
		this_iter = length(tr.epoch)-1; iter = iter + this_iter;

		% Copy current learning rate as the one to start with for the next time.
		if (strcmp(alg,'bpxnc'))
			net.trainParam.lr = tr.lr(end);
		else
			net.trainParam.mu = tr.mu(end);
		end;

		% Map train and tuning set through the current network.
  	w = mapping('neurc','trained',{net},lablist,k,c);
  	w = setname(w,mapname); 	
	
		% Calculate mean squared errors (MSE).
  	out_a = a_scaled*w; out_t = t_scaled*w; 
		mse_a = mean(mean(((out_a(:,1:cout))-target_a').^2,1));
		mse_t = mean(mean(((out_t(:,1:cout))-target_t').^2,1));

		% Calculate classification errors.
		e_a = testc(a_scaled,w); e_t = testc(t_scaled,w);	
	
		% If this error is minimal, store the iteration number and weights.
  	if (e_t < opt_err)
  		opt_err = e_t; opt_iter = iter; opt_net = net; 
  	end
	
		% Collect all network weights to report their mean squared magnitude.
		w1 = cell2mat(net.IW); w1 = w1(:);
		%w2 = cell2mat(net.LW'); bugfix, doesnot work for multilayer networks
		%w2 = w2(:);
		netLW = net.LW(:);
		w2 = [];
		for j=1: length(netLW)
			ww = netLW{j};
			w2 = [w2; ww(:)];
		end
		% HIST columns: [iteration, train error, tuning error,
		%                train MSE, tuning MSE, mean squared weight].
  	hist = [hist; iter e_a e_t mse_a mse_t ...
						mean([w1; w2].^2)];
		prprogress(fid,'%i %5.3f %5.3f\n',iter,e_t,opt_err);
  end
	prwaitbar(0);

	% Create mapping from the best network found, prefixed by the scaling.
  w = ws*mapping('neurc','trained',{opt_net},lablist,k,c);
  w = setname(w,mapname);
	w = setcost(w,a);

return

?? 快捷鍵說明

復制代碼 Ctrl + C
搜索代碼 Ctrl + F
全屏模式 F11
切換主題 Ctrl + Shift + D
顯示快捷鍵 ?
增大字號 Ctrl + =
減小字號 Ctrl + -
亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频
色一区在线观看| 久久久欧美精品sm网站| 中文字幕亚洲不卡| 色综合视频在线观看| 亚洲免费视频中文字幕| 欧美人动与zoxxxx乱| 日本美女视频一区二区| 久久蜜桃av一区精品变态类天堂| 国产成人午夜视频| 一区二区三区中文字幕在线观看| www..com久久爱| 亚洲一区自拍偷拍| 久久久久国产免费免费| 一本色道久久综合狠狠躁的推荐| 日韩精品高清不卡| 国产精品夫妻自拍| 久久久www免费人成精品| 色综合久久综合网欧美综合网 | 久久99久国产精品黄毛片色诱| 久久一留热品黄| 欧美一级在线免费| 日韩美女视频在线| 91免费视频网址| 成人激情开心网| 国产一区二区h| 日本怡春院一区二区| 亚洲最新在线观看| 最新日韩av在线| 国产精品久久久久aaaa| 2023国产一二三区日本精品2022| 91福利社在线观看| 欧美唯美清纯偷拍| 欧美美女直播网站| 欧美精品在线观看一区二区| 欧美性生活一区| 欧美另类变人与禽xxxxx| 91视视频在线观看入口直接观看www | 一区二区高清在线| 中文字幕亚洲一区二区av在线| 国产日韩欧美精品一区| 国产精品久久久久永久免费观看 | 欧美日韩一区二区三区在线| 91色porny在线视频| 欧美探花视频资源| 欧美一区二区三区免费在线看| 91精品国产高清一区二区三区 | 5566中文字幕一区二区电影 | 欧美在线观看视频一区二区三区| 91视频免费播放| 欧美日韩免费观看一区三区| 91精品国产一区二区| 久久综合网色—综合色88| 亚洲国产精品成人久久综合一区| 亚洲少妇中出一区| 麻豆精品新av中文字幕| 成人三级伦理片| 欧美精品久久99久久在免费线 | 91精品免费观看| 国产精品久久久久永久免费观看| 五月天亚洲精品| 日本精品视频一区二区三区| 日韩午夜激情av| 日韩午夜在线影院| 久久久久国产精品麻豆ai换脸| 亚洲精品国产一区二区三区四区在线| 一区二区不卡在线播放| 国产91高潮流白浆在线麻豆 | 美女视频一区在线观看| 91色视频在线| 亚洲男人天堂av| 99久久综合狠狠综合久久| 国产午夜精品久久| 激情六月婷婷久久| 日韩写真欧美这视频| 免费观看91视频大全| 欧美一卡二卡在线| 丝瓜av网站精品一区二区| 91小视频在线免费看| 亚洲黄色免费电影| 在线亚洲欧美专区二区| 五月天中文字幕一区二区| 欧美日韩精品一区二区三区| 午夜av一区二区| 91精品国产色综合久久ai换脸| 性欧美疯狂xxxxbbbb| 日韩精品一区国产麻豆| 激情成人午夜视频| 亚洲视频在线观看三级| 欧美午夜不卡视频| 裸体健美xxxx欧美裸体表演| 精品欧美一区二区久久| 99综合电影在线视频| 亚洲h在线观看| 亚洲国产精品t66y| 欧洲中文字幕精品| 国产一区二区网址| 一区二区三区精品在线观看| 久久亚洲综合色一区二区三区| 91视频一区二区三区| 午夜不卡在线视频| 成人欧美一区二区三区| 欧美va亚洲va在线观看蝴蝶网| 在线成人小视频| 麻豆精品视频在线观看视频| 国产亚洲精品bt天堂精选| 久久99精品国产麻豆婷婷| 亚洲另类一区二区| 久久欧美中文字幕| 精品久久国产97色综合| 91玉足脚交白嫩脚丫在线播放| 九色|91porny| 蜜臀久久久99精品久久久久久| 亚洲蜜臀av乱码久久精品| 欧美激情一区二区三区在线| 久久伊人中文字幕| 欧美mv日韩mv| 久久久www免费人成精品| 精品成人一区二区| 久久嫩草精品久久久精品| 日韩欧美自拍偷拍| 日韩亚洲欧美成人一区| 日韩无一区二区| 久久久久久影视| 亚洲视频一区在线| 亚洲成av人影院| 亚洲成人动漫在线免费观看| 亚洲人精品一区| 亚洲1区2区3区4区| 日日夜夜免费精品| 国产乱妇无码大片在线观看| 国产一区日韩二区欧美三区| 成人激情图片网| 7878成人国产在线观看| 精品国产一二三区| 欧美激情综合在线| 肉丝袜脚交视频一区二区| 国产一区二区免费视频| 色综合色狠狠天天综合色| 欧美一区二区三区在线观看| 国产午夜精品一区二区| 日韩va欧美va亚洲va久久| 狠狠色综合日日| 91福利国产精品| 国产精品免费丝袜| 蜜桃视频在线观看一区二区| 91麻豆国产精品久久| 日韩视频在线观看一区二区| 中文字幕在线一区| 国产一区91精品张津瑜| 欧美老人xxxx18| 亚洲午夜精品在线| 91麻豆蜜桃一区二区三区| 国产精品无圣光一区二区| 久久99国产精品久久99| 青青草成人在线观看| 成人国产在线观看| 精品粉嫩超白一线天av| 麻豆精品一区二区综合av| 欧美视频第二页| 亚洲成人久久影院| 欧美中文字幕一区二区三区亚洲 | www.成人在线| 
亚洲欧美国产三级| 欧洲一区二区三区在线| 亚洲美女电影在线| 欧美日韩国产三级| 精品伊人久久久久7777人| 欧美va日韩va| 成人精品一区二区三区中文字幕| 国产三级一区二区| 91麻豆免费看| 青青国产91久久久久久| 久久久久久久久久久久久久久99| 国产福利一区二区三区视频 | 欧美日韩精品二区第二页| 免费观看在线综合| 亚洲国产成人va在线观看天堂| 一本色道久久综合狠狠躁的推荐| 亚洲一二三四区| 欧美成人性战久久| 欧美在线不卡一区| 国产69精品久久久久777| 亚洲人成影院在线观看| 26uuu另类欧美亚洲曰本| 色综合天天综合在线视频| 蜜臀av国产精品久久久久 | 国产精品视频yy9299一区| 欧美色手机在线观看| 国产盗摄精品一区二区三区在线| 亚洲一二三区在线观看| 2021中文字幕一区亚洲| 欧美一区二区人人喊爽| 色综合久久久久久久久| 成人av电影免费在线播放| 免费高清在线一区| 免费在线观看不卡| 免费观看91视频大全| 日韩不卡一区二区三区 | 日本女优在线视频一区二区| 亚洲一区二区三区在线看|