rpsmooth.m
function f=rpsmooth(x,nambda)
% Roughness Penalty Smoothing.
%
% f=rpsmooth(x,nambda);
%
% x ------- Input data for smoothing. It must be a vector.
%
% nambda -- The smoothing parameter. It controls the trade-off between
%           fidelity to the data and smoothness of the estimate: as nambda
%           grows, functions that depart from a straight line are penalized
%           more heavily, and vice versa. Pass a single number if you have
%           some knowledge of a suitable value for your data; otherwise pass
%           a two-element vector giving the range over which the best
%           parameter is chosen by leave-one-out cross-validation.
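%
% Example (an added illustration, not part of the original code; the test
% signal and the variable names t, y, ysm are made up for demonstration):
%   t   = linspace(0,1,200)';
%   y   = sin(2*pi*t) + 0.2*randn(size(t));  % noisy test signal
%   ysm = rpsmooth(y,[1 100]);  % choose nambda by cross-validation over [1,100]
%   ysm = rpsmooth(y,10);       % or smooth with a fixed nambda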
%
% DNP. 2007.12.25
% Merry Christmas!
tic
f=[];
if nargin<1
error('Not enough inputs!');
% return
elseif nargin>2
error('Too many inputs!');
% return
elseif nargin==1 % If no smoothing parameter is given, the default search range
nambda_low=1;    % for nambda is 1 to 100, with a step of 10.
step_wise=10;
nambda_up=100;
elseif nargin==2
if (length(nambda)==1 && nambda>0) || (length(nambda)==2 && isequal(nambda(1),nambda(2)))
% Short-circuit && and || so that nambda(2) is never indexed when nambda is a scalar.
nambda_low=nambda(1);
step_wise=[]; % Empty step: the CV loop below runs zero times and the single value is used directly.
nambda_up=nambda(1);
elseif nambda==0
f=x;
return
elseif any(nambda<0)
error('The smoothing parameter must be nonnegative.')
% return
elseif length(nambda)>2
error('The parameter vector must not contain more than 2 elements.')
% return
else
nambda_low=min(nambda);
step_wise=(max(nambda)-min(nambda))/20; % The parameter change step is (max(nambda)-min(nambda))/20
nambda_up=max(nambda);
end
end
[r c]=size(x);
if ~any([r c]==1)
error('Input data must be a vector!');
% return
elseif r==1
x=x';
end
% *****************************************************************************
% ....... Create matrices Q and R for calculating K. ................
Q=[];
R=[];
n=max(r,c);
h=ones(n,1); % Interval widths between data points (unit spacing assumed).
for i=1:n-2
Q(i:i+2,i)=[1;-2;1];
% Q(i,i)=1/h(i);
% Q(i+1,i)=-1/h(i)-1/h(i+1);
% Q(i+2,i)=1/h(i+1);
R(i,i)=(h(i)+h(i+1))/3;
if i==n-2
break
else
R(i,i+1)=h(i+1)/6;
R(i+1,i)=R(i,i+1);
end
end
K=Q*(R\Q'); % Same as Q*inv(R)*Q', without forming the explicit inverse.
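% Added note on the construction above (standard roughness-penalty smoothing,
% not part of the original comments): f'*K*f measures the roughness (curvature)
% of f, so the smoother below minimizes
%     sum_i (x(i)-f(i))^2 + nambda * f'*K*f,
% whose minimizer is the linear smoother f = inv(I + nambda*K)*x.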
% toc
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% ....... Leave-one-out Cross Validation. .........
rescv=[]; % Store the CV scores.
para=[];  % Store the candidate parameters.
j=0;
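% Added note: for a linear smoother f = A*x, the leave-one-out CV score has the
% closed form
%     CV(nambda) = sum_i ( (x(i)-f(i)) / (1-A(i,i)) )^2,
% so no explicit refitting on held-out points is needed.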
for alfa=nambda_low:step_wise:nambda_up
j=j+1;
para(j)=alfa;
A=inv(eye(n)+alfa*K); % Smoother ("hat") matrix; its diagonal is needed for the CV score.
% rescv(j)=0;
f=A*x;
rescv(j)=sum(((x-f)./(1-diag(A))).^2); % Leave-one-out CV score: sum of squared deleted residuals.
% for i=1:n
%     rescv(j)=rescv(j)+((x(i)-f(i))/(1-A(i,i)))^2;
% end
end
if length(rescv)>1 % A range was searched: pick the nambda that minimizes the CV score.
subplot(2,1,1);
plot(rescv); % CV score for each candidate nambda.
[rescvmin index]=min(rescv);
A=inv(eye(n)+para(index)*K); % Refit with the CV-optimal parameter.
f=A*x;
subplot(2,1,2);
plot(f)
t=toc
else
A=inv(eye(n)+nambda_low*K); % A single nambda value was given (nambda_low==nambda_up), so no CV search is done.
f=A*x;
plot(f)
t=toc
end