Wanted: MATLAB adaptive algorithm programs!

I need a simulation program for an adaptive-antenna algorithm;
either LMS or RLS is fine.

Extra points will be awarded.

Please be quick, thank you!

% LMS algorithm source program

clear all
close all
% Channel (system) order
sysorder = 5 ;
% Number of data points
N = 2000 ;
inp = randn(N,1);   % White input signal
n = randn(N,1);     % Measurement noise (the name n is reused later as the loop index)
[b,a] = butter(2,0.25);
Gz = tf(b,a,-1);    % Discrete-time channel transfer function
% ldiv (MATLAB Central File Exchange) computes the inverse Z-transform,
% giving the first sysorder impulse-response samples:
% h = ldiv(b,a,sysorder)';
% If ldiv is used, it returns the following h, the true filter weights to be identified:
h = [0.0976; 0.2873; 0.3360; 0.2210; 0.0964];
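% The same first sysorder impulse-response samples can also be obtained
% directly with impz (Signal Processing Toolbox, already needed for butter):
% h = impz(b, a, sysorder);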
y = lsim(Gz,inp);                 % Channel output for the white input
% Add measurement noise at 20 dB SNR (noise std = signal std / 10)
n = n * std(y)/(10*std(n));
d = y + n;                        % Desired (reference) signal
totallength=size(d,1);
% Use only the first 60 samples for training (N is redefined here)
N = 60 ;
% Start of the LMS algorithm: initialize the weight vector to zero
w = zeros(sysorder,1);
for n = sysorder : N
    % Regressor: the sysorder most recent input samples, newest first
    u = inp(n:-1:n-sysorder+1);
    % Filter output (y is reused to store the estimate; the reference is kept in d)
    y(n) = w' * u;
    % A priori estimation error
    e(n) = d(n) - y(n);
    % Start with a large mu to speed up convergence, then reduce it to lower
    % the misadjustment near the correct weights. For this unit-variance white
    % input, mean-square stability requires roughly mu < 2/(sysorder*var(inp)) = 0.4,
    % so both values are safe.
    if n < 20
        mu = 0.32;
    else
        mu = 0.15;
    end
    % LMS update
    w = w + mu * u * e(n);
end
% Check the results on the remaining data (the weights are no longer updated)
for n = N+1 : totallength
    u = inp(n:-1:n-sysorder+1);
    y(n) = w' * u;
    e(n) = d(n) - y(n);
end
hold on
plot(d)
plot(y,'r');
title('System output') ;
xlabel('Samples')
ylabel('True and estimated output')
figure
semilogy(abs(e)) ;
title('Error curve') ;
xlabel('Samples')
ylabel('Error value')
figure
plot(h, 'k+')
hold on
plot(w, 'r*')
legend('Actual weights','Estimated weights')
title('Comparison of the actual weights and the estimated weights') ;
axis([0 6 0.05 0.35])
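
The LMS program above (and the RLS program below) identify an unknown system from its input and output; neither simulates an antenna array directly. For the adaptive-antenna use asked about in the question, the same complex LMS update can adapt the weights of an array. The following is a minimal sketch, assuming a 4-element half-wavelength uniform linear array, a known BPSK training signal arriving from 0 degrees, a single interferer at 40 degrees, and a step size of 0.01; all of these values are illustrative choices, not part of the original programs.

% ---- Minimal LMS adaptive-array (beamforming) sketch ----
M      = 4;                  % number of array elements, half-wavelength spacing
Nsnap  = 1000;               % number of snapshots
thetaS = 0*pi/180;           % direction of the desired (training) signal
thetaI = 40*pi/180;          % direction of the interferer
mu     = 0.01;               % LMS step size
aS = exp(-1j*pi*(0:M-1)'*sin(thetaS));   % steering vector, desired signal
aI = exp(-1j*pi*(0:M-1)'*sin(thetaI));   % steering vector, interferer
s    = sign(randn(1,Nsnap));                              % known BPSK training sequence
intf = (randn(1,Nsnap) + 1j*randn(1,Nsnap))/sqrt(2);      % interference waveform
v    = 0.1*(randn(M,Nsnap) + 1j*randn(M,Nsnap))/sqrt(2);  % sensor noise
X = aS*s + aI*intf + v;      % received array snapshots (M x Nsnap)
w = zeros(M,1);              % adaptive weight vector
for k = 1:Nsnap
    y   = w' * X(:,k);                   % array output
    err = s(k) - y;                      % error against the training signal
    w   = w + mu * X(:,k) * conj(err);   % complex LMS update
end
% Beam pattern of the converged weights
theta = (-90:0.5:90)*pi/180;
A = exp(-1j*pi*(0:M-1)'*sin(theta));     % steering vectors over all angles
figure
plot(theta*180/pi, 20*log10(abs(w'*A)/max(abs(w'*A))))
xlabel('Angle (degrees)')
ylabel('Normalized array gain (dB)')
title('LMS adaptive array pattern')

With a known training sequence the converged weights steer the main beam toward the desired direction and attenuate the interferer direction; in practice the reference would come from a pilot sequence or a decision-directed estimate.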

% RLS algorithm source program
randn('seed', 0) ;   % Fix the random seeds for reproducibility
rand('seed', 0) ;    % (newer MATLAB releases use rng(0) instead)

NoOfData = 8000 ; % Set no of data points used for training
Order = 32 ; % Set the adaptive filter order

Lambda = 0.98 ; % Set the forgetting factor
Delta = 0.001 ; % R initialized to Delta*I, so P = inv(R) starts as (1/Delta)*I

x = randn(NoOfData, 1) ;% Input assumed to be white
h = rand(Order, 1) ; % System picked randomly
d = filter(h, 1, x) ; % Generate output (desired signal)

% Initialize RLS

P = eye(Order) / Delta ;   % P = inv(R), initialized to (1/Delta)*I
w = zeros ( Order, 1 ) ;

% RLS Adaptation

for n = Order : NoOfData

    % Regressor: the Order most recent input samples, newest first
    u = x(n:-1:n-Order+1) ;
    % Gain vector: K = P*u / (Lambda + u'*P*u)
    pi_ = u' * P ;
    k = Lambda + pi_ * u ;
    K = pi_'/k;
    % A priori estimation error and weight update
    e(n) = d(n) - w' * u ;
    w = w + K * e(n) ;
    % Update the inverse correlation matrix P
    PPrime = K * pi_ ;
    P = ( P - PPrime ) / Lambda ;
    % Norm of the weight estimation error
    w_err(n) = norm(h - w) ;

end

% Plot results

figure ;
plot(20*log10(abs(e))) ;
title('Learning Curve') ;
xlabel('Iteration Number') ;
ylabel('Output Estimation Error in dB') ;

figure ;
semilogy(w_err) ;
title('Weight Estimation Error') ;
xlabel('Iteration Number') ;
ylabel('Weight Error Norm') ;