Resource Introduction
Simulations of the SD (steepest descent) and LMS algorithms, written while studying Wiener filtering theory and adaptive algorithms, run under identical conditions. The scripts plot the SD algorithm's learning curve and weight-evolution curve, as well as the LMS learning curve averaged over multiple independent runs and the corresponding weight-coefficient update curves. Reference textbook: Modern Digital Signal Processing and Its Applications (He Zishu, Xia Wei).
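The snippet below shows only the LMS script; the steepest-descent part lives in steepest_descent_method.m (see the file list at the end). As a rough orientation, a minimal SD sketch in the same spirit, assuming the autocorrelation matrix R and the cross-correlation vector p are known exactly (all numeric values below are illustrative placeholders, not taken from the archive), could look like this:

% Steepest-descent (SD) sketch -- unlike LMS, SD uses the exact statistics
% R (autocorrelation matrix) and p (cross-correlation vector) instead of
% instantaneous estimates. All numeric values here are assumed examples.
M_rank   = 2;                     % filter order
u_step   = 0.02;                  % step size, must satisfy 0 < u < 2/max(eig(R))
iters    = 500;                   % number of iterations
R        = [1.0 0.5; 0.5 1.0];    % assumed autocorrelation matrix
p        = [0.7; 0.3];            % assumed cross-correlation vector
sigma_d2 = 1;                     % assumed variance of the desired signal
w        = zeros(M_rank, iters);  % weight history (one column per iteration)
J        = zeros(1, iters);       % learning curve J(n)
for n = 1:iters-1
    J(n)      = sigma_d2 - 2*p.'*w(:, n) + w(:, n).'*R*w(:, n); % MSE at step n
    w(:, n+1) = w(:, n) + u_step*(p - R*w(:, n));               % SD weight update
end
J(iters) = sigma_d2 - 2*p.'*w(:, iters) + w(:, iters).'*R*w(:, iters);
figure; plot(J); title('SD learning curve (sketch)');
xlabel('Iteration n'); ylabel('Mean square error');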

Code Snippet and File Information
clc
clear all
%******************************************%
%*********Speech channel model*************%
%******************************************%
% Input:
%         data              excitation signal sequence (column vector)
% Output:
%         speech_signal     output speech signal (column vector)
% Initialization
data_average = 0;      % excitation signal mean
data_variance = 0.27;  % excitation signal variance
iterations = 500;      % number of iterations
data = normrnd(data_average, sqrt(data_variance), 1, iterations).'; % generate the white noise sequence
% Plot the white noise sequence
figure(1)
subplot(311)
plot(data, 'y');
grid on
title(sprintf('White noise sequence, N(%g, %g) distributed', data_average, data_variance));
subplot(312)
R_data = xcorr(data);
plot(R_data, 'm');
grid on
title(sprintf('Autocorrelation of the N(%g, %g) noise sequence', data_average, data_variance));
subplot(313)
periodogram(data, [], 1024, 250);
title(sprintf('Power spectral density of the N(%g, %g) noise sequence', data_average, data_variance));
% End of plotting
speech_signal = filter(1, [1, 0.8458], data);
% Plot
figure(2)
subplot(211)
plot(speech_signal, 'm');
title('Speech signal sequence');
axis([-10 510 -5 5]);
% End of plotting
%******************************************%
%******Transmission signal model***********%
%******************************************%
% Input:
%         speech_signal    excitation signal sequence (column vector)
%         noise            additive channel noise (column vector)
% Output:
%         data_in_fiter    noisy speech signal after the channel (column vector)
noise_average = 0;     % noise mean
noise_variance = 0.1;  % noise variance
noise = normrnd(noise_average, sqrt(noise_variance), 1, iterations).';
data_in_fiter = filter(1, [1, -0.9458], speech_signal) + noise;
% Plot
subplot(212)
plot(data_in_fiter, 'r');
title('Noisy speech signal after the channel');
axis([-10 510 -5 5]);
% End of plotting
%******************************************%
%*************Least Mean Square************%
%******************************************%
% Input:
%         data_in_fiter     filter input sequence (column vector)
%         expct_data        desired response sequence (column vector)
%         M_rank            filter order (scalar)
%         u_step            step size (scalar)
% Output:
%         weight            filter weight matrix (M_rank x length(data_in_fiter));
%                           the number of iterations equals the input sequence length
%         erro              error sequence (length(data_in_fiter) x 1)
%         real_data         actual filter output sequence
%         mse               mean square error
% Assign input parameters
expct_data = speech_signal;
M_rank = 2;
u_step = 0.02;
% Initialization
n = 0;                                                % iteration counter
weight = zeros(M_rank, iterations);                   % weights
erro = zeros(1, iterations);                          % error
data_instantaneous = zeros(1, iterations+M_rank-1).'; % instantaneous values of the input sequence
real_data = zeros(1, iterations);                     % estimate of the desired signal, i.e. the actual output sequence
mse = zeros(1, iterations);                           % mean square error
erro(1) = expct_data(1) - real_data(1);
data_instantaneous(1:iterations, :) = flipud(data_in_fiter); % flipud stores the input in reverse order
mse(1) = erro(1)^2;
for n = 1:(iterations-1)
    data_temporary = data_instantaneous((iterations+1-n):(iterations+M_rank-n));  % tap-input vector at time n
    weight(:, n+1) = weight(:, n) + u_step*data_temporary*conj(erro(n));          % LMS update: w(n+1) = w(n) + u*u(n)*conj(e(n))
    data_temporary = data_instantaneous((iterations-n):(iterations+M_rank-n-1));  % tap-input vector at time n+1
    real_data(n+1) = weight(:, n+1)'*data_temporary;
    erro(n+1) = expct_data(n+1) - real_data(n+1);
    mse(n+1) = erro(n+1)^2;
end
% Plot
figure(3)
plot(mse, 'r');
title('LMS single-run learning curve');
xlabel('Iteration n')
ylabel('Mean square error')
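The script above is a single LMS run. The multi-run learning curve mentioned in the introduction (presumably produced by least_mean_square_ex.m, which is not included in this snippet) is obtained by repeating the experiment with independent noise realizations and averaging the squared-error sequences. A minimal sketch, assuming a hypothetical helper run_lms_once() that regenerates the signals and returns the single-run mse vector computed above:

% Ensemble-average LMS learning curve over num_runs independent experiments.
% run_lms_once() is a hypothetical helper, not part of the archive: it should
% regenerate data/noise and return the single-run squared-error vector mse.
num_runs   = 100;
iterations = 500;
mse_sum    = zeros(1, iterations);
for k = 1:num_runs
    mse_sum = mse_sum + run_lms_once(iterations);   % accumulate squared errors
end
mse_avg = mse_sum / num_runs;                       % ensemble-average MSE
figure; plot(mse_avg, 'b');
title('LMS ensemble-average learning curve (sketch)');
xlabel('Iteration n'); ylabel('Average squared error');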
 Attribute        Size  Date        Time   Name
-----------  ---------  ----------  -----  ----
 Directory          0  2019-10-31  20:34  SD&LMS\
 File            4351  2019-10-29  15:02  SD&LMS\least_mean_square.m
 File            6172  2019-10-31  20:25  SD&LMS\least_mean_square_ex.m
 File            1460  2019-10-31  20:30  SD&LMS\LMS.m
 File            3196  2019-10-31  19:18  SD&LMS\steepest_descent_method.m