Resource Description
[alphabet,targets] = prprob;   % character recognition problem definition
S1 = 10;                       % number of hidden-layer neurons
[R,Q]  = size(alphabet);       % number of input variables
[S2,Q] = size(targets);        % number of output variables
P = alphabet;                  % P is the input matrix
net = newff(minmax(P),[S1 S2],{'logsig' 'logsig'},'traingdx');  % create the BP network
net.LW{2,1} = net.LW{2,1}*0.01;   % scale down the output-layer weights
net.b{2} = net.b{2}*0.01;         % scale down the output-layer biases
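The prprob data set defines each letter A-Z as a 5x7 binary bit map flattened into a 35-element column of alphabet, with targets as a 26x26 identity (one-hot) matrix. A minimal sketch for inspecting one training character, assuming the usual row-major, 5-wide by 7-tall layout used by prprob:

% Sketch: view one training character from prprob (assumes 5x7 row-major layout)
[alphabet,targets] = prprob;   % 35x26 inputs, 26x26 one-hot targets
letterA = alphabet(:,1);       % first column is the letter 'A'
bmp = reshape(letterA,5,7)';   % back to a 7-row by 5-column bit map
disp(bmp)                      % the 1s trace the shape of the letter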
Code Snippet and File Information
% Character recognition with a neural network
close all
clear
echo on
clc
% English character recognition using a BP neural network
% NEWFF - create a new BP neural network
% TRAIN - train the neural network
% SIM   - simulate (test) the neural network
pause % press any key to start
clc
% Load the training samples
[alphabet,targets] = prprob;
[R1,Q1] = size(alphabet);
[R2,Q2] = size(targets);
pause
clc
%生成神經(jīng)網(wǎng)絡(luò)
S1=10
S2=R2
net=newff(minmax(alphabet)[S1?S2]{‘logsig‘?‘logsig‘}‘traingdx‘);
net.LW{21}=net.LW{21}*0.01;
pause
clc
% Train the neural network
% 1. Train with the noise-free (ideal) samples
P = alphabet;
T = targets;
%net.performFcn = 'sse';
net.trainParam.goal = 0.1;
net.trainParam.show = 20;
net.trainParam.epochs = 5000;
net.trainParam.mc = 0.095;
[net,tr] = train(net,P,T);
pause
clc
% 2. Train with samples containing different levels of noise
netn = net;
netn.trainParam.goal = 0.6;
netn.trainParam.epochs = 300;
T = [targets targets targets targets];
for pass = 1:10
    fprintf('Pass = %.0f\n',pass);
    P = [alphabet, alphabet, (alphabet + randn(R1,Q1)*0.1), (alphabet + randn(R1,Q1)*0.2)];
    [netn,tr] = train(netn,P,T);
    echo off
end
echo on
pause
% 3. Retrain with the ideal samples once more
netn.trainParam.goal = 0.1;
netn.trainParam.epochs = 500;
netn.trainParam.show = 5;
P = alphabet;
T = targets;
[netn,tr] = train(netn,P,T);
% Training
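The listing stops after training and never reaches the SIM step announced in the header comments. A minimal test sketch, assuming the trained netn and the prprob data from above; the noise level, the compet/vec2ind decoding, and the accuracy count are illustrative choices, not part of the original file:

% Sketch: test the trained network on noisy characters (illustrative only)
noise = 0.2;                              % assumed noise level
Ptest = alphabet + randn(R1,Q1)*noise;    % corrupt each of the 26 letters once
A = sim(netn,Ptest);                      % network outputs, one column per letter
guesses = vec2ind(compet(A));             % winning output neuron = recognized letter
correct = sum(guesses == (1:Q1));         % Q1 = 26 letters in prprob
fprintf('Recognized %d of %d noisy letters\n',correct,Q1);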