Hi everyone. I trained a neural network with the Levenberg-Marquardt algorithm using the Neural Network Toolbox in MATLAB 2014.
At the end it gave me a file called a "MATLAB function" that contains the weight and bias matrices.
I want to know what my estimation function looks like. For example:
Y = ?
What should go in place of the question mark?
Here is the code of the generated MATLAB function file:
function [Y,Xf,Af] = myNeuralNetworkFunction(X,~,~)
%MYNEURALNETWORKFUNCTION neural network simulation function.
%
% Generated by Neural Network Toolbox function genFunction, 27-Oct-2015 05:00.
%
% [Y] = myNeuralNetworkFunction(X,~,~) takes these arguments:
%
% X = 1xTS cell, 1 inputs over TS timesteps
% Each X{1,ts} = 4xQ matrix, input #1 at timestep ts.
%
% and returns:
% Y = 1xTS cell of 1 outputs over TS timesteps.
% Each Y{1,ts} = 1xQ matrix, output #1 at timestep ts.
%
% where Q is number of samples (or series) and TS is the number of timesteps.
%#ok<*RPMT0>
% ===== NEURAL NETWORK CONSTANTS =====
% Input 1
x1_step1_xoffset = [1;0.3;3.37;17.1];
x1_step1_gain = [0.00549450549450549;5;1.08108108108108;0.0520833333333333];
x1_step1_ymin = -1;
% Layer 1
b1 = [-4.3357181768066573;1.1376287230430129;5.3900006700201963;-1.7994150277169461;0.4436399429664884;-7.2414338519156907;-0.027778983903903653;6.977753555996304;7.3489808669537702;-10.424105785027471;2.2151000706148722;-1.1993938299075555;8.8987736101323645;-10.366046189559293;1.574408387131526;3.3797357619881376;-2.8269030769417749;-0.96628047185925636;-1.6215814104194888;-0.73151610783020238;1.1554888469975126];
IW1_1 = [1.6277460869389437 -0.874809246857847 6.3695239344806307 4.9285263699303021;-2.1101508930681305 -3.6642863016482439 -5.3642169068569059 14.553020601290301;3.9867112979713073 -2.0640813897609291 1.2365090323694861 -16.36389898415694;-2.0962854794237731 5.992936135846743 -0.96639457910018656 0.081823178818178466;-2.0889978826596574 1.2024095377876745 -4.5813091800980148 1.2626347932853863;0.058720482220370768 -1.621978326947743 8.9503637226553145 -7.8830567873527864;4.577132063585359 1.4247532855602125 -9.5267502654128702 -5.664923619691776;-10.157121932481852 -7.3025884743756526 -11.226554075604669 -13.448042913124263;8.6484160995044022 6.403745508273528 3.3745692129034368 -3.018735951097721;-11.341441210409334 1.4895003938804541 3.9917388196738282 -4.6652344848937686;1.9036465384142329 0.5118147818250256 -4.6002196838650322 5.4509108216419717;4.0669929777758584 -2.2902138612887413 2.2391861266542694 -6.1004107544701487;13.763737260726634 -0.27737849357305844 2.681067168664963 -0.48604749048632079;-14.461210105723799 -0.89179464386601937 -5.8458820636875446 4.5775430257796561;12.825347226056479 4.7257355938469106 2.1403229778736361 -2.0556459581658766;3.7097779154322716 -9.6283028800109864 1.540724915244051 0.51296154757178603;-2.7098864323627914 -1.998561880124168 2.6156272324599557 -1.0497564100132064;0.28812418381236415 -2.959005038559428 1.79996454294222 0.42560780245931429;-2.8773970239896642 0.76145538252222089 -0.89039153865876008 1.9461109130923966;-1.0924947832971843 1.9247492293016486 -1.9165790260762801 1.4092342302571055;-0.48413996660949815 -0.19452273833685838 0.10041090781943215 5.1313934185371721];
% Layer 2
b2 = 0.25544812618286344;
LW2_1 = [0.26950144119551178 -0.17514499113388571 -0.096081006048322481 1.4446934417981385 0.34848666147993335 0.65987554057797393 0.073809111973780733 -0.098653797662849735 -0.19340767120166275 -0.22333982516831113 -0.22491843590305408 -0.14781917814452483 -0.081416497146157438 -0.24362342200164741 0.24402005347379435 1.1044007259062876 -0.8159536791998242 0.37347883269436993 0.97334653189449227 -1.1754971013410391 0.20047649176908264];
% Output 1
y1_step1_ymin = -1;
y1_step1_gain = 0.0433745391455216;
y1_step1_xoffset = 12.82;
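% (These sizes imply a 4-21-1 network: 4 inputs, 21 tansig hidden
% neurons, and 1 linear output.)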
% ===== SIMULATION ========
% Format Input Arguments
isCellX = iscell(X);
if ~isCellX, X = {X}; end;
% Dimensions
TS = size(X,2); % timesteps
if ~isempty(X)
Q = size(X{1},2); % samples/series
else
Q = 0;
end
% Allocate Outputs
Y = cell(1,TS);
% Time loop
for ts=1:TS
% Input 1
Xp1 = mapminmax_apply(X{1,ts},x1_step1_gain,x1_step1_xoffset,x1_step1_ymin);
% Layer 1
a1 = tansig_apply(repmat(b1,1,Q) + IW1_1*Xp1);
% Layer 2
a2 = repmat(b2,1,Q) + LW2_1*a1;
% Output 1
Y{1,ts} = mapminmax_reverse(a2,y1_step1_gain,y1_step1_xoffset,y1_step1_ymin);
end
% Final Delay States
Xf = cell(1,0);
Af = cell(2,0);
% Format Output Arguments
if ~isCellX, Y = cell2mat(Y); end
end
% ===== MODULE FUNCTIONS ========
% Map Minimum and Maximum Input Processing Function
function y = mapminmax_apply(x,settings_gain,settings_xoffset,settings_ymin)
y = bsxfun(@minus,x,settings_xoffset);
y = bsxfun(@times,y,settings_gain);
y = bsxfun(@plus,y,settings_ymin);
end
% Sigmoid Symmetric Transfer Function
function a = tansig_apply(n)
a = 2 ./ (1 + exp(-2*n)) - 1;
end
% Map Minimum and Maximum Output Reverse-Processing Function
function x = mapminmax_reverse(y,settings_gain,settings_xoffset,settings_ymin)
x = bsxfun(@minus,y,settings_ymin);
x = bsxfun(@rdivide,x,settings_gain);
x = bsxfun(@plus,x,settings_xoffset);
end
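In terms of the generated code above, the estimation function is already spelled out: the input is normalized with mapminmax, passed through one tansig (tanh) hidden layer, combined linearly in the output layer, and the result is de-normalized. So for a single 4x1 input column x, the "Y = ?" works out to

Y = (LW2_1*tanh(IW1_1*((x - x1_step1_xoffset).*x1_step1_gain + x1_step1_ymin) + b1) + b2 - y1_step1_ymin)./y1_step1_gain + y1_step1_xoffset

(note that tansig(n) = 2./(1 + exp(-2*n)) - 1 is identical to tanh(n)). A minimal usage sketch, assuming the generated file is on the MATLAB path; the sample input values below are hypothetical and only for illustration:

% Hypothetical 4x1 input sample (one value per network input).
x = [100; 0.5; 4.0; 25.0];

% Evaluate the trained network through the generated function.
Y = myNeuralNetworkFunction(x);

% The same result, written out with the constants from the file above:
%   xn = (x - x1_step1_xoffset).*x1_step1_gain + x1_step1_ymin;  % normalize input
%   a1 = tanh(IW1_1*xn + b1);                                    % 21 tansig hidden neurons
%   a2 = LW2_1*a1 + b2;                                          % linear output layer
%   Y  = (a2 - y1_step1_ymin)./y1_step1_gain + y1_step1_xoffset; % reverse output normalization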