ann.mdl
"[0.03 0.003 0.1]|zeros(1*(50*(50+4+4)+2),1)|0.05"
Port {
PortNumber 1
Name "nyn"
TestPoint off
RTWStorageClass "Auto"
}
System {
Name "DCSL (Matlab)"
Location [122, 291, 431, 467]
Open off
ModelBrowserVisibility off
ModelBrowserWidth 200
ScreenColor "automatic"
PaperOrientation "landscape"
PaperPositionMode "auto"
PaperType "usletter"
PaperUnits "inches"
ZoomFactor "100"
AutoZoom on
Block {
BlockType Inport
Name "x"
Position [30, 48, 60, 62]
Port "1"
PortWidth "-1"
SampleTime "-1"
DataType "auto"
SignalType "auto"
Interpolate on
}
Block {
BlockType Inport
Name "e"
Position [30, 73, 60, 87]
Port "2"
PortWidth "-1"
SampleTime "-1"
DataType "auto"
SignalType "auto"
Interpolate on
}
Block {
BlockType Inport
Name "LE"
Position [30, 98, 60, 112]
Port "3"
PortWidth "-1"
SampleTime "-1"
DataType "auto"
SignalType "auto"
Interpolate on
}
Block {
BlockType Demux
Name "Demux1"
Ports [1, 2, 0, 0, 0]
Position [210, 40, 215, 115]
BackgroundColor "black"
ShowName off
Outputs "[Dim(2) (2+nrlat(1)*(nrlat(1)+Dim(1)+4))*Di"
"m(2)]"
}
Block {
BlockType Mux
Name "Mux5"
Ports [3, 1, 0, 0, 0]
Position [90, 40, 95, 120]
ShowName off
Inputs "[Dim(1) Dim(2) 1]"
DisplayOption "bar"
}
Block {
BlockType "S-Function"
Name "S-Function"
Ports [1, 1, 0, 0, 0]
Position [120, 56, 185, 104]
FunctionName "dcslin4"
Parameters "Dim,nrlat,eta,S,T"
PortCounts "[]"
SFunctionModules "''"
MaskIconFrame on
MaskIconOpaque on
MaskIconRotate "none"
MaskIconUnits "autoscale"
}
Block {
BlockType Outport
Name "ys"
Position [235, 53, 265, 67]
Port "1"
OutputWhenDisabled "held"
InitialOutput "[]"
}
Block {
BlockType Outport
Name "X"
Position [235, 88, 265, 102]
Port "2"
OutputWhenDisabled "held"
InitialOutput "[]"
}
Line {
SrcBlock "LE"
SrcPort 1
DstBlock "Mux5"
DstPort 3
}
Line {
SrcBlock "e"
SrcPort 1
DstBlock "Mux5"
DstPort 2
}
Line {
SrcBlock "x"
SrcPort 1
DstBlock "Mux5"
DstPort 1
}
Line {
SrcBlock "S-Function"
SrcPort 1
DstBlock "Demux1"
DstPort 1
}
Line {
SrcBlock "Mux5"
SrcPort 1
DstBlock "S-Function"
DstPort 1
}
Line {
SrcBlock "Demux1"
SrcPort 2
DstBlock "X"
DstPort 1
}
Line {
SrcBlock "Demux1"
SrcPort 1
DstBlock "ys"
DstPort 1
}
}
}
Block {
BlockType SubSystem
Name "EBPA"
Ports [3, 2, 0, 0, 0]
Position [265, 49, 335, 131]
BackgroundColor "red"
ShowPortLabels on
MaskType "EBPA NN"
MaskDescription " Self-Adaptive Discrete-Time EBPA Neural Netwo"
"rk"
MaskHelp "<p>\n This Neural Network is used to adaptivel"
"y approximate\n a (possibly nonlinear) vector field y=f(x),\n with the in"
"put vector x being a function of time.\n</p>\n<p>\n The first input is x. <b"
"r>\n The second input is the error signal (i.e. e=y-ys). <br>\n The third i"
"nput is the learning enable: <br>\n with LE=1 the learning is enabled, "
"\n with LE=0 the learning is disabled.\n</p>\n<p>\n The first output i"
"s the learned function ys(x).<br>\n The second output is the states matrix r"
"eshaped columnwise.\n</p>\n<p>\n The first parameter in the mask is a vector"
" containing respectively \n the number of inputs Ni, the number of neurons i"
"n the hidden layer Nh,\n and the number of outputs No.<br>\n The second par"
"ameter contains the learning rates for:<br>\n 1) the weights connecting inp"
"ut and hidden layers (V)<br>\n 2) the weights connecting hidden and output "
"layers (W)<br>\n 3) the vector of parameters [Pw Uo Lo To Pv Uh Lh Th]<br>"
"\n The third parameter is the so called momentum.\n</p>\n<p>\n The initial "
"condition could be a vector of size\n 2*(Nh*(Ni+No)+4*(No+Nh)), or a scalar,"
" in the latter\n case, the scalar multiplies the weights of an \n appropria"
"te random initial condition vector.\n</p>\n<p>\n STATE VECTOR MEANING: <br>"
"\n The state is a column vector composed by 2 contiguous \n parts having bo"
"th Nh*(Ni+No)+4*(No+Nh) elements. <br> \n The states in the first part are o"
"rganized as follows: <br>\n [1..Nh*Ni] : weights connecting the input to the"
" hidden layer. <br>\n Nh*Ni+[1..No*Nh] : weights connecting the hidden layer"
" to the output layer. <br>\n Nh*Ni+No*Nh+[1..No] : hidden layer to output th"
"reshold vector (Pv).<br>\n Nh*Ni+No*Nh+No+[1..No] : upper limits of the outp"
"ut layer base functions (Uo).<br>\n Nh*Ni+No*Nh+2*No+[1..No] : lower limits "
"of the output layer base functions (Lo).<br>\n Nh*Ni+No*Nh+3*No+[1..No] : sl"
"opes of the output layer base functions (To).<br>\n Nh*Ni+No*Nh+4*No+[1..Nh]"
" : input to hidden layer threshold vector (Pw).<br>\n Nh*Ni+No*Nh+4*No+Nh+[1"
"..Nh] : upper limits of the hidden layer base functions (Uh).<br>\n Nh*Ni+No"
"*Nh+4*No+2*Nh+[1..Nh] : lower limits of the hidden layer base functions (Lh)."
"<br>\n Nh*Ni+No*Nh+4*No+3*Nh+[1..Nh] : slopes of the hidden layer base funct"
"ions (Th).<br>\n The second half of the state vector contains the states of "
"an error filter\n that somehow represents the past increments of the first h"
"alf of the state vector.\n See below in the \"state equation\" section for a"
" more detailed explanation of this.\n</p>\n<p>\n BRIEF EXPLANATION OF THE AL"
"GORITHM: <br>\n This Neural Network is essentially a 2-layered sigmoidal Neu"
"ral Network,\n in which the usual sigmoidal base function is replaced by a m"
"ore flexible\n function: <br> f(s) = L + (U-L)/(1+exp(-s/T)) <br>\n where "
"s is the (scalar) input to the function,\n L is the lower limit, U the upper"
" limit, and T the slope.<br>\n The function reduces to the usual sigmoid whe"
"n L=0, U=1 and T=1.<br>\n Two affine transformations (i.e. having the form y"
"=A*x+b) connect\n the input to the hidden layer and the hidden layer to the "
"output layer. <br>\n The learning algorithm allows the parameters A,b,L,U,T "
"for each layer to\n change. An extended gradient rule, structured according "
"to the well known\n backpropagation algorithm, is used to update the paramet"
"ers. <br>\n The acronym EBPA stands for Extended BackPropagation Algorithm, "
"\n and it is (somewhat improperly) used to refer to this kind of\n neural n"
"etwork architectures. <br>\n<br>\n OUTPUT EQUATION: <br>\n At any given tim"
"e t, if x(t) is the input vector, then the i-th \n element of the output vec"
"tor AT THE HIDDEL LAYER is:\n z(i,t) = Lh(i,t) + (Uh(i,t)-Lh(i,t))/(1+exp(-("
" V(i,t)*x(t)+Pv(i,t) )/Th(i,t)))\n where Lh(i,t) is the i-th element of the "
"lower limit vector (for the hidden layer),\n Uh(i,t) is the i-th element of "
"the upper limit vector (for the hidden layer),\n Th(i,t) is the i-th element"
" of the slope vector (for the hidden layer),\n V(i,t) is the i-th row of the"
" (input to hidden layer) weight matrix,\n and Pv(i,t) is the i-th element of"
" the (input to hidden layer) threshold vector. <br>\n The i-th element of th"
"e output vector AT THE NETWORK OUTPUT is:\n ys(i,t) = Lo(i,t) + (Uo(i,t)-Lo("
"i,t))/(1+exp(-( W(i,t)*z(t)+Pw(i,t) )/To(i,t)))\n where Lo(i,t) is the i-th "
"element of the lower limit vector (for the output layer),\n Uo(i,t) is the i"
"-th element of the upper limit vector (for the output layer),\n To(i,t) is t"
"he i-th element of the slope vector (for the output layer),\n W(i,t) is the "
"i-th row of the (hidden layer to output) weight matrix,\n and Pw(i,t) is the"
" i-th element of the (hidden layer to output) threshold vector.\n<br><br>\n "
"STATE EQUATION (Learning Algorithm): <br>\n Being e(t)=y(t)-ys(t) the error "
"vector at time t, the vector X(t)\n containing the first Nh*(Ni+No)+4*(No+Nh"
") neural network states \n is updated according to an \"extended\" gradient "
"rule: <br>\n X(t+T)=X(t)-eta*(dys/dX)*e(t)+Z(t) <br>\n where eta is the lea"
"rning rate, dys/dX is the jacobian matrix,\n T is the sampling time, and Z(t"
") represents an additional \n contribution from a filtered error:<br>\n D(t"
"+1)=alpha*D(t)-eta*(dys/dX)*e(t) <br>\n Z(t)=alpha*D(t) <br>\n The filter d"
"ecay rate, alpha, is called \"momentum\", if alpha=0, \n the update law redu"
"ces to the classic gradient rule.<br>\n It can be seen Z(t) somehow represen"
"ts all the past increments of X(t).<br>\n The final part of the whole state "
"vector is D(t).\n</p>\n<p>\n The final mask parameter is the sampling time o"
"f the block, T.\n</p>\n<p>\n This block is implemented in Simulink, \n to u"
"se it you should have the smxl library in your path. \n For further referenc"
"e see some papers on the backpropagation \n algorithm applied to multilayer "
"sigmoidal neural networks.<br>\n</p>\n<p>\n Giampiero Campa, June 21 2003\n<"
"/p>"
MaskPromptString "[ni nh no]|[etaV etaW etaP]|Momentum (alpha)|In"
"itial Conditions|Sample Time"
MaskStyleString "edit,edit,edit,edit,edit"
MaskTunableValueString "on,on,on,on,on"
MaskCallbackString "||||"
MaskEnableString "on,on,on,on,on"
MaskVisibilityString "on,on,on,on,on"
MaskVariables "dim=@1;eta=@2;alp=@3;ini=@4;T=@5;"
MaskInitialization "ni=dim(1);nh=dim(2);no=dim(3);\nns=nh*ni+no*nh+"
"4*no+4*nh;  % elements in each half of the state vector\netaV=eta(1);\neta"
"W=eta(2);\netaP=eta(3);\n\nif size(ini)==[1 1],  % scalar: scale a random i"
"nitial condition\n V0 = ini*reshape( rand(nh,ni)-0.5 ,nh*ni,1 );  % input"
"-to-hidden weights (V)\n W0 = ini*reshape( rand(no,nh)-0.5 ,no*nh,1 );  %"
" hidden-to-output weights (W)\n Gm0 = ini*(rand(no,1) - 0.5);  % hidden-t"
"o-output thresholds (Pw)\n Uo0= ones(no,1);  % output-layer upper limits "
"(Uo)\n Lo0=-1*ones(no,1);  % output-layer lower limits (Lo)\n To0= ones(n"
"o,1);  % output-layer slopes (To)\n Te0 = ini*(rand(nh,1) - 0.5);  % inpu"
"t-to-hidden thresholds (Pv)\n Uh0= ones(nh,1);  % hidden-layer upper limi"
"ts (Uh)\n Lh0=-1*ones(nh,1);  % hidden-layer lower limits (Lh)\n Th0= one"
"s(nh,1);  % hidden-layer slopes (Th)\n\n x0=[V0;W0;Gm0;Uo0;Lo0;To0;Te0;Uh0"
";Lh0;Th0];  % first half of the state\n d0=x0*0;  % error-filter states ("
"second half), initially zero\n\nelseif size(ini)==[2*ns 1],  % full-length"
" vector: use directly\n x0=ini(1:ns);\n d0=ini(ns+1:2*ns);\nelse\n warning"
"(['The initial condition size must be 1 by 1 or ' mat2str(2*ns) ' by 1'])"
";\nend\n"
MaskIconFrame on
MaskIconOpaque on
MaskIconRotate "none"
MaskIconUnits "autoscale"
MaskValueString "[4 10 1]|[0.003 0.003 0.003]/10|0.01|1|0.05"
System {
Name "EBPA"
Location [38, 57, 988, 900]
Open off
ModelBrowserVisibility off
ModelBrowserWidth 200
ScreenColor "automatic"
PaperOrientation "landscape"
PaperPositionMode "auto"
PaperType "usletter"
PaperUnits "inches"
ZoomFactor "100"
AutoZoom on
Block {
BlockType Inport
Name "x"
Position [65, 413, 95, 427]
Port "1"
PortWidth "-1"
SampleTime "-1"
DataType "auto"
SignalType "auto"
Interpolate on
}
Block {
BlockType Inport
Name "e"
Position [50, 558, 80, 572]
NamePlacement "alternate"
Port "2"
PortWidth "-1"
SampleTime "-1"
DataType "auto"
SignalType "auto"
Interpolate on
}
Block {
BlockType Inport
Name "LE"
Position [660, 787, 690, 803]
Port "3"
PortWidth "-1"
SampleTime "-1"
DataType "auto"
SignalType "auto"
Interpolate on
}
Block {
BlockType SubSystem
Name "1/z"
Ports [11, 11, 0, 0, 0]
Position [720, 630, 795, 810]
NamePlacement "alternate"
ShowName off
ShowPortLabels on
System {
Name "1/z"
Location [249, 83, 1015, 520]
Open off
ModelBrowserVisibility off
ModelBrowserWidth 200
ScreenColor "automatic"
PaperOrientation "landscape"
PaperPositionMode "auto"
PaperType "usletter"
PaperUnits "inches"
ZoomFactor "100"
AutoZoom on
Block {
BlockType Inport
Name "DV"
Position [20, 58, 50, 72]
Port "1"
PortWidth "-1"
SampleTime "-1"
DataType "auto"
SignalType "auto"
Interpolate on
}
Block {
BlockType Inport
Name "DW"
Position [20, 93, 50, 107]
Port "2"
PortWidth "-1"
SampleTime "-1"
DataType "auto"
SignalType "auto"
Interpolate on
}