* %
* % Training occurs according to the TRAINLM's training parameters
* % shown here with their default values:
* % net.trainParam.epochs 100 Maximum number of epochs to train
* % net.trainParam.goal 0 Performance goal
* % net.trainParam.max_fail 5 Maximum validation failures
* % net.trainParam.mem_reduc 1 Factor to use for the memory/speed trade-off.
* % net.trainParam.min_grad 1e-10 Minimum performance gradient
* % net.trainParam.mu 0.001 Initial Mu
* % net.trainParam.mu_dec 0.1 Mu decrease factor
* % net.trainParam.mu_inc 10 Mu increase factor
* % net.trainParam.mu_max 1e10 Maximum Mu
* % net.trainParam.show 25 Epochs between displays (NaN for no displays)
* % net.trainParam.time inf Maximum time to train in seconds
* %
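* % For example, any of these defaults can be overridden on a network
* % whose NET.trainFcn is 'trainlm' before calling TRAIN (a sketch; the
* % network NET is assumed to already exist):
* %
* %   net.trainParam.epochs = 300;   % allow more iterations than the default
* %   net.trainParam.goal = 1e-5;    % stop once performance reaches 1e-5
* %   net.trainParam.show = 10;      % display progress every 10 epochs
* %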
* % Dimensions for these variables are:
* % Pd - NlxNixTS cell array, each element Pd{i,j,ts} is a DijxQ matrix.
* % Tl - NlxTS cell array, each element Tl{i,ts} is a VixQ matrix.
* % Ai - NlxLD cell array, each element Ai{i,k} is an SixQ matrix.
* % Where
* % Ni = net.numInputs
* % Nl = net.numLayers
* % LD = net.numLayerDelays
* % Ri = net.inputs{i}.size
* % Si = net.layers{i}.size
* % Vi = net.targets{i}.size
* % Dij = Ri * length(net.inputWeights{i,j}.delays)
* %
* % If VV or TV is not [], it must be a structure of vectors:
* % VV.PD, TV.PD - Validation/test delayed inputs.
* % VV.Tl, TV.Tl - Validation/test layer targets.
* % VV.Ai, TV.Ai - Validation/test initial layer delay conditions.
* % VV.Q, TV.Q - Validation/test batch size.
* % VV.TS, TV.TS - Validation/test time steps.
* % Validation vectors are used to stop training early if the network
* % performance on the validation vectors fails to improve or remains
* % the same for MAX_FAIL epochs in a row. Test vectors are used as
* % a further check that the network is generalizing well, but do not
* % have any effect on training.
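* %
* % For illustration, with the TRAIN-level calling convention a
* % validation/test split might be passed as follows (a sketch; P, T,
* % Pv, Tv, Pt, and Tt are assumed, pre-existing training, validation,
* % and test data; see TRAIN for the exact signature):
* %
* %   VV.P = Pv;  VV.T = Tv;            % validation set for early stopping
* %   TV.P = Pt;  TV.T = Tt;            % test set, monitored only
* %   [net,tr] = train(net,P,T,[],[],VV,TV);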
* %
* % TRAINLM(CODE) returns useful information for each CODE string:
* % 'pnames' - Names of training parameters.
* % 'pdefaults' - Default training parameters.
* %
* % Network Use
* %
* % You can create a standard network that uses TRAINLM with
* % NEWFF, NEWCF, or NEWELM.
* %
* % To prepare a custom network to be trained with TRAINLM:
* % 1) Set NET.trainFcn to 'trainlm'.
* % This will set NET.trainParam to TRAINLM's default parameters.
* % 2) Set NET.trainParam properties to desired values.
* %
* % In either case, calling TRAIN with the resulting network will
* % train the network with TRAINLM.
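* %
* % For example (a minimal sketch; P and T are assumed input and target
* % data):
* %
* %   net = newff(minmax(P),[5 1],{'tansig','purelin'},'trainlm');
* %   net.trainParam.epochs = 200;   % step 2: adjust defaults as desired
* %   net = train(net,P,T);          % TRAIN now trains with TRAINLM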
* %
* % See NEWFF, NEWCF, and NEWELM for examples.
* %
* % Algorithm
* %
* % TRAINLM can train any network as long as its weight, net input,
* % and transfer functions have derivative functions.
* %
* % Backpropagation is used to calculate the Jacobian jX of performance
* % PERF with respect to the weight and bias variables X. Each
* % variable is adjusted according to Levenberg-Marquardt,
* %
* % jj = jX' * jX
* % je = jX' * E
* % dX = -(jj+I*mu) \ je
* %
* % where E is all errors and I is the identity matrix.
* %
* % The adaptive value MU is multiplied by MU_INC until the change above
* % results in a reduced performance value. The change is then applied
* % to the network, and MU is multiplied by MU_DEC.
* %
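* % In outline, one iteration of this MU adaptation amounts to the
* % following (a sketch only; J, E, X, and the perf() evaluation are
* % illustrative stand-ins for quantities TRAINLM computes internally):
* %
* %   while mu <= mu_max
* %     dX = -(J'*J + mu*eye(size(J,2))) \ (J'*E);
* %     if perf(X + dX) < perf(X)     % step helped: accept and relax
* %       X = X + dX;  mu = mu * mu_dec;  break
* %     else                          % step hurt: increase damping
* %       mu = mu * mu_inc;
* %     end
* %   end
* %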
* % The parameter MEM_REDUC indicates how to trade memory for speed
* % when calculating the Jacobian jX. If MEM_REDUC is 1, then TRAINLM
* % runs the fastest but can require a lot of memory. Increasing
* % MEM_REDUC to 2 cuts some of the required memory in half but slows
* % TRAINLM somewhat. Higher values continue to decrease the amount of
* % memory needed and increase training times.
* %
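* % Conceptually, the savings come from accumulating jj and je over
* % blocks of Jacobian rows rather than forming jX all at once (an
* % illustration, not the exact implementation; Jk and Ek denote the
* % k-th block of Jacobian rows and the matching errors):
* %
* %   jj = 0;  je = 0;
* %   for k = 1:mem_reduc
* %     jj = jj + Jk'*Jk;             % only one block in memory at a time
* %     je = je + Jk'*Ek;
* %   end
* %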
* % Training stops when any of these conditions occurs:
* % 1) The maximum number of EPOCHS (repetitions) is reached.
* % 2) The maximum amount of TIME has been exceeded.
* % 3) Performance has been minimized to the GOAL.
* % 4) The performance gradient falls below MIN_GRAD.
* % 5) MU exceeds MU_MAX.
* % 6) Validation performance has increased more than MAX_FAIL times
* % since the last time it decreased (when using validation).
* %
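* % Equivalently, training proceeds only while all of the following hold
* % (a restatement of the list above; the variable names epoch, elapsed,
* % perf, gradient, and fails are illustrative):
* %
* %   keepTraining = (epoch < epochs) & (elapsed < time) & ...
* %                  (perf > goal) & (gradient >= min_grad) & ...
* %                  (mu <= mu_max) & (fails <= max_fail);
* %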
* % Unlike other training functions, TRAINLM assumes the network has
* % the MSE performance function. This is a basic assumption of the
* % Levenberg-Marquardt algorithm.
* %
* % See also NEWFF, NEWCF, TRAINGD, TRAINGDM, TRAINGDA, TRAINGDX.
*
* % Mark Beale, 11-31-97, ODJ 11/20/98
* % Copyright 1992-2002 The MathWorks, Inc.
* % $Revision: 1.20 $ $Date: 2002/03/26 00:18:45 $
*
* % **[ NNT2 Support ]**
* if ~isa(net,'struct') & ~isa(net,'char')
*/
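/*
 * Note: everything below is MATLAB Compiler (mcc) output rather than
 * hand-written C.  As a reading aid (based on the mcc runtime, not on
 * anything specific to this file): mclVa reads a bound workspace
 * variable, mlfTobool converts an mxArray to a C truth value,
 * mclSwitchCompare performs MATLAB switch/case matching, mlfAssign
 * stores a result, and temporaries are released with mxDestroyArray.
 */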
{
mxArray * a_
= mclInitialize(mclNot(mlfIsa(mclVa(net, "net"), _mxarray12_)));
if (mlfTobool(a_)
&& mlfTobool(
mclAnd(a_, mclNot(mlfIsa(mclVa(net, "net"), _mxarray0_))))) {
mxDestroyArray(a_);
/*
* nntobsu('trainlm','Use NNT2FF and TRAIN to update and train your network.')
*/
mlfNntobsu(_mxarray2_, _mxarray4_, NULL);
/*
* switch(nargin)
*/
{
mxArray * v_ = mclInitialize(mlfScalar(nargin_));
if (mclSwitchCompare(v_, _mxarray6_)) {
/*
* case 5, [net,tr,Ac,El] = tlm1(net,Pd,Tl,Ai,Q); return
*/
mlfAssign(
&net,
mlfTlm1(
tr,
Ac,
El,
mclVa(net, "net"),
mclVa(Pd, "Pd"),
mclVa(Tl, "Tl"),
mclVa(Ai, "Ai"),
mclVa(Q, "Q"),
NULL));
mxDestroyArray(v_);
goto return_;
/*
* case 6, [net,tr,Ac,El] = tlm1(net,Pd,Tl,Ai,Q,TS); return
*/
} else if (mclSwitchCompare(v_, _mxarray7_)) {
mlfAssign(
&net,
mlfTlm1(
tr,
Ac,
El,
mclVa(net, "net"),
mclVa(Pd, "Pd"),
mclVa(Tl, "Tl"),
mclVa(Ai, "Ai"),
mclVa(Q, "Q"),
mclVa(TS, "TS")));
mxDestroyArray(v_);
goto return_;
/*
* case 8, [net,tr,Ac,El,v5,v6] = tlm2(net,Pd,Tl,Ai,Q,TS,VV,TV); return
*/
} else if (mclSwitchCompare(v_, _mxarray8_)) {
mlfAssign(
&net,
mlfTlm2(
tr,
Ac,
El,
v5,
v6,
mclVa(net, "net"),
mclVa(Pd, "Pd"),
mclVa(Tl, "Tl"),
mclVa(Ai, "Ai"),
mclVa(Q, "Q"),
mclVa(TS, "TS"),
mclVa(VV, "VV"),
mclVa(TV, "TV"),
NULL));
mxDestroyArray(v_);
goto return_;
/*
* case 9, [net,tr,Ac,El,v5,v6] = tlm2(net,Pd,Tl,Ai,Q,TS,VV,TV,v9); return
*/
} else if (mclSwitchCompare(v_, _mxarray9_)) {
mlfAssign(
&net,
mlfTlm2(
tr,
Ac,
El,
v5,
v6,
mclVa(net, "net"),
mclVa(Pd, "Pd"),
mclVa(Tl, "Tl"),
mclVa(Ai, "Ai"),
mclVa(Q, "Q"),
mclVa(TS, "TS"),
mclVa(VV, "VV"),
mclVa(TV, "TV"),
mclVa(v9, "v9")));
mxDestroyArray(v_);
goto return_;
/*
* case 11, [net,tr,Ac,El,v5,v6,v7,v8] = tlm3(net,Pd,Tl,Ai,Q,TS,VV,TV,v9,v10,v11); return
*/
} else if (mclSwitchCompare(v_, _mxarray10_)) {
mlfAssign(
&net,
mlfTlm3(
tr,
Ac,
El,
v5,
v6,
v7,
v8,
mclVa(net, "net"),
mclVa(Pd, "Pd"),
mclVa(Tl, "Tl"),
mclVa(Ai, "Ai"),
mclVa(Q, "Q"),
mclVa(TS, "TS"),
mclVa(VV, "VV"),
mclVa(TV, "TV"),
mclVa(v9, "v9"),
mclVa(v10, "v10"),
mclVa(v11, "v11"),
NULL));
mxDestroyArray(v_);
goto return_;
/*
* case 12, [net,tr,Ac,El,v5,v6,v7,v8] = tlm3(net,Pd,Tl,Ai,Q,TS,VV,TV,v9,v10,v11,v12); return
*/
} else if (mclSwitchCompare(v_, _mxarray11_)) {
mlfAssign(
&net,
mlfTlm3(
tr,
Ac,
El,
v5,
v6,
v7,
v8,
mclVa(net, "net"),
mclVa(Pd, "Pd"),
mclVa(Tl, "Tl"),
mclVa(Ai, "Ai"),
mclVa(Q, "Q"),
mclVa(TS, "TS"),
mclVa(VV, "VV"),
mclVa(TV, "TV"),
mclVa(v9, "v9"),
mclVa(v10, "v10"),
mclVa(v11, "v11"),
mclVa(v12, "v12")));
mxDestroyArray(v_);
goto return_;
/*
* end
*/
}
mxDestroyArray(v_);
}
} else {
mxDestroyArray(a_);
}
/*
* end
*/
}
/*
*
* % FUNCTION INFO
* % =============
*
* if isstr(net)
*/
if (mlfTobool(mlfIsstr(mclVa(net, "net")))) {
/*
* switch (net)
*/
mxArray * v_ = mclInitialize(mclVa(net, "net"));
if (mclSwitchCompare(v_, _mxarray14_)) {
/*
* case 'pnames',
* net = fieldnames(trainlm('pdefaults'));
*/
mlfAssign(
&net,
mlfFieldnames(
mlfTrainlm(
NULL,
NULL,
NULL,
NULL,
NULL,
NULL,
NULL,
_mxarray16_,
NULL,
NULL,
NULL,
NULL,
NULL,
NULL,
NULL,
NULL,
NULL,
NULL,
NULL)));
/*
* case 'pdefaults',
*/
} else if (mclSwitchCompare(v_, _mxarray16_)) {
/*
* trainParam.epochs = 100;
*/
mlfIndexAssign(&trainParam, ".epochs", _mxarray18_);
/*
* trainParam.goal = 0;
*/
mlfIndexAssign(&trainParam, ".goal", _mxarray19_);
/*
* trainParam.max_fail = 5;
*/
mlfIndexAssign(&trainParam, ".max_fail", _mxarray6_);
/*
* trainParam.mem_reduc = 1;
*/
mlfIndexAssign(&trainParam, ".mem_reduc", _mxarray20_);
/*
* trainParam.min_grad = 1e-10;
*/
mlfIndexAssign(&trainParam, ".min_grad", _mxarray21_);
/*
* trainParam.mu = 0.001;
*/
mlfIndexAssign(&trainParam, ".mu", _mxarray22_);
/*
* trainParam.mu_dec = 0.1;
*/
mlfIndexAssign(&trainParam, ".mu_dec", _mxarray23_);
/*
* trainParam.mu_inc = 10;
*/
mlfIndexAssign(&trainParam, ".mu_inc", _mxarray24_);
/*
* trainParam.mu_max = 1e10;
*/
mlfIndexAssign(&trainParam, ".mu_max", _mxarray25_);
/*
* trainParam.show = 25;
*/
mlfIndexAssign(&trainParam, ".show", _mxarray26_);
/*
* trainParam.time = inf;
*/
mlfIndexAssign(&trainParam, ".time", _mxarray27_);
/*
* net = trainParam;
*/
mlfAssign(&net, mclVv(trainParam, "trainParam"));
/*
* otherwise,
*/
} else {
/*
* error('Unrecognized code.')
*/
mlfError(_mxarray28_, NULL);
/*
* end
*/
}
mxDestroyArray(v_);
/*
* return
*/
goto return_;