/*
 * setx.c
 * From "nnToolKit", a neural-network toolkit built on top of the MATLAB
 * Neural Network Toolbox. (C source generated by the MATLAB Compiler.)
 */
/*
* MATLAB Compiler: 3.0
* Date: Sun May 13 16:47:40 2007
* Arguments: "-B" "macro_default" "-O" "all" "-O" "fold_scalar_mxarrays:on"
* "-O" "fold_non_scalar_mxarrays:on" "-O" "optimize_integer_for_loops:on" "-O"
* "array_indexing:on" "-O" "optimize_conditionals:on" "-M" "-silentsetup" "-d"
* "d:/MATLAB6p5/work/nnToolKit/src" "-B" "csglcom:nnToolKit,nnToolKit,2.0"
* "-B" "sgl" "-m" "-W" "main" "-L" "C" "-t" "-T" "link:exe" "-h"
* "libmmfile.mlib" "-W" "mainhg" "libmwsglm.mlib" "-t" "-W"
* "comhg:nnToolKit,nnToolKit,2.0" "-T" "link:lib" "-h" "libmmfile.mlib" "-i"
* "-i" "D:/MATLAB6p5/work/nnToolKit/lmnet/LmSimu.m"
* "D:/MATLAB6p5/work/nnToolKit/lmnet/LmTrain.m"
* "D:/MATLAB6p5/work/nnToolKit/sofm/SofmSimu.m"
* "D:/MATLAB6p5/work/nnToolKit/sofm/SofmTrain.m"
*/
#include "setx.h"
#include "libmatlbm.h"
/* Module-level constant holding the double scalar 1.0; it is the lower
 * bound of the "for i = 1:net.numLayers" colon expression inside Msetx. */
static mxArray * _mxarray0_;

/* One-time module setup: allocates the shared 1.0 constant above. Must run
 * before Msetx is called (mcc-generated startup code invokes it). */
void InitializeModule_setx(void) {
    _mxarray0_ = mclInitializeDouble(1.0);
}

/* Module teardown: releases the constant allocated in InitializeModule_setx. */
void TerminateModule_setx(void) {
    mxDestroyArray(_mxarray0_);
}
/* Forward declaration of the compiled implementation of setx.m (defined below). */
static mxArray * Msetx(int nargout_, mxArray * net_in, mxArray * x);

/* Table of local (sub)functions defined by setx.m; setx.m has none, so the
 * table is empty. Msetx installs it via mclSetCurrentLocalFunctionTable so
 * the MCR resolves names against this file while Msetx runs. */
_mexLocalFunctionTable _local_function_table_setx
    = { 0, (mexFunctionTableEntry *)NULL };
/*
 * The function "mlfSetx" contains the normal interface for the "setx"
 * M-function from file "d:\matlab6p5\toolbox\nnet\nnutils\setx.m" (lines
 * 1-61). This function processes any input arguments and passes them to the
 * implementation version of the function, Msetx, which appears below.
 */
/*
 * mlfSetx — normal (direct-call) C interface for the "setx" M-function.
 *
 * Establishes an MCR argument context around the call to the compiled
 * implementation Msetx (requesting exactly one output), restores the
 * previous context, and hands the single result back to the caller via
 * mlfReturnValue. NOTE(review): result ownership follows mlfReturnValue
 * semantics — the caller is expected to release it.
 */
mxArray * mlfSetx(mxArray * net_in, mxArray * x) {
    mxArray * result = NULL;
    mlfEnterNewContext(0, 2, net_in, x);
    result = Msetx(1, net_in, x);
    mlfRestorePreviousContext(0, 2, net_in, x);
    return mlfReturnValue(result);
}
/*
 * The function "mlxSetx" contains the feval interface for the "setx"
 * M-function from file "d:\matlab6p5\toolbox\nnet\nnutils\setx.m" (lines
 * 1-61). The feval machinery calls the implementation version of setx through
 * this function. This function processes any input arguments and passes them
 * to the implementation version of the function, Msetx, which appears below.
 */
/*
 * mlxSetx — feval-style dispatch interface for "setx".
 *
 * Validates the caller's output/input counts against the declared limits
 * (1 output, 2 inputs), marshals prhs into a fixed two-slot argument list
 * with NULL padding for missing trailing inputs, and forwards to Msetx
 * inside an MCR argument context. The single result is stored in plhs[0].
 */
void mlxSetx(int nlhs, mxArray * plhs[], int nrhs, mxArray * prhs[]) {
    mxArray * mprhs[2];
    mxArray * mplhs[1];
    int k;
    if (nlhs > 1) {
        mlfError(
          mxCreateString(
            "Run-time Error: File: setx Line: 1 Column: 1 The function \"setx\""
            " was called with more than the declared number of outputs (1)."),
          NULL);
    }
    if (nrhs > 2) {
        mlfError(
          mxCreateString(
            "Run-time Error: File: setx Line: 1 Column: 1 The function \"setx"
            "\" was called with more than the declared number of inputs (2)."),
          NULL);
    }
    mplhs[0] = NULL;
    /* Copy the supplied inputs; pad any missing trailing slots with NULL. */
    for (k = 0; k < 2; ++k) {
        mprhs[k] = (k < nrhs) ? prhs[k] : NULL;
    }
    mlfEnterNewContext(0, 2, mprhs[0], mprhs[1]);
    mplhs[0] = Msetx(nlhs, mprhs[0], mprhs[1]);
    mlfRestorePreviousContext(0, 2, mprhs[0], mprhs[1]);
    plhs[0] = mplhs[0];
}
/*
 * The function "Msetx" is the implementation version of the "setx" M-function
 * from file "d:\matlab6p5\toolbox\nnet\nnutils\setx.m" (lines 1-61). It
 * contains the actual compiled code for that M-function. It is a static
 * function and must only be called from one of the interface functions,
 * appearing above.
 */
/*
* function net=setx(net,x)
*/
/*
 * Msetx — compiled body of setx.m.
 *
 * Writes the flat vector x of weight and bias values back into the
 * network structure: for every learnable input weight, layer weight, and
 * bias (as flagged by the net.hint.*Learn matrices), the corresponding
 * slice of x (selected by the net.hint.*Ind index cell arrays) is assigned
 * into net.IW{i,j}, net.LW{i,j}, or net.b{i} respectively.
 *
 * Parameters:
 *   nargout_ - number of outputs the caller requested; only used for the
 *              final mclValidateOutput check.
 *   net_in   - neural network structure; copied into a local before any
 *              mutation (mclCopyInputArg), so the caller's array is safe.
 *   x        - vector of weight and bias values (see setx.m help below).
 *
 * Returns the updated network array.
 */
static mxArray * Msetx(int nargout_, mxArray * net_in, mxArray * x) {
    /* Install this file's (empty) local-function table for the duration
     * of the call; restored just before returning. */
    mexLocalFunctionTable save_local_function_table_
      = mclSetCurrentLocalFunctionTable(&_local_function_table_setx);
    mxArray * net = NULL;
    mxArray * j = NULL;
    mxArray * i = NULL;
    mxArray * biasInd = NULL;
    mxArray * layerWeightInd = NULL;
    mxArray * inputWeightInd = NULL;
    mxArray * biasLearn = NULL;
    mxArray * layerLearn = NULL;
    mxArray * inputLearn = NULL;
    /* Take mutable copies of both inputs before the in-place assignments. */
    mclCopyInputArg(&net, net_in);
    mclCopyArray(&x);
    /*
     * %SETX Set all network weight and bias values with a single vector.
     * %
     * %  Syntax
     * %
     * %    net = setx(net,X)
     * %
     * %  Description
     * %
     * %    This function sets a networks weight and biases to
     * %    a vector of values.
     * %
     * %    NET = SETX(NET,X)
     * %      NET - Neural network.
     * %      X   - Vector of weight and bias values.
     * %
     * %  Examples
     * %
     * %    Here we create a network with a 2-element input, and one
     * %    layer of 3 neurons.
     * %
     * %      net = newff([0 1; -1 1],[3]);
     * %
     * %    The network has six weights (3 neurons * 2 input elements)
     * %    and three biases (3 neurons) for a total of 9 weight and bias
     * %    values.  We can set them to random values as follows:
     * %
     * %      net = setx(net,rand(9,1));
     * %
     * %    We can then view the weight and bias values as follows:
     * %
     * %      net.iw{1,1}
     * %      net.b{1}
     * %
     * %  See also GETX, FORMX.
     *
     * % Mark Beale, 11-31-97
     * % Mark Beale, Updated help, 5-25-98
     * % Copyright 1992-2002 The MathWorks, Inc.
     * % $Revision: 1.8 $ $Date: 2002/03/25 16:55:02 $
     *
     * % Shortcuts
     * inputLearn = net.hint.inputLearn;
     */
    mlfAssign(&inputLearn, mlfIndexRef(mclVa(net, "net"), ".hint.inputLearn"));
    /*
     * layerLearn = net.hint.layerLearn;
     */
    mlfAssign(&layerLearn, mlfIndexRef(mclVa(net, "net"), ".hint.layerLearn"));
    /*
     * biasLearn = net.hint.biasLearn;
     */
    mlfAssign(&biasLearn, mlfIndexRef(mclVa(net, "net"), ".hint.biasLearn"));
    /*
     * inputWeightInd = net.hint.inputWeightInd;
     */
    mlfAssign(
      &inputWeightInd, mlfIndexRef(mclVa(net, "net"), ".hint.inputWeightInd"));
    /*
     * layerWeightInd = net.hint.layerWeightInd;
     */
    mlfAssign(
      &layerWeightInd, mlfIndexRef(mclVa(net, "net"), ".hint.layerWeightInd"));
    /*
     * biasInd = net.hint.biasInd;
     */
    mlfAssign(&biasInd, mlfIndexRef(mclVa(net, "net"), ".hint.biasInd"));
    /*
     *
     * for i=1:net.numLayers
     */
    {
        mclForLoopIterator viter__;
        /* _mxarray0_ is the constant 1.0 initialized in
         * InitializeModule_setx — the lower bound of the colon range. */
        for (mclForStart(
               &viter__,
               mclFeval(
                 mclValueVarargout(),
                 mlxColon,
                 _mxarray0_,
                 mlfIndexRef(mclVa(net, "net"), ".numLayers"),
                 NULL),
               NULL,
               NULL);
             mclForNext(&viter__, &i);
             ) {
            mclForLoopIterator viter__0;
            /*
             * for j=find(inputLearn(i,:))
             */
            for (mclForStart(
                   &viter__0,
                   mlfFind(
                     NULL,
                     NULL,
                     mclArrayRef2(
                       mclVv(inputLearn, "inputLearn"),
                       mclVv(i, "i"),
                       mlfCreateColonIndex())),
                   NULL,
                   NULL);
                 mclForNext(&viter__0, &j);
                 ) {
                /*
                 * net.IW{i,j}(:) = x(inputWeightInd{i,j});
                 */
                mlfIndexAssign(
                  &net,
                  ".IW{?,?}(?)",
                  mclVv(i, "i"),
                  mclVv(j, "j"),
                  mlfCreateColonIndex(),
                  mclArrayRef1(
                    mclVa(x, "x"),
                    mlfIndexRef(
                      mclVv(inputWeightInd, "inputWeightInd"),
                      "{?,?}",
                      mclVv(i, "i"),
                      mclVv(j, "j"))));
                /*
                 * end
                 */
            }
            mclDestroyForLoopIterator(viter__0);
            /*
             * for j=find(layerLearn(i,:))
             */
            {
                mclForLoopIterator viter__1;
                for (mclForStart(
                       &viter__1,
                       mlfFind(
                         NULL,
                         NULL,
                         mclArrayRef2(
                           mclVv(layerLearn, "layerLearn"),
                           mclVv(i, "i"),
                           mlfCreateColonIndex())),
                       NULL,
                       NULL);
                     mclForNext(&viter__1, &j);
                     ) {
                    /*
                     * net.LW{i,j}(:) = x(layerWeightInd{i,j});
                     */
                    mlfIndexAssign(
                      &net,
                      ".LW{?,?}(?)",
                      mclVv(i, "i"),
                      mclVv(j, "j"),
                      mlfCreateColonIndex(),
                      mclArrayRef1(
                        mclVa(x, "x"),
                        mlfIndexRef(
                          mclVv(layerWeightInd, "layerWeightInd"),
                          "{?,?}",
                          mclVv(i, "i"),
                          mclVv(j, "j"))));
                    /*
                     * end
                     */
                }
                mclDestroyForLoopIterator(viter__1);
            }
            /*
             * if biasLearn(i)
             */
            if (mlfTobool(
                  mclArrayRef1(mclVv(biasLearn, "biasLearn"), mclVv(i, "i")))) {
                /*
                 * net.b{i}(:) = x(biasInd{i});
                 */
                mlfIndexAssign(
                  &net,
                  ".b{?}(?)",
                  mclVv(i, "i"),
                  mlfCreateColonIndex(),
                  mclArrayRef1(
                    mclVa(x, "x"),
                    mlfIndexRef(
                      mclVv(biasInd, "biasInd"), "{?}", mclVv(i, "i"))));
                /*
                 * end
                 */
            }
            /*
             * end
             */
        }
        mclDestroyForLoopIterator(viter__);
    }
    mclValidateOutput(net, 1, nargout_, "net", "setx");
    /* Release every temporary; "net" is the only array returned. */
    mxDestroyArray(inputLearn);
    mxDestroyArray(layerLearn);
    mxDestroyArray(biasLearn);
    mxDestroyArray(inputWeightInd);
    mxDestroyArray(layerWeightInd);
    mxDestroyArray(biasInd);
    mxDestroyArray(i);
    mxDestroyArray(j);
    mxDestroyArray(x);
    mclSetCurrentLocalFunctionTable(save_local_function_table_);
    return net;
}
/* End of setx.c (scraped website UI text removed). */