
getx.c

nnToolKit is a neural-network toolkit: a library of neural-network algorithm routines developed in-house on top of the MATLAB Neural Network Toolbox.
Language: C
/*
 * MATLAB Compiler: 3.0
 * Date: Sun May 13 16:47:40 2007
 * Arguments: "-B" "macro_default" "-O" "all" "-O" "fold_scalar_mxarrays:on"
 * "-O" "fold_non_scalar_mxarrays:on" "-O" "optimize_integer_for_loops:on" "-O"
 * "array_indexing:on" "-O" "optimize_conditionals:on" "-M" "-silentsetup" "-d"
 * "d:/MATLAB6p5/work/nnToolKit/src" "-B" "csglcom:nnToolKit,nnToolKit,2.0"
 * "-B" "sgl" "-m" "-W" "main" "-L" "C" "-t" "-T" "link:exe" "-h"
 * "libmmfile.mlib" "-W" "mainhg" "libmwsglm.mlib" "-t" "-W"
 * "comhg:nnToolKit,nnToolKit,2.0" "-T" "link:lib" "-h" "libmmfile.mlib" "-i"
 * "-i" "D:/MATLAB6p5/work/nnToolKit/lmnet/LmSimu.m"
 * "D:/MATLAB6p5/work/nnToolKit/lmnet/LmTrain.m"
 * "D:/MATLAB6p5/work/nnToolKit/sofm/SofmSimu.m"
 * "D:/MATLAB6p5/work/nnToolKit/sofm/SofmTrain.m" 
 */
#include "getx.h"
#include "libmatlbm.h"
static mxArray * _mxarray0_;

void InitializeModule_getx(void) {
    _mxarray0_ = mclInitializeDouble(1.0);
}

void TerminateModule_getx(void) {
    mxDestroyArray(_mxarray0_);
}
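
/*
 * Editor's note -- a minimal usage sketch, not part of the generated file;
 * it only makes the pairing of the module routines explicit.  In a full
 * nnToolKit build the generated library/application wrapper is normally
 * what calls them, so the function name below is purely illustrative.
 */
#if 0
static void example_module_lifetime(void) {
    InitializeModule_getx();     /* builds the module's constant pool (_mxarray0_) */
    /* ... call mlfGetx()/mlxGetx() as often as needed ... */
    TerminateModule_getx();      /* releases the constant pool again */
}
#endif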

static mxArray * Mgetx(int nargout_, mxArray * net);

_mexLocalFunctionTable _local_function_table_getx
  = { 0, (mexFunctionTableEntry *)NULL };

/*
 * The function "mlfGetx" contains the normal interface for the "getx"
 * M-function from file "d:\matlab6p5\toolbox\nnet\nnutils\getx.m" (lines
 * 1-60). This function processes any input arguments and passes them to the
 * implementation version of the function, appearing below.
 */
mxArray * mlfGetx(mxArray * net) {
    int nargout = 1;
    mxArray * x = NULL;
    mlfEnterNewContext(0, 1, net);
    x = Mgetx(nargout, net);
    mlfRestorePreviousContext(0, 1, net);
    return mlfReturnValue(x);
}
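
/*
 * Editor's note -- an illustrative sketch, not part of the generated file,
 * showing how a host routine might call the normal interface above.  It
 * assumes the compiled library has already been initialized and that a
 * valid network structure "net" was obtained elsewhere (for example from
 * one of the other compiled nnToolKit routines); the function name below
 * is hypothetical.
 */
#if 0
static void example_get_weight_vector(mxArray * net) {
    /* One input (net), one output: the packed weight/bias vector. */
    mxArray * x = mlfGetx(net);

    /* ... inspect x here, e.g. via mxGetM()/mxGetPr() ... */

    /* The caller owns the returned array and must release it. */
    mxDestroyArray(x);
}
#endif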

/*
 * The function "mlxGetx" contains the feval interface for the "getx"
 * M-function from file "d:\matlab6p5\toolbox\nnet\nnutils\getx.m" (lines
 * 1-60). The feval function calls the implementation version of getx through
 * this function. This function processes any input arguments and passes them
 * to the implementation version of the function, appearing below.
 */
void mlxGetx(int nlhs, mxArray * plhs[], int nrhs, mxArray * prhs[]) {
    mxArray * mprhs[1];
    mxArray * mplhs[1];
    int i;
    if (nlhs > 1) {
        mlfError(
          mxCreateString(
            "Run-time Error: File: getx Line: 1 Column: 1 The function \"getx\""
            " was called with more than the declared number of outputs (1)."),
          NULL);
    }
    if (nrhs > 1) {
        mlfError(
          mxCreateString(
            "Run-time Error: File: getx Line: 1 Column: 1 The function \"getx"
            "\" was called with more than the declared number of inputs (1)."),
          NULL);
    }
    for (i = 0; i < 1; ++i) {
        mplhs[i] = NULL;
    }
    for (i = 0; i < 1 && i < nrhs; ++i) {
        mprhs[i] = prhs[i];
    }
    for (; i < 1; ++i) {
        mprhs[i] = NULL;
    }
    mlfEnterNewContext(0, 1, mprhs[0]);
    mplhs[0] = Mgetx(nlhs, mprhs[0]);
    mlfRestorePreviousContext(0, 1, mprhs[0]);
    plhs[0] = mplhs[0];
}
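
/*
 * Editor's note -- an illustrative sketch, not part of the generated file,
 * of the feval-style calling convention used by mlxGetx: the caller packs
 * the inputs into prhs[], passes an empty plhs[], and reads the result
 * back from plhs[0].  "net" is again assumed to be a valid network
 * structure obtained elsewhere.
 */
#if 0
static mxArray * example_feval_getx(mxArray * net) {
    mxArray * plhs[1] = { NULL };
    mxArray * prhs[1];
    prhs[0] = net;
    mlxGetx(1, plhs, 1, prhs);   /* request 1 output, supply 1 input */
    return plhs[0];              /* the weight/bias vector x */
}
#endif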

/*
 * The function "Mgetx" is the implementation version of the "getx" M-function
 * from file "d:\matlab6p5\toolbox\nnet\nnutils\getx.m" (lines 1-60). It
 * contains the actual compiled code for that M-function. It is a static
 * function and must only be called from one of the interface functions,
 * appearing above.
 */
/*
 * function x=getx(net)
 */
static mxArray * Mgetx(int nargout_, mxArray * net) {
    mexLocalFunctionTable save_local_function_table_
      = mclSetCurrentLocalFunctionTable(&_local_function_table_getx);
    mxArray * x = NULL;
    mxArray * j = NULL;
    mxArray * i = NULL;
    mxArray * biasInd = NULL;
    mxArray * layerWeightInd = NULL;
    mxArray * inputWeightInd = NULL;
    mxArray * biasLearn = NULL;
    mxArray * layerLearn = NULL;
    mxArray * inputLearn = NULL;
    mclCopyArray(&net);
    /*
     * %GETX Get all network weight and bias values as a single vector.
     * %
     * %  Syntax
     * %
     * %    X = getx(net)
     * %
     * %  Description
     * %
     * %    This function gets a network's weights and biases as
     * %    a vector of values.
     * %
     * %    X = GETX(NET)
     * %      NET - Neural network.
     * %      X   - Vector of weight and bias values.
     * %
     * %  Examples
     * %
     * %    Here we create a network with a 2-element input, and one
     * %    layer of 3 neurons.
     * %
     * %      net = newff([0 1; -1 1],[3]);
     * %
     * %    We can get its weight and bias values as follows:
     * %
     * %      net.iw{1,1}
     * %      net.b{1}
     * %
     * %    We can get these values as a single vector as follows:
     * %
     * %      x = getx(net);
     * %
     * %  See also SETX, FORMX.
     * 
     * % Mark Beale, 11-31-97
     * % Mark Beale, Updated help, 5-25-98
     * % Copyright 1992-2002 The MathWorks, Inc.
     * % $Revision: 1.8 $ $Date: 2002/03/25 16:55:01 $
     * 
     * % Shortcuts
     * inputLearn = net.hint.inputLearn;
     */
    mlfAssign(&inputLearn, mlfIndexRef(mclVa(net, "net"), ".hint.inputLearn"));
    /*
     * layerLearn = net.hint.layerLearn;
     */
    mlfAssign(&layerLearn, mlfIndexRef(mclVa(net, "net"), ".hint.layerLearn"));
    /*
     * biasLearn = net.hint.biasLearn;
     */
    mlfAssign(&biasLearn, mlfIndexRef(mclVa(net, "net"), ".hint.biasLearn"));
    /*
     * inputWeightInd = net.hint.inputWeightInd;
     */
    mlfAssign(
      &inputWeightInd, mlfIndexRef(mclVa(net, "net"), ".hint.inputWeightInd"));
    /*
     * layerWeightInd = net.hint.layerWeightInd;
     */
    mlfAssign(
      &layerWeightInd, mlfIndexRef(mclVa(net, "net"), ".hint.layerWeightInd"));
    /*
     * biasInd = net.hint.biasInd;
     */
    mlfAssign(&biasInd, mlfIndexRef(mclVa(net, "net"), ".hint.biasInd"));
    /*
     * 
     * x = zeros(net.hint.xLen,1);
     */
    mlfAssign(
      &x,
      mlfZeros(mlfIndexRef(mclVa(net, "net"), ".hint.xLen"), _mxarray0_, NULL));
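    /*
     * Editor's note: the nested loops below visit every layer i and copy
     * the learnable input weights (net.IW), layer weights (net.LW) and
     * biases (net.b) into x at the positions precomputed in the
     * net.hint.*Ind cell arrays, a direct translation of the M-code
     * quoted in the comments.
     */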
    /*
     * for i=1:net.numLayers
     */
    {
        mclForLoopIterator viter__;
        for (mclForStart(
               &viter__,
               mclFeval(
                 mclValueVarargout(),
                 mlxColon,
                 _mxarray0_,
                 mlfIndexRef(mclVa(net, "net"), ".numLayers"),
                 NULL),
               NULL,
               NULL);
             mclForNext(&viter__, &i);
             ) {
            mclForLoopIterator viter__0;
            /*
             * for j=find(inputLearn(i,:))
             */
            for (mclForStart(
                   &viter__0,
                   mlfFind(
                     NULL,
                     NULL,
                     mclArrayRef2(
                       mclVv(inputLearn, "inputLearn"),
                       mclVv(i, "i"),
                       mlfCreateColonIndex())),
                   NULL,
                   NULL);
                 mclForNext(&viter__0, &j);
                 ) {
                /*
                 * x(inputWeightInd{i,j}) = net.IW{i,j}(:);
                 */
                mclArrayAssign1(
                  &x,
                  mlfIndexRef(
                    mclVa(net, "net"),
                    ".IW{?,?}(?)",
                    mclVv(i, "i"),
                    mclVv(j, "j"),
                    mlfCreateColonIndex()),
                  mlfIndexRef(
                    mclVv(inputWeightInd, "inputWeightInd"),
                    "{?,?}",
                    mclVv(i, "i"),
                    mclVv(j, "j")));
            /*
             * end
             */
            }
            mclDestroyForLoopIterator(viter__0);
            /*
             * for j=find(layerLearn(i,:))
             */
            {
                mclForLoopIterator viter__1;
                for (mclForStart(
                       &viter__1,
                       mlfFind(
                         NULL,
                         NULL,
                         mclArrayRef2(
                           mclVv(layerLearn, "layerLearn"),
                           mclVv(i, "i"),
                           mlfCreateColonIndex())),
                       NULL,
                       NULL);
                     mclForNext(&viter__1, &j);
                     ) {
                    /*
                     * x(layerWeightInd{i,j}) = net.LW{i,j}(:);
                     */
                    mclArrayAssign1(
                      &x,
                      mlfIndexRef(
                        mclVa(net, "net"),
                        ".LW{?,?}(?)",
                        mclVv(i, "i"),
                        mclVv(j, "j"),
                        mlfCreateColonIndex()),
                      mlfIndexRef(
                        mclVv(layerWeightInd, "layerWeightInd"),
                        "{?,?}",
                        mclVv(i, "i"),
                        mclVv(j, "j")));
                /*
                 * end
                 */
                }
                mclDestroyForLoopIterator(viter__1);
            }
            /*
             * if biasLearn(i)
             */
            if (mlfTobool(
                  mclArrayRef1(mclVv(biasLearn, "biasLearn"), mclVv(i, "i")))) {
                /*
                 * x(biasInd{i}) = net.b{i};
                 */
                mclArrayAssign1(
                  &x,
                  mlfIndexRef(mclVa(net, "net"), ".b{?}", mclVv(i, "i")),
                  mlfIndexRef(mclVv(biasInd, "biasInd"), "{?}", mclVv(i, "i")));
            /*
             * end
             */
            }
        /*
         * end
         */
        }
        mclDestroyForLoopIterator(viter__);
    }
    mclValidateOutput(x, 1, nargout_, "x", "getx");
    mxDestroyArray(inputLearn);
    mxDestroyArray(layerLearn);
    mxDestroyArray(biasLearn);
    mxDestroyArray(inputWeightInd);
    mxDestroyArray(layerWeightInd);
    mxDestroyArray(biasInd);
    mxDestroyArray(i);
    mxDestroyArray(j);
    mxDestroyArray(net);
    mclSetCurrentLocalFunctionTable(save_local_function_table_);
    return x;
}
