
calcperf.c

nnToolKit is a neural-network toolkit: a library of neural-network algorithm routines developed in-house on top of the MATLAB Neural Network Toolbox.
Language: C
Page 1 of 4
    mxArray * IW = NULL;
    mxArray * layerDelays = NULL;
    mxArray * transferFcn = NULL;
    mxArray * netInputFcn = NULL;
    mxArray * layerWeightFcn = NULL;
    mxArray * inputWeightFcn = NULL;
    mxArray * biasConnectFrom = NULL;
    mxArray * layerConnectFrom = NULL;
    mxArray * inputConnectFrom = NULL;
    mxArray * numLayerDelays = NULL;
    mxArray * i = NULL;
    mxArray * ones1xQ = NULL;
    mclCopyArray(&net);
    mclCopyArray(&X);
    mclCopyArray(&PD);
    mclCopyArray(&T);
    mclCopyArray(&Ai);
    mclCopyArray(&Q);
    mclCopyArray(&TS);
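    /*
     * Note (an assumption about the mcc 2.x runtime, not stated in the
     * source): the mclCopyArray calls above give this function writable
     * local copies of its input mxArrays, so later reassignments of
     * net, X, PD, T, Ai, Q, and TS cannot modify the caller's arrays.
     */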
    /*
     * %CALCPERF Calculate network outputs, signals, and performance.
     * %
     * %  Synopsis
     * %
     * %    [perf,El,Ac,N,BZ,IWZ,LWZ]=calcperf(net,X,Pd,Tl,Ai,Q,TS)
     * %
     * %  Description
     * %
     * %    This function calculates the outputs of each layer in
     * %    response to a network's delayed inputs and initial layer
     * %    delay conditions.
     * %
     * %    [perf,El,Ac,N,BZ,IWZ,LWZ] = CALCPERF(NET,X,Pd,Tl,Ai,Q,TS) takes,
     * %      NET - Neural network.
     * %      X   - Network weight and bias values in a single vector.
     * %      Pd  - Delayed inputs.
     * %      Tl  - Layer targets.
     * %      Ai  - Initial layer delay conditions.
     * %      Q   - Concurrent size.
     * %      TS  - Time steps.
     * %    and returns,
     * %      perf - Network performance.
     * %      El   - Layer errors.
     * %      Ac   - Combined layer outputs = [Ai, calculated layer outputs].
     * %      N    - Net inputs.
     * %      BZ   - Concurrent biases.
     * %      IWZ  - Weighted inputs.
     * %      LWZ  - Weighted layer outputs.
     * %
     * %  Examples
     * %
     * %    Here we create a linear network with a single input element
     * %    ranging from 0 to 1, two neurons, and a tap delay on the
     * %    input with taps at 0, 2, and 4 timesteps.  The network is
     * %    also given a recurrent connection from layer 1 to itself with
     * %    tap delays of [1 2].
     * %
     * %      net = newlin([0 1],2,[0 2 4]);
     * %      net.layerConnect(1,1) = 1;
     * %      net.layerWeights{1,1}.delays = [1 2];
     * %
     * %    Here is a single (Q = 1) input sequence P with 5 timesteps (TS = 5),
     * %    and the 4 initial input delay conditions Pi, combined inputs Pc,
     * %    and delayed inputs Pd.
     * %
     * %      P = {0 0.1 0.3 0.6 0.4};
     * %      Pi = {0.2 0.3 0.4 0.1};
     * %      Pc = [Pi P];
     * %      Pd = calcpd(net,5,1,Pc);
     * %
     * %    Here the two initial layer delay conditions for each of the
     * %    two neurons are defined.
     * %
     * %      Ai = {[0.5; 0.1] [0.6; 0.5]};
     * %
     * %    Here we define the layer targets for the two neurons for
     * %    each of the five time steps.
     * %    
     * %      Tl = {[0.1;0.2] [0.3;0.1] [0.5;0.6] [0.8;0.9] [0.5;0.1]};
     * %
     * %    Here the network's weight and bias values are extracted.
     * %
     * %      X = getx(net);
     * %
     * %    Here we calculate the network's combined outputs Ac, and other
     * %    signals described above.
     * %
     * %      [perf,El,Ac,N,BZ,IWZ,LWZ] = calcperf(net,X,Pd,Tl,Ai,1,5)
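     * %
     * %    Given the two initial layer delay conditions and the five
     * %    timesteps, Ac should come back as a 1-by-7 cell array
     * %    ([Ai, one layer output per timestep]) and El as a 1-by-5
     * %    cell array of 2-by-1 error vectors.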
     * 
     * % Mark Beale, 11-31-97
     * % Mark Beale, Updated help, 5-25-98
     * % Copyright 1992-2002 The MathWorks, Inc.
     * % $Revision: 1.9 $ $Date: 2002/03/25 16:54:58 $
     * 
     * % CALCA: [Ac,N,LWZ,IWZ,BZ] = calca(net,PD,Ai,Q,TS)
     * %=================================================
     * 
     * % Concurrent biases
     * BZ = cell(net.numLayers,1);
     */
    mlfAssign(
      BZ,
      mlfCell(mlfIndexRef(mclVa(net, "net"), ".numLayers"), _mxarray0_, NULL));
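    /*
     * The call above is the generated form of the MATLAB line in the
     * comment: mlfCell allocates the net.numLayers-by-1 cell array and
     * mlfAssign binds it to BZ.  Only the layers listed in
     * net.hint.biasConnectTo are filled in by the loop below; the
     * remaining cells stay empty.
     */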
    /*
     * ones1xQ = ones(1,Q);
     */
    mlfAssign(&ones1xQ, mlfOnes(_mxarray0_, mclVa(Q, "Q"), NULL));
    /*
     * for i=net.hint.biasConnectTo
     */
    {
        mclForLoopIterator viter__;
        for (mclForStart(
               &viter__,
               mlfIndexRef(mclVa(net, "net"), ".hint.biasConnectTo"),
               NULL,
               NULL);
             mclForNext(&viter__, &i);
             ) {
            /*
             * BZ{i} = net.b{i}(:,ones1xQ);
             */
            mlfIndexAssign(
              BZ,
              "{?}",
              mclVv(i, "i"),
              mlfIndexRef(
                mclVa(net, "net"),
                ".b{?}(?,?)",
                mclVv(i, "i"),
                mlfCreateColonIndex(),
                mclVv(ones1xQ, "ones1xQ")));
        /*
         * end
         */
        }
        mclDestroyForLoopIterator(viter__);
    }
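    /*
     * Note: BZ{i} = net.b{i}(:,ones1xQ) replicates the Si-by-1 bias
     * vector of layer i across Q columns by indexing with a row of
     * ones.  In MATLAB, b = [1;2]; b(:,ones(1,3)) yields
     * [1 1 1; 2 2 2], the same result as repmat(b,1,3), so each BZ{i}
     * becomes an Si-by-Q matrix of concurrent biases.
     */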
    /*
     * 
     * % Signals
     * IWZ = cell(net.numLayers,net.numInputs,TS);
     */
    mlfAssign(
      IWZ,
      mlfCell(
        mlfIndexRef(mclVa(net, "net"), ".numLayers"),
        mlfIndexRef(mclVa(net, "net"), ".numInputs"),
        mclVa(TS, "TS"),
        NULL));
    /*
     * LWZ = cell(net.numLayers,net.numLayers,TS);
     */
    mlfAssign(
      LWZ,
      mlfCell(
        mlfIndexRef(mclVa(net, "net"), ".numLayers"),
        mlfIndexRef(mclVa(net, "net"), ".numLayers"),
        mclVa(TS, "TS"),
        NULL));
    /*
     * Ac = [Ai cell(net.numLayers,TS)];
     */
    mlfAssign(
      Ac,
      mlfHorzcat(
        mclVa(Ai, "Ai"),
        mlfCell(
          mlfIndexRef(mclVa(net, "net"), ".numLayers"), mclVa(TS, "TS"), NULL),
        NULL));
    /*
     * N = cell(net.numLayers,TS);
     */
    mlfAssign(
      N,
      mlfCell(
        mlfIndexRef(mclVa(net, "net"), ".numLayers"), mclVa(TS, "TS"), NULL));
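    /*
     * Signal layout, following the MATLAB source: IWZ{i,j,ts} holds
     * the weighted input from input j to layer i at timestep ts,
     * LWZ{i,j,ts} the weighted output of layer j feeding layer i, and
     * N{i,ts} the net input of layer i.  Ac prepends the initial layer
     * delay states Ai, so layer i's output at timestep ts is stored at
     * Ac{i,numLayerDelays+ts} (ts2 in the simulation loop below).
     */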
    /*
     * 
     * % Shortcuts
     * numLayerDelays = net.numLayerDelays;
     */
    mlfAssign(
      &numLayerDelays, mlfIndexRef(mclVa(net, "net"), ".numLayerDelays"));
    /*
     * inputConnectFrom = net.hint.inputConnectFrom;
     */
    mlfAssign(
      &inputConnectFrom,
      mlfIndexRef(mclVa(net, "net"), ".hint.inputConnectFrom"));
    /*
     * layerConnectFrom = net.hint.layerConnectFrom;
     */
    mlfAssign(
      &layerConnectFrom,
      mlfIndexRef(mclVa(net, "net"), ".hint.layerConnectFrom"));
    /*
     * biasConnectFrom = net.hint.biasConnectFrom;
     */
    mlfAssign(
      &biasConnectFrom,
      mlfIndexRef(mclVa(net, "net"), ".hint.biasConnectFrom"));
    /*
     * inputWeightFcn = net.hint.inputWeightFcn;
     */
    mlfAssign(
      &inputWeightFcn, mlfIndexRef(mclVa(net, "net"), ".hint.inputWeightFcn"));
    /*
     * layerWeightFcn = net.hint.layerWeightFcn;
     */
    mlfAssign(
      &layerWeightFcn, mlfIndexRef(mclVa(net, "net"), ".hint.layerWeightFcn"));
    /*
     * netInputFcn = net.hint.netInputFcn;
     */
    mlfAssign(
      &netInputFcn, mlfIndexRef(mclVa(net, "net"), ".hint.netInputFcn"));
    /*
     * transferFcn = net.hint.transferFcn;
     */
    mlfAssign(
      &transferFcn, mlfIndexRef(mclVa(net, "net"), ".hint.transferFcn"));
    /*
     * layerDelays = net.hint.layerDelays;
     */
    mlfAssign(
      &layerDelays, mlfIndexRef(mclVa(net, "net"), ".hint.layerDelays"));
    /*
     * IW = net.IW;
     */
    mlfAssign(&IW, mlfIndexRef(mclVa(net, "net"), ".IW"));
    /*
     * LW = net.LW;
     */
    mlfAssign(&LW, mlfIndexRef(mclVa(net, "net"), ".LW"));
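    /*
     * The assignments above cache the net.hint.* fields and the weight
     * cell arrays in locals once, before the simulation loop, so the
     * per-timestep code avoids repeating these nested field and
     * subscript dereferences.
     */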
    /*
     * 
     * % Simulation
     * for ts=1:TS
     */
    {
        int v_ = mclForIntStart(1);
        int e_ = mclForIntEnd(mclVa(TS, "TS"));
        if (v_ > e_) {
            mlfAssign(&ts, _mxarray1_);
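            /*
             * (Assumed mcc convention: when TS < 1 the loop body never
             * executes and ts is set to _mxarray1_, the empty matrix,
             * matching what MATLAB leaves in ts after "for ts=1:TS"
             * with an empty range.)
             */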
        } else {
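            /*
             * The MATLAB source below is the simulation loop body; its
             * generated C continues on pages 2-4.  For each layer i
             * (in net.hint.simLayerOrder) and timestep ts it computes
             * the weighted inputs IWZ and the delayed weighted layer
             * outputs LWZ, special-casing the common 'dotprod' weight
             * function as a plain matrix product rather than a feval
             * call, combines them with the layer's bias through the
             * net input function ('netsum' or 'netprod'), and applies
             * the transfer function to produce the layer output
             * Ac{i,ts2}.
             */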
            /*
             * for i=net.hint.simLayerOrder
             * 
             * ts2 = numLayerDelays + ts;
             * 
             * % Input Weights -> Weighed Inputs
             * inputInds = inputConnectFrom{i};
             * for j=inputInds
             * switch inputWeightFcn{i,j}
             * case 'dotprod'
             * IWZ{i,j,ts} = IW{i,j} * PD{i,j,ts};
             * otherwise
             * IWZ{i,j,ts} = feval(inputWeightFcn{i,j},IW{i,j},PD{i,j,ts});
             * end
             * end
             * 
             * % Layer Weights -> Weighted Layer Outputs
             * layerInds = layerConnectFrom{i};
             * for j=layerInds
             * thisLayerDelays = layerDelays{i,j};
             * if (length(thisLayerDelays) == 1) & (thisLayerDelays == 0)
             * Ad = Ac{j,ts2};
             * else
             * Ad = cell2mat(Ac(j,ts2-layerDelays{i,j})');
             * end
             * switch layerWeightFcn{i,j}
             * case 'dotprod'
             * LWZ{i,j,ts} = LW{i,j} * Ad;
             * otherwise
             * LWZ{i,j,ts} = feval(layerWeightFcn{i,j},LW{i,j},Ad);
             * end
             * end
             * 
             * % Net Input Function -> Net Input
             * if net.biasConnect(i)
             * Z = [IWZ(i,inputInds,ts) LWZ(i,layerInds,ts) BZ(i)];
             * else
             * Z = [IWZ(i,inputInds,ts) LWZ(i,layerInds,ts)];
             * end
             * switch netInputFcn{i}
             * case 'netsum'
             * N{i,ts} = Z{1};
             * for k=2:length(Z)
             * N{i,ts} = N{i,ts} + Z{k};
             * end
             * case 'netprod'
             * N{i,ts} = Z{1};
             * for k=2:length(Z)
             * N{i,ts} = N{i,ts} .* Z{k};
             * end
             * otherwise
             * N{i,ts} = feval(netInputFcn{i},Z{:});
             * end
             * 
             * % Transfer Function -> Layer Output
             * switch transferFcn{i}
             * case 'purelin'
             * Ac{i,ts2} = N{i,ts};
             * case 'tansig'
             * n = N{i,ts};
             * a = 2 ./ (1 + exp(-2*n)) - 1;
             * k = find(~finite(a));
             * a(k) = sign(n(k));
             * Ac{i,ts2} = a;
             * case 'logsig'
