learnis.c

nnToolKit is a neural-network toolkit: a library of neural-network algorithm functions developed in-house on top of the MATLAB Neural Network Toolbox.
Language: C
     * %
     * %  Syntax
     * %  
     * %    [dW,LS] = learnis(W,P,Z,N,A,T,E,gW,gA,D,LP,LS)
     * %    info = learnis(code)
     * %
     * %  Description
     * %
     * %    LEARNIS is the instar weight learning function.
     * %
     * %    LEARNIS(W,P,Z,N,A,T,E,gW,gA,D,LP,LS) takes several inputs,
     * %      W  - SxR weight matrix (or Sx1 bias vector).
     * %      P  - RxQ input vectors (or ones(1,Q)).
     * %      Z  - SxQ weighted input vectors.
     * %      N  - SxQ net input vectors.
     * %      A  - SxQ output vectors.
     * %      T  - SxQ layer target vectors.
     * %      E  - SxQ layer error vectors.
     * %      gW - SxR gradient with respect to performance.
     * %      gA - SxQ output gradient with respect to performance.
     * %      D  - SxS neuron distances.
     * %      LP - Learning parameters (LP.lr, see below).
     * %      LS - Learning state, initially should be = [].
     * %    and returns,
     * %      dW - SxR weight (or bias) change matrix.
     * %      LS - New learning state.
     * %
     * %    Learning occurs according to LEARNIS's learning parameter,
     * %    shown here with its default value.
     * %      LP.lr - 0.5 - Learning rate
     * %
     * %    LEARNIS(CODE) returns useful information for each CODE string:
     * %      'pnames'    - Returns names of learning parameters.
     * %      'pdefaults' - Returns default learning parameters.
     * %      'needg'     - Returns 1 if this function uses gW or gA.
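     * %
     * %    For example (an illustrative call, matching the FUNCTION INFO
     * %    code below):
     * %      lp = learnis('pdefaults')   % returns a struct with lp.lr = 0.5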
     * %
     * %  Examples
     * %
     * %    Here we define a random input P, output A, and weight matrix W
     * %    for a layer with a 2-element input and 3 neurons.  We also define
     * %    the learning rate LR.
     * %
     * %      p = rand(2,1);
     * %      a = rand(3,1);
     * %      w = rand(3,2);
     * %      lp.lr = 0.5;
     * %
     * %    Since LEARNIS only needs these values to calculate a weight
     * %    change (see Algorithm below), we will use them to do so.
     * %
     * %      dW = learnis(w,p,[],[],a,[],[],[],[],[],lp,[])
     * %
     * %  Network Use
     * %
     * %    To prepare the weights and the bias of layer i of a custom network
     * %    so that it can learn with LEARNIS:
     * %    1) Set NET.trainFcn to 'trainr'.
     * %       (NET.trainParam will automatically become TRAINR's default parameters.)
     * %    2) Set NET.adaptFcn to 'trains'.
     * %       (NET.adaptParam will automatically become TRAINS's default parameters.)
     * %    3) Set each NET.inputWeights{i,j}.learnFcn to 'learnis'.
     * %       Set each NET.layerWeights{i,j}.learnFcn to 'learnis'.
     * %       (Each weight learning parameter property will automatically
     * %       be set to LEARNIS's default parameters.)
     * %
     * %    To train the network (or enable it to adapt):
     * %    1) Set NET.trainParam (NET.adaptParam) properties to desired values.
     * %    2) Call TRAIN (ADAPT).
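     * %
     * %    A minimal sketch of the above, assuming an existing custom network
     * %    NET with a weight coming to layer i from input (or layer) j, and
     * %    input data P:
     * %
     * %      net.trainFcn = 'trainr';
     * %      net.adaptFcn = 'trains';
     * %      net.inputWeights{i,j}.learnFcn = 'learnis';
     * %      net.layerWeights{i,j}.learnFcn = 'learnis';
     * %      net = train(net,P);   % or: [net,Y] = adapt(net,P)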
     * %
     * %  Algorithm
     * %
     * %    LEARNIS calculates the weight change dW for a given neuron from the
     * %    neuron's input P, output A, and learning rate LR according to the
     * %    instar learning rule:
     * %
     * %      dw =  lr*a*(p'-w)
     * %
     * %  See also LEARNK, LEARNOS, ADAPT, TRAIN.
     * 
     * % Mark Beale, 1-31-92
     * % Revised 12-15-93, MB
     * % Revised 11-31-97, MB
     * % Copyright 1992-2002 The MathWorks, Inc.
     * % $Revision: 1.11 $  $Date: 2002/03/25 16:52:36 $
     * 
     * % **[ NNT2 Support ]**
     * if nargin == 4
     */
    if (nargin_ == 4) {
        /*
         * nntobsu('learnis','See help on LEARNIS for new argument list.')
         */
        mlfNntobsu(_mxarray0_, _mxarray2_, NULL);
        /*
         * lp.lr = n; a = z;
         */
        mlfIndexAssign(&lp, ".lr", mclVa(n, "n"));
        mlfAssign(&a, mclVa(z, "z"));
        /*
         * dw = learnis(w,p,[],[],a,[],[],[],[],[],lp,[]);
         */
        mlfAssign(
          &dw,
          mlfLearnis(
            NULL,
            mclVa(w, "w"),
            mclVa(p, "p"),
            _mxarray4_,
            _mxarray4_,
            mclVa(a, "a"),
            _mxarray4_,
            _mxarray4_,
            _mxarray4_,
            _mxarray4_,
            _mxarray4_,
            mclVa(lp, "lp"),
            _mxarray4_));
        /*
         * return
         */
        goto return_;
    /*
     * end
     */
    }
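    /*
     * In the old NNT 2.0 calling sequence handled above, the third and
     * fourth arguments carry the output and the learning rate, so z is
     * remapped to a and n to lp.lr before re-dispatching to the new
     * twelve-argument form of learnis.
     */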
    /*
     * 
     * % FUNCTION INFO
     * % =============
     * if isstr(w)
     */
    if (mlfTobool(mlfIsstr(mclVa(w, "w")))) {
        /*
         * switch lower(w)
         */
        mxArray * v_ = mclInitialize(mlfLower(mclVa(w, "w")));
        if (mclSwitchCompare(v_, _mxarray5_)) {
            /*
             * case 'pnames'
             * dw = {'lr'};
             */
            mlfAssign(&dw, _mxarray7_);
        /*
         * case 'pdefaults'
         */
        } else if (mclSwitchCompare(v_, _mxarray10_)) {
            /*
             * lp.lr = 0.5;
             */
            mlfIndexAssign(&lp, ".lr", _mxarray12_);
            /*
             * dw = lp;
             */
            mlfAssign(&dw, mclVa(lp, "lp"));
        /*
         * case 'needg'
         */
        } else if (mclSwitchCompare(v_, _mxarray13_)) {
            /*
             * dw = 0;
             */
            mlfAssign(&dw, _mxarray15_);
        /*
         * otherwise
         */
        } else {
            /*
             * error('Unrecognized property.')
             */
            mlfError(_mxarray16_, NULL);
        /*
         * end
         */
        }
        mxDestroyArray(v_);
        /*
         * return
         */
        goto return_;
    /*
     * end
     */
    }
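    /*
     * Each mclSwitchCompare call above implements one case of the MATLAB
     * switch on lower(w); the _mxarray*_ constants presumably hold the
     * interned strings 'pnames', 'pdefaults', and 'needg', initialized in
     * the generated file's preamble (not shown in this excerpt).
     */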
    /*
     * 
     * % CALCULATION
     * % ===========
     * 
     * [S,R] = size(w);
     */
    mlfSize(mlfVarargout(&S, &R, NULL), mclVa(w, "w"), NULL);
    /*
     * Q = size(p,2);
     */
    mlfAssign(&Q, mlfSize(mclValueVarargout(), mclVa(p, "p"), _mxarray18_));
    /*
     * pt = p';
     */
    mlfAssign(&pt, mlfCtranspose(mclVa(p, "p")));
    /*
     * lr_a = lp.lr * a;
     */
    mlfAssign(
      &lr_a,
      mclFeval(
        mclValueVarargout(),
        mlxMtimes,
        mlfIndexRef(mclVa(lp, "lp"), ".lr"),
        mclVa(a, "a"),
        NULL));
    /*
     * dw = zeros(S,R);
     */
    mlfAssign(&dw, mlfZeros(mclVv(S, "S"), mclVv(R, "R"), NULL));
    /*
     * for q=1:Q
     */
    {
        int v_ = mclForIntStart(1);
        int e_ = mclForIntEnd(mclVv(Q, "Q"));
        if (v_ > e_) {
            mlfAssign(&q, _mxarray4_);
        } else {
            /*
             * dw = dw + lr_a(:,q+zeros(1,R)) .* (pt(q+zeros(S,1),:)-w);
             * end
             */
            for (; ; ) {
                mlfAssign(
                  &dw,
                  mclPlus(
                    mclVv(dw, "dw"),
                    mclTimes(
                      mclArrayRef2(
                        mclVv(lr_a, "lr_a"),
                        mlfCreateColonIndex(),
                        mclPlus(
                          mlfScalar(v_),
                          mlfZeros(_mxarray19_, mclVv(R, "R"), NULL))),
                      mclMinus(
                        mclArrayRef2(
                          mclVv(pt, "pt"),
                          mclPlus(
                            mlfScalar(v_),
                            mlfZeros(mclVv(S, "S"), _mxarray19_, NULL)),
                          mlfCreateColonIndex()),
                        mclVa(w, "w")))));
                if (v_ == e_) {
                    break;
                }
                ++v_;
            }
            mlfAssign(&q, mlfScalar(v_));
        }
    }
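    /*
     * The q+zeros(1,R) and q+zeros(S,1) index expressions above replicate
     * column q of lr_a across R columns and row q of pt across S rows, a
     * pre-repmat idiom for broadcasting.  With implicit expansion (MATLAB
     * R2016b and later), each pass of the loop could be written as, e.g.:
     *
     *   dw = dw + (lp.lr * a(:,q)) .* (p(:,q)' - w);
     */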
    return_:
    mclValidateOutput(dw, 1, nargout_, "dw", "learnis");
    mclValidateOutput(*ls, 2, nargout_, "ls", "learnis");
    mxDestroyArray(ans);
    mxDestroyArray(S);
    mxDestroyArray(R);
    mxDestroyArray(Q);
    mxDestroyArray(pt);
    mxDestroyArray(lr_a);
    mxDestroyArray(q);
    mxDestroyArray(lp);
    mxDestroyArray(d);
    mxDestroyArray(gA);
    mxDestroyArray(gW);
    mxDestroyArray(e);
    mxDestroyArray(t);
    mxDestroyArray(a);
    mxDestroyArray(n);
    mxDestroyArray(z);
    mxDestroyArray(p);
    mxDestroyArray(w);
    mclSetCurrentLocalFunctionTable(save_local_function_table_);
    return dw;
}
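
/*
 * Illustrative sanity check (not part of the generated file; assumes the
 * MATLAB Neural Network Toolbox learnis.m is on the path, and MATLAB
 * R2016b+ for implicit expansion in the check line):
 *
 *   p = rand(2,1);  a = rand(3,1);  w = rand(3,2);
 *   lp.lr = 0.5;
 *   dW = learnis(w,p,[],[],a,[],[],[],[],[],lp,[]);
 *   dW_check = lp.lr * a .* (p' - w);   % instar rule applied directly
 *   % dW and dW_check should match for this single-sample case.
 */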
