
tlm3.c

nnToolKit is a neural network toolkit: a set of neural network algorithm routines developed on top of the MATLAB Neural Network Toolbox.
Language: C
Page 1 of 3
static mxArray * Mtlm3(mxArray * * b1,
                       mxArray * * w2,
                       mxArray * * b2,
                       mxArray * * w3,
                       mxArray * * b3,
                       mxArray * * i,
                       mxArray * * tr,
                       int nargout_,
                       mxArray * w1_in,
                       mxArray * b1_in,
                       mxArray * f1,
                       mxArray * w2_in,
                       mxArray * b2_in,
                       mxArray * f2,
                       mxArray * w3_in,
                       mxArray * b3_in,
                       mxArray * f3,
                       mxArray * p,
                       mxArray * t,
                       mxArray * tp) {
    mexLocalFunctionTable save_local_function_table_
      = mclSetCurrentLocalFunctionTable(&_local_function_table_tlm3);
    int nargin_
      = mclNargin(
          12,
          w1_in,
          b1_in,
          f1,
          w2_in,
          b2_in,
          f2,
          w3_in,
          b3_in,
          f3,
          p,
          t,
          tp,
          NULL);
    mxArray * w1 = NULL;
    mxArray * new_SSE = NULL;
    mxArray * new_e = NULL;
    mxArray * new_b3 = NULL;
    mxArray * new_w3 = NULL;
    mxArray * new_b2 = NULL;
    mxArray * new_w2 = NULL;
    mxArray * new_b1 = NULL;
    mxArray * new_w1 = NULL;
    mxArray * dx = NULL;
    mxArray * jj = NULL;
    mxArray * grad = NULL;
    mxArray * je = NULL;
    mxArray * j = NULL;
    mxArray * j3 = NULL;
    mxArray * j2 = NULL;
    mxArray * j1 = NULL;
    mxArray * ext_d1 = NULL;
    mxArray * ext_d2 = NULL;
    mxArray * ext_d3 = NULL;
    mxArray * d3 = NULL;
    mxArray * ext_a2 = NULL;
    mxArray * ext_a1 = NULL;
    mxArray * mu = NULL;
    mxArray * h = NULL;
    mxArray * message = NULL;
    mxArray * plottype = NULL;
    mxArray * SSE = NULL;
    mxArray * e = NULL;
    mxArray * a3 = NULL;
    mxArray * a2 = NULL;
    mxArray * a1 = NULL;
    mxArray * ext_p = NULL;
    mxArray * db3 = NULL;
    mxArray * dw3 = NULL;
    mxArray * db2 = NULL;
    mxArray * dw2 = NULL;
    mxArray * db1 = NULL;
    mxArray * dw1 = NULL;
    mxArray * ii = NULL;
    mxArray * b3_ind = NULL;
    mxArray * w3_ind = NULL;
    mxArray * b2_ind = NULL;
    mxArray * w2_ind = NULL;
    mxArray * b1_ind = NULL;
    mxArray * w1_ind = NULL;
    mxArray * s3 = NULL;
    mxArray * s2 = NULL;
    mxArray * r = NULL;
    mxArray * s1 = NULL;
    mxArray * df3 = NULL;
    mxArray * df2 = NULL;
    mxArray * df1 = NULL;
    mxArray * mu_max = NULL;
    mxArray * mu_dec = NULL;
    mxArray * mu_inc = NULL;
    mxArray * mu_init = NULL;
    mxArray * grad_min = NULL;
    mxArray * eg = NULL;
    mxArray * me = NULL;
    mxArray * df = NULL;
    mxArray * ans = NULL;
    mclCopyInputArg(&w1, w1_in);
    mclCopyInputArg(b1, b1_in);
    mclCopyArray(&f1);
    mclCopyInputArg(w2, w2_in);
    mclCopyInputArg(b2, b2_in);
    mclCopyArray(&f2);
    mclCopyInputArg(w3, w3_in);
    mclCopyInputArg(b3, b3_in);
    mclCopyArray(&f3);
    mclCopyArray(&p);
    mclCopyArray(&t);
    mclCopyArray(&tp);
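    /*
     * Reader note (not from the original tlm3.m): in the MATLAB Compiler
     * calling convention used here, locals such as w1 are plain mxArray
     * pointers initialized to NULL, while b1, w2, b2, w3, b3, i and tr are
     * output arguments of type mxArray ** and are therefore dereferenced
     * below; the mclCopyInputArg/mclCopyArray calls take private copies of
     * the inputs so the generated code can reassign them like the
     * corresponding MATLAB variables.
     */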
    /*
     * %TLM3 Train 3-layer feed-forward network w/Levenberg-Marquardt.
     * %
     * %  This function is obsolete.
     * %  Use NNT2FF and TRAIN to update and train your network.
     * 
     * nntobsf('tlm3','Use NNT2FF and TRAIN to update and train your network.')
     */
    mlfNntobsf(_mxarray0_, _mxarray2_, NULL);
    /*
     * 
     * %       [W1,B1,W2,B2,W3,B3,TE,TR] = TLM3(W1,B1,F1,W2,B2,F2,W3,B3,F3,P,T)
     * %         Wi - Weight matrix of ith layer.
     * %         Bi - Bias vector of ith layer.
     * %         F  - Transfer function (string) of ith layer.
     * %         P  - RxQ matrix of input vectors.
     * %         T  - S2xQ matrix of target vectors.
     * %         TP - Training parameters (optional).
     * %       Returns:
     * %         Wi - new weights.
     * %         Bi - new biases.
     * %         TE - the actual number of epochs trained.
     * %         TR - training record: [row of errors]
     * %
     * %       Training parameters are:
     * %         TP(1) - Epochs between updating display, default = 25.
     * %         TP(2) - Maximum number of epochs to train, default = 1000.
     * %         TP(3) - Sum-squared error goal, default = 0.02.
     * %         TP(4) - Minimum gradient, default = 0.0001.
     * %         TP(5) - Initial value for MU, default = 0.001.
     * %         TP(6) - Multiplier for increasing MU, default = 10.
     * %         TP(7) - Multiplier for decreasing MU, default = 0.1.
     * %         TP(8) - Maximum value for MU, default = 1e10.
     * %       Missing parameters and NaN's are replaced with defaults.
     * 
     * % Mark Beale, 12-15-93
     * % Copyright 1992-2002 The MathWorks, Inc.
     * % $Revision: 1.11 $  $Date: 2002/03/25 16:54:24 $
     * 
     * if nargin < 11,error('Not enough arguments.'),end
     */
    if (nargin_ < 11) {
        mlfError(_mxarray4_, NULL);
    }
    /*
     * 
     * % TRAINING PARAMETERS
     * if nargin == 11, tp = []; end
     */
    if (nargin_ == 11) {
        mlfAssign(&tp, _mxarray6_);
    }
    /*
     * tp = nndef(tp,[25 1000 0.02 0.0001 0.001 10 0.1 1e10]);
     */
    mlfAssign(&tp, mlfNndef(mclVa(tp, "tp"), _mxarray7_));
    /*
     * df = tp(1);
     */
    mlfAssign(&df, mclIntArrayRef1(mclVa(tp, "tp"), 1));
    /*
     * me = tp(2);
     */
    mlfAssign(&me, mclIntArrayRef1(mclVa(tp, "tp"), 2));
    /*
     * eg = tp(3);
     */
    mlfAssign(&eg, mclIntArrayRef1(mclVa(tp, "tp"), 3));
    /*
     * grad_min = tp(4);
     */
    mlfAssign(&grad_min, mclIntArrayRef1(mclVa(tp, "tp"), 4));
    /*
     * mu_init = tp(5);
     */
    mlfAssign(&mu_init, mclIntArrayRef1(mclVa(tp, "tp"), 5));
    /*
     * mu_inc = tp(6);
     */
    mlfAssign(&mu_inc, mclIntArrayRef1(mclVa(tp, "tp"), 6));
    /*
     * mu_dec = tp(7);
     */
    mlfAssign(&mu_dec, mclIntArrayRef1(mclVa(tp, "tp"), 7));
    /*
     * mu_max = tp(8);
     */
    mlfAssign(&mu_max, mclIntArrayRef1(mclVa(tp, "tp"), 8));
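    /*
     * Reader note (not from the original tlm3.m): nndef merges tp with the
     * documented defaults, replacing missing entries and NaNs.  A minimal
     * plain-C sketch of that rule, assuming tp is a double row vector and
     * using only standard mxArray calls, would be:
     *
     *     double defaults[8] = { 25, 1000, 0.02, 0.0001, 0.001, 10, 0.1, 1e10 };
     *     double params[8];
     *     size_t n = mxGetNumberOfElements(tp);
     *     const double * v = mxGetPr(tp);
     *     for (int k = 0; k < 8; k++)
     *         params[k] = (k < (int) n && !mxIsNaN(v[k])) ? v[k] : defaults[k];
     *
     * so df, me, eg, grad_min, mu_init, mu_inc, mu_dec and mu_max above are
     * simply params[0] .. params[7].
     */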
    /*
     * df1 = feval(f1,'delta');
     */
    mlfAssign(
      &df1, mlfFeval(mclValueVarargout(), mclVa(f1, "f1"), _mxarray9_, NULL));
    /*
     * df2 = feval(f2,'delta');
     */
    mlfAssign(
      &df2, mlfFeval(mclValueVarargout(), mclVa(f2, "f2"), _mxarray9_, NULL));
    /*
     * df3 = feval(f3,'delta');
     */
    mlfAssign(
      &df3, mlfFeval(mclValueVarargout(), mclVa(f3, "f3"), _mxarray9_, NULL));
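    /*
     * Reader note (not from the original tlm3.m): in this generation of the
     * toolbox, calling a transfer function with the string 'delta' returns
     * the name of its derivative ("delta") function; df1, df2 and df3 are
     * then feval'd inside the training loop to back-propagate sensitivities.
     */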
    /*
     * 
     * % DEFINE SIZES
     * [s1,r] = size(w1);
     */
    mlfSize(mlfVarargout(&s1, &r, NULL), mclVa(w1, "w1"), NULL);
    /*
     * [s2,s1] = size(w2);
     */
    mlfSize(mlfVarargout(&s2, &s1, NULL), mclVa(*w2, "w2"), NULL);
    /*
     * [s3,s2] = size(w3);
     */
    mlfSize(mlfVarargout(&s3, &s2, NULL), mclVa(*w3, "w3"), NULL);
    /*
     * w1_ind = [1:(s1*r)];
     */
    mlfAssign(
      &w1_ind,
      mlfColon(_mxarray11_, mclMtimes(mclVv(s1, "s1"), mclVv(r, "r")), NULL));
    /*
     * b1_ind = [1:s1] + w1_ind(length(w1_ind));
     */
    mlfAssign(
      &b1_ind,
      mclPlus(
        mlfColon(_mxarray11_, mclVv(s1, "s1"), NULL),
        mclIntArrayRef1(
          mclVv(w1_ind, "w1_ind"), mclLengthInt(mclVv(w1_ind, "w1_ind")))));
    /*
     * w2_ind = [1:(s1*s2)] + b1_ind(length(b1_ind));
     */
    mlfAssign(
      &w2_ind,
      mclPlus(
        mlfColon(
          _mxarray11_, mclMtimes(mclVv(s1, "s1"), mclVv(s2, "s2")), NULL),
        mclIntArrayRef1(
          mclVv(b1_ind, "b1_ind"), mclLengthInt(mclVv(b1_ind, "b1_ind")))));
    /*
     * b2_ind = [1:s2] + w2_ind(length(w2_ind));
     */
    mlfAssign(
      &b2_ind,
      mclPlus(
        mlfColon(_mxarray11_, mclVv(s2, "s2"), NULL),
        mclIntArrayRef1(
          mclVv(w2_ind, "w2_ind"), mclLengthInt(mclVv(w2_ind, "w2_ind")))));
    /*
     * w3_ind = [1:(s2*s3)] + b2_ind(length(b2_ind));
     */
    mlfAssign(
      &w3_ind,
      mclPlus(
        mlfColon(
          _mxarray11_, mclMtimes(mclVv(s2, "s2"), mclVv(s3, "s3")), NULL),
        mclIntArrayRef1(
          mclVv(b2_ind, "b2_ind"), mclLengthInt(mclVv(b2_ind, "b2_ind")))));
    /*
     * b3_ind = [1:s3] + w3_ind(length(w3_ind));
     */
    mlfAssign(
      &b3_ind,
      mclPlus(
        mlfColon(_mxarray11_, mclVv(s3, "s3"), NULL),
        mclIntArrayRef1(
          mclVv(w3_ind, "w3_ind"), mclLengthInt(mclVv(w3_ind, "w3_ind")))));
    /*
     * ii = eye(b3_ind(length(b3_ind)));
     */
    mlfAssign(
      &ii,
      mlfEye(
        mclIntArrayRef1(
          mclVv(b3_ind, "b3_ind"), mclLengthInt(mclVv(b3_ind, "b3_ind"))),
        NULL));
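    /*
     * Reader note (not from the original tlm3.m): the six index vectors above
     * partition one flattened parameter vector of length
     *     N = s1*r + s1 + s1*s2 + s2 + s2*s3 + s3,
     * and ii = eye(N) is the identity matrix used in the Levenberg-Marquardt
     * step.  For example, r = 1, s1 = 3, s2 = 2, s3 = 1 gives
     * N = 3 + 3 + 6 + 2 + 2 + 1 = 17.
     */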
    /*
     * dw1 = w1; db1 = b1;
     */
    mlfAssign(&dw1, mclVa(w1, "w1"));
    mlfAssign(&db1, mclVa(*b1, "b1"));
    /*
     * dw2 = w2; db2 = b2;
     */
    mlfAssign(&dw2, mclVa(*w2, "w2"));
    mlfAssign(&db2, mclVa(*b2, "b2"));
    /*
     * dw3 = w3; db3 = b3;
     */
    mlfAssign(&dw3, mclVa(*w3, "w3"));
    mlfAssign(&db3, mclVa(*b3, "b3"));
    /*
     * ext_p = nncpyi(p,s3);
     */
    mlfAssign(&ext_p, mlfNncpyi(mclVa(p, "p"), mclVv(s3, "s3")));
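    /*
     * Reader note (assumption, not from the original tlm3.m): nncpyi appears
     * to replicate each column of p s3 times (interleaved copies), producing
     * the "extended" input ext_p with one copy per output neuron, so that the
     * per-output rows of the Jacobian can be assembled column-wise in the
     * training loop below.
     */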
    /*
     * 
     * % PRESENTATION PHASE
     * [a1,a2,a3] = simuff(p,w1,b1,f1,w2,b2,f2,w3,b3,f3);
     */
    mlfAssign(
      &a1,
      mlfNSimuff(
        3,
        &a2,
        &a3,
        mclVa(p, "p"),
        mclVa(w1, "w1"),
        mclVa(*b1, "b1"),
        mclVa(f1, "f1"),
        mclVa(*w2, "w2"),
        mclVa(*b2, "b2"),
        mclVa(f2, "f2"),
        mclVa(*w3, "w3"),
        mclVa(*b3, "b3"),
        mclVa(f3, "f3")));
    /*
     * e = t-a3;
     */
    mlfAssign(&e, mclMinus(mclVa(t, "t"), mclVv(a3, "a3")));
    /*
     * SSE = sumsqr(e);
     */
    mlfAssign(&SSE, mlfSumsqr(mclVv(e, "e")));
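    /*
     * Reader note (not from the original tlm3.m): in network terms the
     * presentation phase computes
     *     a1 = f1(w1*p + b1),  a2 = f2(w2*a1 + b2),  a3 = f3(w3*a2 + b3),
     * the error e = t - a3, and the cost being minimized,
     *     SSE = sum(sum(e.^2)).
     */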
    /*
     * 
     * % TRAINING RECORD
     * tr = zeros(1,me+1);
     */
    mlfAssign(
      tr, mlfZeros(_mxarray11_, mclPlus(mclVv(me, "me"), _mxarray11_), NULL));
    /*
     * tr(1) = SSE;
     */
    mclIntArrayAssign1(tr, mclVv(SSE, "SSE"), 1);
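    /*
     * Reader note (not from the original tlm3.m): tr holds one SSE value per
     * epoch, me+1 slots in total, with the pre-training error stored in
     * tr(1); this row of errors is the TR training record returned to the
     * caller.
     */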
    /*
     * 
     * % PLOTTING FLAG
     * plottype = (r==1) & (s2==1);
     */
    mlfAssign(
      &plottype,
      mclAnd(
        mclEq(mclVv(r, "r"), _mxarray11_),
        mclEq(mclVv(s2, "s2"), _mxarray11_)));
    /*
     * 
     * % PLOTTING
     * newplot;
     */
    mclAssignAns(&ans, mlfNNewplot(0, NULL));
    /*
     * message = sprintf('TRAINLM: %%g/%g epochs, mu = %%g, SSE = %%g.\n',me);
     */
    mlfAssign(&message, mlfSprintf(NULL, _mxarray12_, mclVv(me, "me"), NULL));
    /*
     * fprintf(message,0,mu_init,SSE)
     */
    mclPrintAns(
      &ans,
      mlfNFprintf(
        0,
        mclVv(message, "message"),
        _mxarray14_,
        mclVv(mu_init, "mu_init"),
        mclVv(SSE, "SSE"),
        NULL));
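    /*
     * Reader note (not from the original tlm3.m): sprintf consumes only the
     * single %g, substituting me (the maximum epoch count); the doubled %%g
     * specifiers survive as literal %g in "message", so each progress call of
     * the form fprintf(message, epoch, mu, SSE) fills in the current epoch,
     * mu and SSE.
     */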
    /*
     * if plottype
     */
    if (mlfTobool(mclVv(plottype, "plottype"))) {
        /*
         * h = plotfa(p,t,p,a3);
         */
        mlfAssign(
          &h,
          mlfNPlotfa(
            1, mclVa(p, "p"), mclVa(t, "t"), mclVa(p, "p"), mclVv(a3, "a3")));
    /*
     * else
     */
    } else {
        /*
         * h = ploterr(tr(1),eg);
         */
        mlfAssign(
          &h,
          mlfNPloterr(
            1, mclIntArrayRef1(mclVv(*tr, "tr"), 1), mclVv(eg, "eg"), NULL));
    /*
     * end
     */
    }
    /*
     * 
     * mu = mu_init;
     */
    mlfAssign(&mu, mclVv(mu_init, "mu_init"));
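    /*
     * Reader note (not from the original tlm3.m): mu is the Levenberg-
     * Marquardt damping term.  Each epoch solves a step of the standard form
     *     dx = -(J'*J + mu*I) \ (J'*e),
     * where a small mu approximates Gauss-Newton and a large mu approximates
     * a short gradient-descent step.  Per the parameter table above, mu is
     * scaled by mu_inc while a step fails to reduce the SSE, scaled by mu_dec
     * after a successful step, and training stops if mu exceeds mu_max.
     */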
    /*
     * for i=1:me
     */
    {
        int v_ = mclForIntStart(1);
        int e_ = mclForIntEnd(mclVv(me, "me"));
        if (v_ > e_) {
            mlfAssign(i, _mxarray6_);
        } else {
            /*
             * 
             * % CHECK PHASE
             * if SSE < eg, i=i-1; break, end
             * 
             * % FIND JACOBIAN
             * ext_a1 = nncpyi(a1,s3);
             * ext_a2 = nncpyi(a2,s3);
             * d3 = feval(df3,a3);
             * ext_d3 = -nncpyd(d3);
             * ext_d2 = feval(df2,ext_a2,ext_d3,w3);
             * ext_d1 = feval(df1,ext_a1,ext_d2,w2);
             * j1 = learnlm(ext_p,ext_d1);
             * j2 = learnlm(ext_a1,ext_d2);
             * j3 = learnlm(ext_a2,ext_d3);
             * 
