sofmtrain.c

nnToolKit is a neural network toolkit: a set of neural network algorithm functions developed in-house on top of the MATLAB Neural Network Toolbox.

Language: C
/*
 * MATLAB Compiler: 3.0
 * Date: Sun May 13 16:47:40 2007
 * Arguments: "-B" "macro_default" "-O" "all" "-O" "fold_scalar_mxarrays:on"
 * "-O" "fold_non_scalar_mxarrays:on" "-O" "optimize_integer_for_loops:on" "-O"
 * "array_indexing:on" "-O" "optimize_conditionals:on" "-M" "-silentsetup" "-d"
 * "d:/MATLAB6p5/work/nnToolKit/src" "-B" "csglcom:nnToolKit,nnToolKit,2.0"
 * "-B" "sgl" "-m" "-W" "main" "-L" "C" "-t" "-T" "link:exe" "-h"
 * "libmmfile.mlib" "-W" "mainhg" "libmwsglm.mlib" "-t" "-W"
 * "comhg:nnToolKit,nnToolKit,2.0" "-T" "link:lib" "-h" "libmmfile.mlib" "-i"
 * "-i" "D:/MATLAB6p5/work/nnToolKit/lmnet/LmSimu.m"
 * "D:/MATLAB6p5/work/nnToolKit/lmnet/LmTrain.m"
 * "D:/MATLAB6p5/work/nnToolKit/sofm/SofmSimu.m"
 * "D:/MATLAB6p5/work/nnToolKit/sofm/SofmTrain.m" 
 */
#include "sofmtrain.h"
#include "initsm.h"
#include "libmatlbm.h"
#include "libmmfile.h"
#include "libmwsglm.h"
#include "nbgrid.h"
#include "nntwarn.h"
#include "trainsm.h"

static mxChar _array1_[3] = { 'O', 'F', 'F' };
static mxArray * _mxarray0_;
static mxArray * _mxarray2_;

static mxChar _array4_[14] = { 'i', 'n', 'p', 'u', 't', '_', 'p',
                               'a', 'r', 'a', '%', 's', '%', 's' };
static mxArray * _mxarray3_;

static mxChar _array6_[4] = { '.', 't', 'x', 't' };
static mxArray * _mxarray5_;

static mxChar _array8_[1] = { 'r' };
static mxArray * _mxarray7_;

static mxChar _array10_[2] = { '%', 'f' };
static mxArray * _mxarray9_;

static mxChar _array12_[5] = { 'w', '%', 's', '%', 's' };
static mxArray * _mxarray11_;

static mxChar _array14_[4] = { '.', 'd', 'a', 't' };
static mxArray * _mxarray13_;

static mxChar _array16_[1] = { 'w' };
static mxArray * _mxarray15_;

static mxChar _array18_[6] = { '%', '9', '.', '4', 'f', ' ' };
static mxArray * _mxarray17_;
static mxArray * _mxarray19_;

static mxChar _array21_[3] = { 'a', 'l', 'l' };
static mxArray * _mxarray20_;

void InitializeModule_sofmtrain(void) {
    _mxarray0_ = mclInitializeString(3, _array1_);
    _mxarray2_ = mclInitializeDouble(-1.0);
    _mxarray3_ = mclInitializeString(14, _array4_);
    _mxarray5_ = mclInitializeString(4, _array6_);
    _mxarray7_ = mclInitializeString(1, _array8_);
    _mxarray9_ = mclInitializeString(2, _array10_);
    _mxarray11_ = mclInitializeString(5, _array12_);
    _mxarray13_ = mclInitializeString(4, _array14_);
    _mxarray15_ = mclInitializeString(1, _array16_);
    _mxarray17_ = mclInitializeString(6, _array18_);
    _mxarray19_ = mclInitializeDouble(1.0);
    _mxarray20_ = mclInitializeString(3, _array21_);
}

void TerminateModule_sofmtrain(void) {
    mxDestroyArray(_mxarray20_);
    mxDestroyArray(_mxarray19_);
    mxDestroyArray(_mxarray17_);
    mxDestroyArray(_mxarray15_);
    mxDestroyArray(_mxarray13_);
    mxDestroyArray(_mxarray11_);
    mxDestroyArray(_mxarray9_);
    mxDestroyArray(_mxarray7_);
    mxDestroyArray(_mxarray5_);
    mxDestroyArray(_mxarray3_);
    mxDestroyArray(_mxarray2_);
    mxDestroyArray(_mxarray0_);
}

static mxArray * Msofmtrain(int nargout_,
                            mxArray * ModelNo,
                            mxArray * NetPara,
                            mxArray * TrainPara,
                            mxArray * DataDir);

_mexLocalFunctionTable _local_function_table_sofmtrain
  = { 0, (mexFunctionTableEntry *)NULL };

/*
 * The function "mlfSofmtrain" contains the normal interface for the
 * "sofmtrain" M-function from file
 * "d:\matlab6p5\work\nntoolkit\sofm\sofmtrain.m" (lines 1-56). This function
 * processes any input arguments and passes them to the implementation version
 * of the function, appearing below.
 */
mxArray * mlfSofmtrain(mxArray * ModelNo,
                       mxArray * NetPara,
                       mxArray * TrainPara,
                       mxArray * DataDir) {
    int nargout = 1;
    mxArray * retstr = NULL;
    mlfEnterNewContext(0, 4, ModelNo, NetPara, TrainPara, DataDir);
    retstr = Msofmtrain(nargout, ModelNo, NetPara, TrainPara, DataDir);
    mlfRestorePreviousContext(0, 4, ModelNo, NetPara, TrainPara, DataDir);
    return mlfReturnValue(retstr);
}
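
/*
 * Illustrative sketch (not part of the MATLAB Compiler output): one way a
 * host program might call the mlfSofmtrain interface above, assuming the
 * MATLAB Compiler runtime and the nnToolKit library have already been
 * initialized elsewhere (that setup is omitted because its entry points
 * depend on the build). The parameter layout follows the comments inside
 * Msofmtrain below: NetPara = [input nodes, class count, sample count] and
 * TrainPara = [display interval, max steps, learning rate]; the model number
 * "1" and data directory "." are hypothetical example values. Guarded by
 * SOFMTRAIN_USAGE_EXAMPLE so the generated file itself is unaffected.
 */
#ifdef SOFMTRAIN_USAGE_EXAMPLE
static double example_call_sofmtrain(void) {
    /* ModelNo and DataDir are strings; ModelNo is spliced into the data file
       name input_para<ModelNo>.txt and the weight file w<ModelNo>.dat. */
    mxArray * model_no = mxCreateString("1");
    mxArray * data_dir = mxCreateString(".");
    /* NetPara: 1 input node, 5 classes, 65536 training samples. */
    mxArray * net_para = mxCreateDoubleMatrix(1, 3, mxREAL);
    /* TrainPara: show progress every 50 steps, at most 3000 steps, lr 0.02. */
    mxArray * train_para = mxCreateDoubleMatrix(1, 3, mxREAL);
    mxArray * ret = NULL;
    double result;
    double * p = mxGetPr(net_para);
    p[0] = 1.0; p[1] = 5.0; p[2] = 65536.0;
    p = mxGetPr(train_para);
    p[0] = 50.0; p[1] = 3000.0; p[2] = 0.02;
    /* Run the training; retstr starts at -1 and is set to 1 once training
       completes (see Msofmtrain below). */
    ret = mlfSofmtrain(model_no, net_para, train_para, data_dir);
    result = mxGetScalar(ret);
    mxDestroyArray(ret);
    mxDestroyArray(train_para);
    mxDestroyArray(net_para);
    mxDestroyArray(data_dir);
    mxDestroyArray(model_no);
    return result;
}
#endif /* SOFMTRAIN_USAGE_EXAMPLE */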

/*
 * The function "mlxSofmtrain" contains the feval interface for the "sofmtrain"
 * M-function from file "d:\matlab6p5\work\nntoolkit\sofm\sofmtrain.m" (lines
 * 1-56). The feval function calls the implementation version of sofmtrain
 * through this function. This function processes any input arguments and
 * passes them to the implementation version of the function, appearing below.
 */
void mlxSofmtrain(int nlhs, mxArray * plhs[], int nrhs, mxArray * prhs[]) {
    mxArray * mprhs[4];
    mxArray * mplhs[1];
    int i;
    if (nlhs > 1) {
        mlfError(
          mxCreateString(
            "Run-time Error: File: sofmtrain Line: 4 Column:"
            " 1 The function \"sofmtrain\" was called with m"
            "ore than the declared number of outputs (1)."),
          NULL);
    }
    if (nrhs > 4) {
        mlfError(
          mxCreateString(
            "Run-time Error: File: sofmtrain Line: 4 Column:"
            " 1 The function \"sofmtrain\" was called with m"
            "ore than the declared number of inputs (4)."),
          NULL);
    }
    for (i = 0; i < 1; ++i) {
        mplhs[i] = NULL;
    }
    for (i = 0; i < 4 && i < nrhs; ++i) {
        mprhs[i] = prhs[i];
    }
    for (; i < 4; ++i) {
        mprhs[i] = NULL;
    }
    mlfEnterNewContext(0, 4, mprhs[0], mprhs[1], mprhs[2], mprhs[3]);
    mplhs[0] = Msofmtrain(nlhs, mprhs[0], mprhs[1], mprhs[2], mprhs[3]);
    mlfRestorePreviousContext(0, 4, mprhs[0], mprhs[1], mprhs[2], mprhs[3]);
    plhs[0] = mplhs[0];
}
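
/*
 * Illustrative sketch (not part of the MATLAB Compiler output): the same call
 * expressed through the feval-style mlxSofmtrain interface above, which takes
 * mexFunction-style plhs/prhs argument arrays. The four inputs are assumed to
 * be built the same way as in example_call_sofmtrain earlier in this file,
 * and the runtime setup is again omitted. Guarded by SOFMTRAIN_USAGE_EXAMPLE
 * so the generated file itself is unaffected.
 */
#ifdef SOFMTRAIN_USAGE_EXAMPLE
static double example_feval_sofmtrain(mxArray * model_no,
                                      mxArray * net_para,
                                      mxArray * train_para,
                                      mxArray * data_dir) {
    mxArray * plhs[1] = { NULL };
    mxArray * prhs[4];
    double result;
    prhs[0] = model_no;
    prhs[1] = net_para;
    prhs[2] = train_para;
    prhs[3] = data_dir;
    /* One output, four inputs, exactly as mlxSofmtrain validates above. */
    mlxSofmtrain(1, plhs, 4, prhs);
    result = mxGetScalar(plhs[0]);
    mxDestroyArray(plhs[0]);
    return result;
}
#endif /* SOFMTRAIN_USAGE_EXAMPLE */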

/*
 * The function "Msofmtrain" is the implementation version of the "sofmtrain"
 * M-function from file "d:\matlab6p5\work\nntoolkit\sofm\sofmtrain.m" (lines
 * 1-56). It contains the actual compiled code for that M-function. It is a
 * static function and must only be called from one of the interface functions,
 * appearing above.
 */
/*
 * %This is the SOFM network training routine.
 * %A Self-Organizing Feature Map (SOFM) assumes that when a neural network receives
 * %external input patterns it organizes itself into regions, each responding to the
 * %input with its own characteristics; this process is automatic. The connection
 * %weights of the neurons take on a characteristic distribution: the nearest
 * %neighbours excite one another, more distant neurons inhibit one another, and
 * %still more distant ones exert only a weak excitatory effect. SOFM is an
 * %unsupervised clustering method. (A standalone sketch of this update rule
 * %follows the Msofmtrain implementation below.)
 * %The routine performs the classification training and saves the weights and the
 * %per-class pixel matrices after classification.
 * function retstr = SofmTrain(ModelNo,NetPara,TrainPara,DataDir)
 */
static mxArray * Msofmtrain(int nargout_,
                            mxArray * ModelNo,
                            mxArray * NetPara,
                            mxArray * TrainPara,
                            mxArray * DataDir) {
    mexLocalFunctionTable save_local_function_table_
      = mclSetCurrentLocalFunctionTable(&_local_function_table_sofmtrain);
    mxArray * retstr = NULL;
    mxArray * fww = NULL;
    mxArray * m = NULL;
    mxArray * tp = NULL;
    mxArray * lr = NULL;
    mxArray * me = NULL;
    mxArray * df = NULL;
    mxArray * w = NULL;
    mxArray * count = NULL;
    mxArray * x = NULL;
    mxArray * frin_para = NULL;
    mxArray * DataNum = NULL;
    mxArray * ClassifyNum = NULL;
    mxArray * InputDim = NULL;
    mxArray * olddir = NULL;
    mxArray * ans = NULL;
    mclCopyArray(&ModelNo);
    mclCopyArray(&NetPara);
    mclCopyArray(&TrainPara);
    mclCopyArray(&DataDir);
    /*
     * NNTWARN OFF
     */
    mlfNntwarn(_mxarray0_);
    /*
     * retstr=-1;
     */
    mlfAssign(&retstr, _mxarray2_);
    /*
     * %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
     * %ModelNo='1';
     * % Network parameters
     * %NetPara(1)=1;      %number of input-layer nodes
     * %NetPara(2)=5;      %number of classes
     * %NetPara(3)=65536;  %number of training samples
     * 
     * %TrainPara(1)=50;   % display progress once every df training steps
     * %TrainPara(2)=3000; % maximum number of training steps
     * %TrainPara(3)=0.02; % learning rate
     * 
     * %DataDir='.';
     * %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
     * %Remember the original working directory
     * olddir=pwd;
     */
    mlfAssign(&olddir, mlfPwd());
    /*
     * %Change to the directory containing the data
     * cd(DataDir);
     */
    mclAssignAns(&ans, mlfNCd(0, mclVa(DataDir, "DataDir")));
    /*
     * 
     * % Network parameters
     * InputDim=NetPara(1);       %number of input-layer nodes
     */
    mlfAssign(&InputDim, mclIntArrayRef1(mclVa(NetPara, "NetPara"), 1));
    /*
     * ClassifyNum=NetPara(2);    %number of classes
     */
    mlfAssign(&ClassifyNum, mclIntArrayRef1(mclVa(NetPara, "NetPara"), 2));
    /*
     * DataNum=NetPara(3);        %number of training samples
     */
    mlfAssign(&DataNum, mclIntArrayRef1(mclVa(NetPara, "NetPara"), 3));
    /*
     * 
     * frin_para=fopen(sprintf('input_para%s%s',ModelNo,'.txt'),'r');      %input data file
     */
    mlfAssign(
      &frin_para,
      mlfFopen(
        NULL,
        NULL,
        mlfSprintf(
          NULL, _mxarray3_, mclVa(ModelNo, "ModelNo"), _mxarray5_, NULL),
        _mxarray7_,
        NULL));
    /*
     * [x,count]=fscanf(frin_para,'%f',[InputDim,DataNum]);   %read the input data
     */
    mlfAssign(
      &x,
      mlfFscanf(
        &count,
        mclVv(frin_para, "frin_para"),
        _mxarray9_,
        mlfHorzcat(
          mclVv(InputDim, "InputDim"), mclVv(DataNum, "DataNum"), NULL)));
    /*
     * fclose(frin_para);
     */
    mclAssignAns(&ans, mlfFclose(mclVv(frin_para, "frin_para")));
    /*
     * 
     * % Initialize the forward network
     * w=initsm(x,ClassifyNum);
     */
    mlfAssign(&w, mlfInitsm(mclVv(x, "x"), mclVv(ClassifyNum, "ClassifyNum")));
    /*
     * % Display progress once every df training steps
     * df=TrainPara(1);
     */
    mlfAssign(&df, mclIntArrayRef1(mclVa(TrainPara, "TrainPara"), 1));
    /*
     * % Maximum number of training steps
     * me=TrainPara(2)
     */
    mlfAssign(&me, mclIntArrayRef1(mclVa(TrainPara, "TrainPara"), 2));
    mclPrintArray(mclVv(me, "me"), "me");
    /*
     * % Learning rate
     * lr=TrainPara(3);
     */
    mlfAssign(&lr, mclIntArrayRef1(mclVa(TrainPara, "TrainPara"), 3));
    /*
     * 
     * % Neural network training parameters
     * tp=[df me lr];
     */
    mlfAssign(
      &tp, mlfHorzcat(mclVv(df, "df"), mclVv(me, "me"), mclVv(lr, "lr"), NULL));
    /*
     * m=nbgrid(ClassifyNum);
     */
    mlfAssign(
      &m, mlfNbgrid(mclVv(ClassifyNum, "ClassifyNum"), NULL, NULL, NULL, NULL));
    /*
     * % Train the competitive layer
     * w=trainsm(w,m,x,tp);
     */
    mlfAssign(
      &w,
      mlfTrainsm(mclVv(w, "w"), mclVv(m, "m"), mclVv(x, "x"), mclVv(tp, "tp")));
    /*
     * % Write the trained weights to a file
     * fww=fopen(sprintf('w%s%s',ModelNo,'.dat'),'w');
     */
    mlfAssign(
      &fww,
      mlfFopen(
        NULL,
        NULL,
        mlfSprintf(
          NULL, _mxarray11_, mclVa(ModelNo, "ModelNo"), _mxarray13_, NULL),
        _mxarray15_,
        NULL));
    /*
     * fprintf(fww,'%9.4f ',w);
     */
    mclAssignAns(
      &ans,
      mlfNFprintf(0, mclVv(fww, "fww"), _mxarray17_, mclVv(w, "w"), NULL));
    /*
     * fclose(fww);
     */
    mclAssignAns(&ans, mlfFclose(mclVv(fww, "fww")));
    /*
     * 
     * cd(olddir);
     */
    mclAssignAns(&ans, mlfNCd(0, mclVv(olddir, "olddir")));
    /*
     * 
     * retstr=1;
     */
    mlfAssign(&retstr, _mxarray19_);
    /*
     * close all;
     */
    mclAssignAns(&ans, mlfNClose(0, _mxarray20_, NULL));
    mclValidateOutput(retstr, 1, nargout_, "retstr", "sofmtrain");
    mxDestroyArray(ans);
    mxDestroyArray(olddir);
    mxDestroyArray(InputDim);
    mxDestroyArray(ClassifyNum);
    mxDestroyArray(DataNum);
    mxDestroyArray(frin_para);
    mxDestroyArray(x);
    mxDestroyArray(count);
    mxDestroyArray(w);
    mxDestroyArray(df);
    mxDestroyArray(me);
    mxDestroyArray(lr);
    mxDestroyArray(tp);
    mxDestroyArray(m);
    mxDestroyArray(fww);
    mxDestroyArray(DataDir);
    mxDestroyArray(TrainPara);
    mxDestroyArray(NetPara);
    mxDestroyArray(ModelNo);
    mclSetCurrentLocalFunctionTable(save_local_function_table_);
    return retstr;
}
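
/*
 * Conceptual sketch (not part of the MATLAB Compiler output): the textbook
 * SOFM update that the trainsm/nbgrid pair above stands for. Given a sample,
 * the best-matching unit is found by Euclidean distance and then it and its
 * grid neighbours are pulled toward the sample by the learning rate. This is
 * the standard rule only, not the toolkit's actual trainsm code; all names
 * and the row-major layout below are local to this sketch. Guarded by
 * SOFMTRAIN_SOM_SKETCH so the generated file itself is unaffected.
 */
#ifdef SOFMTRAIN_SOM_SKETCH
/* w: num_units x dim weight matrix (row-major, one row per competitive unit),
   m: num_units x num_units neighbourhood mask (nonzero if two units are grid
   neighbours, in the spirit of nbgrid), x: one input sample of length dim. */
static void som_update_sketch(double * w, const double * m, const double * x,
                              int num_units, int dim, double lr) {
    int i, j, best = 0;
    double best_dist = -1.0;
    /* Competition: pick the unit whose weight row is closest to the sample. */
    for (i = 0; i < num_units; ++i) {
        double d = 0.0;
        for (j = 0; j < dim; ++j) {
            double diff = w[i * dim + j] - x[j];
            d += diff * diff;
        }
        if (best_dist < 0.0 || d < best_dist) {
            best_dist = d;
            best = i;
        }
    }
    /* Cooperation: move the winner and its grid neighbours toward the sample. */
    for (i = 0; i < num_units; ++i) {
        if (i == best || m[best * num_units + i] != 0.0) {
            for (j = 0; j < dim; ++j) {
                w[i * dim + j] += lr * (x[j] - w[i * dim + j]);
            }
        }
    }
}
#endif /* SOFMTRAIN_SOM_SKETCH */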
