📄 lvqt.c
  error(E_ARGCNT);              /* of arguments */
  if ((!fn_pat || !*fn_pat) && (!fn_in || !*fn_in))
    error(E_STDIN);             /* stdin must not be used twice */
#else
  if (k != 3) error(E_ARGCNT);  /* check the number of arguments */
  if (fn_hdr && (strcmp(fn_hdr, "-") == 0))
    fn_hdr = "";                /* convert "-" to "" */
  i = (!fn_dom || !*fn_dom) ? 1 : 0;
  if (!fn_pat || !*fn_pat) i++;
  if ( fn_hdr && !*fn_hdr) i++; /* check assignments of stdin: */
  if (i > 1) error(E_STDIN);    /* stdin must not be used twice */
  if (fn_hdr)                   /* set the header file flag */
    flags = AS_ATT | (flags & ~AS_DFLT);
#endif
  imode = code(initab, ininame);    /* code the initialization mode */
  if (imode < 0) error(E_MODE, ininame);
  nmode = code(nrmtab, nrmname);    /* code the normalization mode */
  if (nmode < 0) error(E_MODE, nrmname);
  method |= code(updtab, updname);  /* code the size update method */
  if (method < 0) error(E_METHOD, updname);
  if (params[0] <= 0) error(E_PARAM, params[0]);
  if (params[1] <  0) error(E_PARAM, params[1]);
  if (lrate < 0) error(E_LRATE, lrate);
  dseed(seed);                  /* init. the random number generator */

#ifdef MATVERSION               /* --- read prototype description --- */
  if (k > 2) {                  /* if an input file is given */
    if ((!fn_in || !*fn_in) && (!fn_pat || !*fn_pat))
      error(E_STDIN);           /* check assignments of std. input */
    scan = sc_create(fn_in);    /* create a scanner */
    if (!scan) error((!fn_in || !*fn_in) ? E_NOMEM : E_FOPEN, fn_in);
    fprintf(stderr, "\nreading %s ... ", sc_fname(scan));
    if (sc_nexter(scan) < 0)    /* start scanning (get first token) */
      error(E_PARSE, sc_fname(scan));
    lvq = lvq_parse(scan, 0);   /* parse the network description */
    if (!lvq || !sc_eof(scan))  /* and check for end of file */
      error(E_PARSE, sc_fname(scan));
    if (owrite) {               /* if to overwrite the parameters */
      lvq_type(lvq, type, radius);  /* set the prototype type */
      lvq_actfn(lvq, (gauss) ? rf_gauss : rf_cauchy, params);
    }                           /* set the activation function */
    valcnt = lvq_dim(lvq);      /* retrieve the number of dimensions */
    veccnt = lvq_cnt(lvq);      /* and the number of ref. vectors */
    fprintf(stderr, "[%d vector(s)] done.", veccnt);
  }                             /* print a success message */
#else                /* --- read attribute set (and neural network) --- */
  scan = sc_create(fn_dom);     /* create a scanner */
  if (!scan) error((!fn_dom || !*fn_dom) ? E_NOMEM : E_FOPEN, fn_dom);
  attset = as_create("domains", att_delete);
  if (!attset) error(E_NOMEM);  /* create an attribute set */
  fprintf(stderr, "\nreading %s ... ", sc_fname(scan));
  if ((sc_nexter(scan) < 0)     /* start scanning (get first token) */
  ||  (as_parse(attset, scan, AT_ALL) != 0)
  ||  (as_attcnt(attset) <= 0)) /* parse attribute set */
    error(E_PARSE, sc_fname(scan));
  if ((sc_token(scan) == T_ID)  /* if there is a neural network */
  &&  (strcmp(sc_value(scan), "lvqnet") == 0)) {
    lvq = lvq_parsex(scan, attset, 0);  /* parse the neural network */
    if (!lvq || !sc_eof(scan)) error(E_PARSE, sc_fname(scan));
    fprintf(stderr, "[%d attribute(s), ", as_attcnt(attset));
    fprintf(stderr, "%d vector(s)] done.\n", lvq_cnt(lvq));
    if (owrite) {               /* if to overwrite the parameters */
      lvq_type(lvq, type, radius);  /* set the prototype type */
      lvq_actfn(lvq, (gauss) ? rf_gauss : rf_cauchy, params);
    }                           /* set the activation function */
  }
  else {                        /* if there is no neural network */
    if (!sc_eof(scan)) error(E_PARSE, sc_fname(scan));
    fprintf(stderr, "[%d attribute(s)] done.\n", as_attcnt(attset));
  }                             /* print a success message */
  sc_delete(scan); scan = NULL; /* delete the scanner */
#endif

#ifdef MATVERSION               /* --- read training patterns --- */
  if (fn_pat && *fn_pat)        /* if a file name is given, */
    in = fopen(fn_pat, "r");    /* open the file for reading */
  else {                        /* if no file name is given, */
    in = stdin; fn_pat = "<stdin>"; }   /* use std. input */
  fprintf(stderr, "\nreading %s ... ", fn_pat);
  if (!in) error(E_FOPEN, fn_pat);
  tfscan = tfs_create();        /* create a table file scanner and */
  if (!tfscan) error(E_NOMEM);  /* set the separator characters */
  if (blanks)  tfs_chars(tfscan, TFS_BLANK,  blanks);
  if (fldseps) tfs_chars(tfscan, TFS_FLDSEP, fldseps);
  if (recseps) tfs_chars(tfscan, TFS_RECSEP, recseps);
  matrix = mat_readx(tfscan, in, 0, valcnt);
  if (!matrix) {                /* read the training patterns */
    err = tfs_err(tfscan);      /* on error get the error info. */
    error(err->code, fn_pat, err->rec, err->s, err->fld, err->exp);
  }                             /* abort with an error message */
  patcnt = mat_rowcnt(matrix);  /* get the number of data points */
  valcnt = mat_colcnt(matrix);  /* and their dimensionality */
  if (tfs_delim(tfscan) != TFS_EOF)   /* check for end of file */
    error(E_VALUE, fn_pat, patcnt+1, "\"\"", 1);
  if (in != stdin) {            /* if not read from standard input, */
    fclose(in); in = NULL; }    /* close the input file */
  if (patcnt <= 0) error(E_PATCNT);
  fprintf(stderr, "[%d pattern(s)] done.\n", patcnt);
#else                           /* --- read table --- */
  as_chars(attset, blanks, fldseps, recseps, "");
  table = io_tabin(attset, fn_hdr, fn_pat, flags, "table", 1);
  if (!table) error(1);         /* read the table file */
#endif

  /* --- create neural network --- */
  fprintf(stderr, "training neural network ... ");
  if (!lvq) {                   /* if no neural network was read */
#ifdef MATVERSION
    lvq = (LVQNET*)lvq_create(mat_colcnt(matrix), veccnt);
#else
    lvq = (LVQNET*)lvq_createx(attset, 0, veccnt);
#endif
    if (!lvq) error(E_NOMEM);   /* create a new network and set */
    lvq_type(lvq, type, radius);/* proto. type and act. function */
    lvq_actfn(lvq, (gauss) ? rf_gauss : rf_cauchy, params);
#ifdef MATVERSION
    for (i = patcnt; --i >= 0; )/* determine the ranges of values */
      lvq_reg(lvq, mat_row(matrix, i), 1);
    if (norm)                   /* if to normalize the data, */
      lvq_reg(lvq, NULL, 0);    /* compute the scaling factors */
#else
    for (i = tab_tplcnt(table); --i >= 0; )   /* determine the */
      lvq_regx(lvq, tab_tpl(table, i));       /* ranges of values */
    if (norm)                   /* if to normalize the data, */
      lvq_regx(lvq, NULL);      /* compute the scaling factors */
#endif
    if (imode != LVQ_POINTS)    /* if not to use the training data */
      lvq_init(lvq, imode, range, drand, NULL);
    else {                      /* if to select random data points, */
#ifdef MATVERSION
      mat_shuffle(matrix, drand);   /* shuffle the data points */
      for (i = veccnt; --i >= 0; )
        lvq_init(lvq, imode, range, drand, mat_row(matrix, i % patcnt));
#else
      tab_shuffle(table, 0, INT_MAX, drand);
      k = tab_tplcnt(table);    /* shuffle the data tuples */
      for (i = veccnt; --i >= 0; ) {
        lvq_valuex(lvq, tab_tpl(table, i % k));
        lvq_init(lvq, imode, range, drand, NULL);
      }                         /* use the first veccnt tuples */
#endif                          /* (i.e. their corresp. data vectors) */
    }                           /* as the initial reference vectors */
  }

  /* --- train neural network --- */
  lvq_norm  (lvq, nmode, wtarf);/* set the normalization mode, */
  lvq_lrate (lvq, lrate, decay);/* the learning rate and its decay, */
  lvq_exp   (lvq, exp);         /* the adaptation exponent, */
  lvq_scale (lvq, scale);       /* the size scaling factor, */
  lvq_method(lvq, method);      /* and the update method */
  u = update;                   /* initialize the counter */
  for (k = 0; k < epochs; k++) {/* compute a maximum number of epochs */
    if ((k & 0x00ff) == 0)      /* print the current number of epochs */
      fprintf(stderr, "%8d\b\b\b\b\b\b\b\b", k);
#ifdef MATVERSION
    if (shuffle)                /* shuffle the training patterns */
      mat_shuffle(matrix, drand);
    for (max = 0, i = patcnt; --i >= 0; ) {
      lvq_aggr(lvq, mat_row(matrix, i), 1);
#else                           /* aggregate the data vectors */
    if (shuffle)                /* shuffle the training patterns */
      tab_shuffle(table, 0, INT_MAX, drand);
    for (max = 0, i = tab_tplcnt(table); --i >= 0; ) {
      tpl = tab_tpl(table, i);  /* traverse the tuples and */
      lvq_valuex(lvq, tpl);     /* aggregate the data vector */
      lvq_aggr(lvq, NULL, tpl_getwgt(tpl));
#endif
      if ((update > 0) && (--u <= 0)) {
        u = update;             /* update the prototypes */
        chg = lvq_update(lvq);  /* every 'update' patterns */
        if (chg > max) max = chg;
      }                         /* determine the maximum change */
    }                           /* of a center coordinate */
    if (update <= 0)            /* if no number of patterns is given, */
      max = lvq_update(lvq);    /* update once in each epoch */
    if (max <= trmchg) break;   /* check the termination criterion */
  }                             /* write a success message */
  fprintf(stderr, "[%d epoch(s)] done.\n", k);

  /* --- write neural network --- */
  if (fn_out && *fn_out)        /* if an output file name is given, */
    out = fopen(fn_out, "w");   /* open the output file */
  else {                        /* if no output file name is given, */
    out = stdout; fn_out = "<stdout>"; }  /* write to std. output */
  fprintf(stderr, "writing %s ... ", fn_out);
  if (!out) error(E_FOPEN, fn_out);
#ifndef MATVERSION
  if (as_desc(attset, out, AS_TITLE|AS_MARKED|AS_IVALS, maxlen) != 0)
    error(E_FWRITE, fn_out);    /* describe attribute domains */
  fprintf(out, "\n");           /* leave one line empty */
#endif
  if (lvq_desc(lvq, out, dmode, maxlen) != 0)
    error(E_FWRITE, fn_out);    /* describe the reference vectors */
  if (out != stdout) {          /* if not written to standard output, */
    i = fclose(out); out = NULL;/* close the output file */
    if (i != 0) error(E_FWRITE, fn_out);
  }                             /* check for a write error and */
  fprintf(stderr, "done.\n");   /* print a success message */

  /* --- clean up --- */
#ifndef NDEBUG
#ifdef MATVERSION
  mat_delete(matrix);           /* delete the training patterns */
  tfs_delete(tfscan);           /* and the table file scanner */
#else
  tab_delete(table, 1);         /* delete the training patterns */
#endif
  lvq_delete(lvq);              /* delete the neural network */
#endif
#ifdef STORAGE
  showmem("at end of program"); /* check memory usage */
#endif
  return 0;                     /* return 'ok' */
}  /* main() */