neural.c
/******************************************************************************/
/*                                                                            */
/*  NEURAL - Main program for implementing all neural network functions      */
/*                                                                            */
/*  Copyright (c) 1993 by Academic Press, Inc.                                */
/*                                                                            */
/*  All rights reserved.  Permission is hereby granted, until further notice, */
/*  to make copies of this diskette, which are not for resale, provided these */
/*  copies are made from this master diskette only, and provided that the    */
/*  following copyright notice appears on the diskette label:                 */
/*  (c) 1993 by Academic Press, Inc.                                          */
/*                                                                            */
/*  Except as previously stated, no part of the computer program embodied in  */
/*  this diskette may be reproduced or transmitted in any form or by any means,*/
/*  electronic or mechanical, including input into storage in any information */
/*  system for resale, without permission in writing from the publisher.      */
/*                                                                            */
/*  Produced in the United States of America.                                 */
/*                                                                            */
/*  ISBN 0-12-479041-0                                                        */
/*                                                                            */
/******************************************************************************/

#include <stdio.h>
#include <string.h>
#include <math.h>
#include <ctype.h>
#include <stdlib.h>
#include "const.h"     // System and limitation constants, typedefs, structs
#include "classes.h"   // Includes all class headers
#include "funcdefs.h"  // Function prototypes

/*
   These are used in MEM.CPP for diagnostic memory allocation
*/

extern int mem_log ;        // Keep a log on disk?
extern long mem_used ;      // Keeps track of memory usage
extern char mem_name[80] ;  // Full path name of log file

/*
   Forward declarations for static routines
*/

int check_anneal ( char *command , char *rest ,
                   struct AnnealParams *anneal_params ) ;
int check_genetic ( char *command , char *rest ,
                    struct GenInitParams *geninit_params ) ;
int check_kohonen ( char *command , char *rest ,
                    struct KohParams *koh_params , Network **net ) ;
int check_learn_params ( char *command , char *rest ,
                         struct LearnParams *learn_params , int netmod ) ;
int get_koh_init ( char *rest ) ;
int get_layer_init ( char *rest ) ;
int get_yn ( char *msg ) ;
int ok_to_clear_tset ( TrainingSet **tset ) ;
int ok_to_clear_weights ( Network **network ) ;

/*
--------------------------------------------------------------------------------

   Main entry point

--------------------------------------------------------------------------------
*/

int main (
   int argc ,    // Number of command line arguments (includes prog name)
   char *argv[]  // Arguments (prog name is argv[0])
   )
{

/*
   Declarations of local variables
*/

/*
   User's command control line related variables are here.

   Control_file_number and control_files permit nesting of 'CONTROL' commands.
   If control_file_number equals -1, control commands are read from stdin.
   Otherwise they are read from that file in FILE *control_files.
   Up to MAX_CONTROL_FILES can be stacked.
*/

   int control_file_number = -1 ;            // Stack pointer for control files
   FILE *control_files[MAX_CONTROL_FILES] ;  // This is the stack

   char *control_line ;     // User's commands here
   char *command, *rest ;   // Pointers to its command and parameter parts
   int n_command, n_rest ;  // Lengths of those parts

/*
   These are network parameters which may be set by the user via commands.
   They are initialized to defaults which indicate that the user has not yet
   set them.  As they are set, their current values are placed here.  When
   learning is done for a network, their values are copied from here into the
   network object.  When a network is read, the object's values are copied
   from it to here.  Otherwise, these variables are not used; the values in
   the network object itself are used.  The only purpose of these variables
   is to keep track of current values.
*/

   int net_model = -1 ;  // Network model (see NETMOD_? in CONST.H)
   int out_model = -1 ;  // Output model (see OUTMOD_? in CONST.H)
   int n_inputs = -1 ;   // Number of input neurons
   int n_outputs = -1 ;  // Number of output neurons
   int n_hidden1 = -1 ;  // Number of hidden layer one neurons
   int n_hidden2 = -1 ;  // Ditto layer 2 (0 if just one hidden layer)

   TrainingSet *tset = NULL ;             // Training set here
   Network *network = NULL ;              // Network here
   struct LearnParams learn_params ;      // General learning parameters
   struct AnnealParams anneal_params ;    // Simulated annealing parameters
   struct GenInitParams geninit_params ;  // Genetic initialization parameters
   struct KohParams koh_params ;          // Kohonen parameters

   int classif_output = -1 ;  // Current class (0=reject) for classif training
   char out_file[80] = "" ;   // File for EXECUTE output
   double threshold ;         // CLASSIFY confusion reject cutoff

/*
   Miscellaneous variables
*/

   int i, n, m ;
   double p ;
   char *msg ;
   FILE *fp ;

/*
--------------------------------------------------------------------------------

   Program starts here.

   Verify that a careless user didn't fail to set the integer size
   correctly when compiling.

--------------------------------------------------------------------------------
*/

#if VERSION_16_BIT
   if (sizeof(int) > 2) {
      printf ( "\nRecompile with VERSION_16_BIT set to 0 in CONST.H" ) ;
      exit ( 1 ) ;
      }
#else
   if (sizeof(int) < 4) {
      printf ( "\nRecompile with VERSION_16_BIT set to 1 in CONST.H" ) ;
      exit ( 1 ) ;
      }
#endif

   printf ( "\nNEURAL - Program to train and test neural networks" ) ;
   printf ( "\nCopyright (c) 1993 by Academic Press, Inc." ) ;
   printf ( "\nAll rights reserved.  Permission is hereby granted, until further notice," ) ;
   printf ( "\nto make copies of this diskette, which are not for resale, provided these" ) ;
   printf ( "\ncopies are made from this master diskette only, and provided that the" ) ;
   printf ( "\nfollowing copyright notice appears on the diskette label:" ) ;
   printf ( "\n(c) 1993 by Academic Press, Inc." ) ;
   printf ( "\nExcept as previously stated, no part of the computer program embodied in" ) ;
   printf ( "\nthis diskette may be reproduced or transmitted in any form or by any means," ) ;
   printf ( "\nelectronic or mechanical, including input into storage in any information" ) ;
   printf ( "\nsystem for resale, without permission in writing from the publisher." ) ;
   printf ( "\nProduced in the United States of America." ) ;
   printf ( "\nISBN 0-12-479041-0" ) ;

/*
   Process command line parameters
*/

   mem_name[0] = 0 ;  // Default is no memory allocation file

   for (i=1 ; i<argc ; i++) {   // Process all command line args
      str_to_upr ( argv[i] ) ;  // Easier if all upper case

      if (! strcmp ( argv[i] , "/DEBUG" )) {
         sscanf ( argv[++i] , "%s" , mem_name ) ;
         if ((strlen ( mem_name ) > 1) || ! isalpha ( mem_name[0] )) {
            printf ( "\nIllegal DEBUG drive (%s); must be 1 letter.", mem_name ) ;
            exit ( 1 ) ;
            }
         continue ;
         }

      printf ( "\nIllegal command line parameter (%s)", argv[i] ) ;
      exit ( 1 ) ;
      }

/*
   Initialize memory allocation debugging
*/

   if (strlen ( mem_name )) {
      strcat ( mem_name , ":mem.log" ) ;
      fp = fopen ( mem_name , "wt" ) ;
      if (fp == NULL) {
         printf ( "\nCannot open debugging file %s", mem_name ) ;
         exit ( 1 ) ;
         }
      fclose ( fp ) ;
      mem_log = 1 ;
      }
   else
      mem_log = 0 ;

   mem_used = 0 ;

/*
   Initialize defaults
*/

   learn_params.init = -1 ;
   learn_params.quit_err = 0.0 ;
   learn_params.retries = 32767 ;

   anneal_params.temps0 = 3 ;
   anneal_params.temps = 4 ;
   anneal_params.iters0 = 50 ;
   anneal_params.iters = 20 ;
   anneal_params.setback0 = 50 ;
   anneal_params.setback = 20 ;
   anneal_params.start0 = 3.0 ;
   anneal_params.start = 4.0 ;
   anneal_params.stop0 = 1.0 ;
   anneal_params.stop = 0.02 ;

   geninit_params.pool = 50 ;
   geninit_params.gens = 3 ;
   geninit_params.climb = 0 ;
   geninit_params.overinit = 1.5 ;
   geninit_params.pcross = 0.8 ;
   geninit_params.pmutate = 0.0001 ;

   koh_params.normalization = 0 ;  // 0=multiplicative, 1=Z
   koh_params.learn_method = 1 ;   // 0=additive, 1=subtractive
   koh_params.rate = 0.4 ;         // learning rate
   koh_params.reduction = 0.99 ;   // learning rate reduction

   learn_params.ap = &anneal_params ;
   learn_params.gp = &geninit_params ;
   learn_params.kp = &koh_params ;

   act_func_init () ; // Initialize interpolation table for activation function

   MEMTEXT ( "NEURAL: control_line, msg" ) ;
   if (((control_line = (char *) MALLOC ( CONTROL_LINE_LENGTH+1 )) == NULL)
    || ((msg = (char *) MALLOC ( CONTROL_LINE_LENGTH+1 )) == NULL)) {
      printf ( "\nInsufficient memory" ) ;
      exit ( 1 ) ;
      }

/*
   Main loop processes all commands
*/

   for (;;) {

      get_control_line ( control_line , &control_file_number , control_files ) ;

      split_control_line ( control_line , &command , &n_command ,
                           &rest , &n_rest ) ;

      if (! n_command) {
         if (n_rest) {
            sprintf ( msg , "No colon after command: %s", rest ) ;
            error_message ( msg ) ;
            }
         continue ;
         }

      sprintf ( msg , "%s : %s", command, rest ) ;
      normal_message ( msg ) ;

/*
   Act on the command
*/

      if (! strcmp ( command , "QUIT" ))
         break ;

      if (! strcmp ( command , "CONTROL" )) {
         stack_control_file ( rest , &control_file_number , control_files ) ;
         continue ;
         }

      if (! strcmp ( command , "NETWORK MODEL" )) {
         if (! strcmp ( rest , "LAYER" ))
            n = NETMOD_LAYER ;
         else if (! strcmp ( rest , "KOHONEN" ))
            n = NETMOD_KOH ;
         else {
            sprintf ( msg , "Illegal NETWORK MODEL: %s", rest ) ;
            error_message ( msg ) ;
            continue ;
            }
         if (net_model == n)
            continue ;
         if (ok_to_clear_weights( &network )) {
            net_model = n ;
            learn_params.init = -1 ;
            }
         else
            warning_message ( "Command aborted" ) ;
         continue ;
         }

      if (! strcmp ( command , "OUTPUT MODEL" )) {
         if (! strcmp ( rest , "CLASSIFY" ))
            n = OUTMOD_CLASSIFY ;
         else if (! strcmp ( rest , "AUTO" ))
            n = OUTMOD_AUTO ;
         else if (! strcmp ( rest , "GENERAL" ))
            n = OUTMOD_GENERAL ;
         else {
            sprintf ( msg , "Illegal OUTPUT MODEL: %s", rest ) ;
            error_message ( msg ) ;
            continue ;
            }
         if (out_model == n)
            continue ;
         if ((ok_to_clear_tset( &tset )) && (ok_to_clear_weights( &network )))
            out_model = n ;
         else
            warning_message ( "Command aborted" ) ;
         continue ;
         }

      if (! strcmp ( command , "N INPUTS" )) {
         m = sscanf ( rest , "%d" , &n ) ;
         if ((m <= 0) || (n <= 0) || (n > MAX_INPUTS)) {
            sprintf ( msg , "Illegal N INPUTS: %s", rest ) ;
            error_message ( msg ) ;
            continue ;
            }
         if (n_inputs == n)
            continue ;
         if ((ok_to_clear_tset( &tset )) && (ok_to_clear_weights( &network )))
            n_inputs = n ;
         else
            warning_message ( "Command aborted" ) ;
         continue ;
         }

      if (! strcmp ( command , "N OUTPUTS" )) {
         m = sscanf ( rest , "%d" , &n ) ;
         if ((m <= 0) || (n <= 0) || (n > MAX_OUTPUTS)) {
            sprintf ( msg , "Illegal N OUTPUTS: %s", rest ) ;
            error_message ( msg ) ;
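
The listing is truncated above, partway through the N OUTPUTS handler. The main loop it belongs to reads one command per line, in the form COMMAND : parameter, either interactively from stdin or from a file pushed with the CONTROL command. As an illustration only, using just the commands visible in this truncated listing and assuming that every line, including QUIT, carries the colon separator that split_control_line looks for, a control file that defines the geometry of a small layered classifier and then exits might look like this:

   NETWORK MODEL : LAYER
   OUTPUT MODEL : CLASSIFY
   N INPUTS : 10
   N OUTPUTS : 3
   QUIT :

Such a file would presumably be run from the interactive prompt with a line of the form CONTROL : filename, which stacks it via stack_control_file; the full program accepts many more commands than the truncated excerpt shows.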
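
The comment near the top of main() describes how CONTROL commands nest: control_file_number is a stack pointer that starts at -1 (meaning read from stdin), and each stacked file handle goes into control_files, up to MAX_CONTROL_FILES deep. The routines get_control_line and stack_control_file that implement this live elsewhere in the package and are not part of this listing; what follows is only a minimal sketch of that mechanism, consistent with the call signatures visible in main(), with its own assumed values for MAX_CONTROL_FILES and CONTROL_LINE_LENGTH (the real ones are in CONST.H) and none of the original's error reporting.

#include <stdio.h>
#include <string.h>

#define MAX_CONTROL_FILES 16     /* Assumed here; the real limit is in CONST.H */
#define CONTROL_LINE_LENGTH 255  /* Assumed here; the real limit is in CONST.H */

/* Push a control file onto the stack; later commands are read from it first */
void stack_control_file_sketch ( char *name , int *num , FILE **files )
{
   FILE *fp ;

   if (*num >= MAX_CONTROL_FILES-1)  /* Stack full; the real code would warn */
      return ;
   fp = fopen ( name , "rt" ) ;
   if (fp != NULL)
      files[++(*num)] = fp ;
}

/* Get the next command line from the top control file, or from stdin when the
   stack is empty (*num == -1).  An exhausted file is closed and popped, and
   reading falls back to the source beneath it. */
void get_control_line_sketch ( char *line , int *num , FILE **files )
{
   FILE *src ;

   for (;;) {
      src = (*num >= 0) ? files[*num] : stdin ;
      if (fgets ( line , CONTROL_LINE_LENGTH , src ) != NULL) {
         line[strcspn ( line , "\r\n" )] = 0 ;  /* Strip the trailing newline */
         return ;
         }
      if (*num < 0) {        /* EOF on stdin: hand back an empty line */
         line[0] = 0 ;
         return ;
         }
      fclose ( files[(*num)--] ) ;  /* Pop the exhausted file and try again */
      }
}

Keeping the stack pointer and the file array in the caller, as main() does, lets the same pair of routines serve both interactive use and arbitrarily nested CONTROL files without any global state beyond what the listing already declares.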