📄 gen_init.c
/******************************************************************************/
/*                                                                            */
/*  GEN_INIT - Use genetic method to initialize LayerNet weights              */
/*                                                                            */
/*  Copyright (c) 1993 by Academic Press, Inc.                                */
/*                                                                            */
/*  All rights reserved.  Permission is hereby granted, until further notice, */
/*  to make copies of this diskette, which are not for resale, provided these */
/*  copies are made from this master diskette only, and provided that the     */
/*  following copyright notice appears on the diskette label:                 */
/*  (c) 1993 by Academic Press, Inc.                                          */
/*                                                                            */
/*  Except as previously stated, no part of the computer program embodied in  */
/*  this diskette may be reproduced or transmitted in any form or by any      */
/*  means, electronic or mechanical, including input into storage in any      */
/*  information system for resale, without permission in writing from the     */
/*  publisher.                                                                */
/*                                                                            */
/*  Produced in the United States of America.                                 */
/*                                                                            */
/*  ISBN 0-12-479041-0                                                        */
/*                                                                            */
/******************************************************************************/

#include <stdio.h>
#include <string.h>
#include <math.h>
#include <ctype.h>
#include <stdlib.h>
#include "const.h"     // System and limitation constants, typedefs, structs
#include "classes.h"   // Includes all class headers
#include "funcdefs.h"  // Function prototypes

/*
   Declarations for local subroutines
*/

static void decode ( char *popptr , int nin , int nh1 , int nh2 ,
                     double *w1 , double *w2 ) ;
static void error_to_fitness ( int popsize , double favor_best ,
                               double fitfac , double *errors ,
                               double *fitness ) ;
static void fitness_to_choices ( int popsize , double *fitness ,
                                 int *choices ) ;
static void fval_to_fitness ( int popsize , double favor_best ,
                              double fitfac , double *fvals ,
                              double *fitness ) ;
static void mutate ( char *child , int nvars , double pmutate ) ;
static void pick_parents ( int *nchoices , int *choices ,
                           int *parent1 , int *parent2 ) ;
static void rand_ind ( char *popptr , int chromsize ) ;
static void reproduce ( char *p1 , char *p2 , int first_child , int nvars ,
                        char *child , int *crosspt , int *split ) ;

/*
   For translation speed we convert from gray codes to binary with a
   lookup table which is built in the first call.
*/

static unsigned char gray_code_table[256] ;  // Translation table
static int gray_initialized = 0 ;            // Has it been built yet?
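/*
   The routine that fills gray_code_table is not part of this excerpt.  As a
   minimal sketch (the names build_gray_table and gray_to_binary are ours,
   not the book's): binary is recovered from a gray code by XORing in
   successively shifted copies, and the table is just that conversion
   applied to every byte value 0..255.
*/

static unsigned char gray_to_binary ( unsigned char g )
{
   g ^= g >> 4 ;   // Prefix-XOR cascade: bit i becomes g[i]^g[i+1]^...^g[7]
   g ^= g >> 2 ;
   g ^= g >> 1 ;
   return g ;
}

static void build_gray_table ()
{
   int i ;
   for (i=0 ; i<256 ; i++)   // Indexed by gray code, yields plain binary
      gray_code_table[i] = gray_to_binary ( (unsigned char) i ) ;
   gray_initialized = 1 ;
}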
/*
   Entry point is here
*/

void LayerNet::gen_init (
   TrainingSet *tptr ,        // Training set to use
   struct LearnParams *lptr   // User's general learning parameters
   )
{
   int i, istart, individual, best_individual, generation, n_cross ;
   int first_child, parent1, parent2, improved, crosspt, nchoices, *choices ;
   int initpop, popsize, gens, climb, nvars, chromsize, split, ind ;
   double pcross, pmutate, error, besterror, *errors, *fitness, worst ;
   double fquit, favor_best, fitfac, maxerr, minerr, avgerr, overinit ;
   SingularValueDecomp *sptr ;
   struct GenInitParams *gptr ;  // User's genetic initialization parameters
   char *pool1, *pool2, *oldpop, *newpop, *popptr, *temppop, *best ;
   char msg[80] ;

   gptr = lptr->gp ;
   popsize = gptr->pool ;
   gens = gptr->gens ;
   climb = gptr->climb ;
   overinit = gptr->overinit ;
   pcross = gptr->pcross ;
   pmutate = gptr->pmutate ;

   fquit = lptr->quit_err ;

   favor_best = 3.1 ;
   fitfac = -20.0 ;

/*
--------------------------------------------------------------------------------

   Do all scratch memory allocation.

--------------------------------------------------------------------------------
*/

/*
   Allocate the singular value decomposition object for REGRESS.
*/

   if (nhid2 == 0)        // One hidden layer
      nvars = nhid1 + 1 ;
   else                   // Two hidden layers
      nvars = nhid2 + 1 ;

   MEMTEXT ( "GEN_INIT: new SingularValueDecomp" ) ;
   sptr = new SingularValueDecomp ( tptr->ntrain , nvars , 1 ) ;

   if ((sptr == NULL)  ||  ! sptr->ok) {
      memory_message ( "for genetic initialization. Try ANNEAL NOREGRESS." ) ;
      neterr = 1.0 ;  // Flag failure to LayerNet::learn which called us
      if (sptr != NULL)
         delete sptr ;
      return ;
      }

   chromsize = nhid1 * (nin+1) ;       // Length of an individual's chromosome
   if (nhid2)                          //   is the number of hidden weights
      chromsize += nhid2 * (nhid1+1) ;

   errors = fitness = NULL ;
   choices = NULL ;
   pool1 = pool2 = NULL ;

   MEMTEXT ( "GEN_INIT: errors, fitness, choices, best, pool1, pool2" ) ;
   if (((errors  = (double *) MALLOC ( popsize * sizeof(double) )) == NULL)
    || ((fitness = (double *) MALLOC ( popsize * sizeof(double) )) == NULL)
    || ((best    = (char *)   MALLOC ( chromsize ))                == NULL)
    || ((choices = (int *)    MALLOC ( popsize * sizeof(int) ))    == NULL)
    || ((pool1   = (char *)   MALLOC ( popsize * chromsize ))      == NULL)
    || ((pool2   = (char *)   MALLOC ( popsize * chromsize ))      == NULL)) {
      if (errors != NULL)
         FREE ( errors ) ;
      if (fitness != NULL)
         FREE ( fitness ) ;
      if (choices != NULL)
         FREE ( choices ) ;
      if (pool1 != NULL)
         FREE ( pool1 ) ;
      if (pool2 != NULL)
         FREE ( pool2 ) ;
      delete sptr ;
      memory_message ( "for genetic initialization. Try ANNEAL NOREGRESS." ) ;
      neterr = 1.0 ;  // Flag failure to LayerNet::learn which called us
      return ;
      }

/*
   Generate initial population pool.  We also preserve the best weights
   across all generations, as this is what we will ultimately return to the
   user.  Its mean square error is besterror.
*/

   besterror = 1.e30 ;      // For saving best (across all individuals and gens)
   maxerr = avgerr = 0.0 ;  // For progress display only
   best_individual = 0 ;    // Safety only
   initpop = popsize * overinit ; // Overinitialization of initial population

   progress_message ( "\nGenerating initial population" ) ;

   for (ind=0 ; ind<initpop ; ind++) {  // Try overinitialization times

      if (ind < popsize)            // If still in pop size limit
         individual = ind ;         // just use next avail space
      else {                        // Else we search entire pop
         worst = -1. ;              // for the worst member
         for (i=0 ; i<popsize ; i++) {  // which we will then replace
            if (errors[i] > worst) {
               worst = errors[i] ;
               individual = i ;
               }
            }
         avgerr -= worst ;          // Exclude discards from average
         }

      popptr = pool1 + individual * chromsize ; // Build init pop in pool1
      rand_ind ( popptr , chromsize ) ;         // Randomly generate individual
      decode ( popptr , nin , nhid1 , nhid2 ,   // Convert genotype (chromosome)
               hid1_coefs , hid2_coefs ) ;      //   to phenotype (weights)
      error = regress ( tptr , sptr ) ;         // Evaluate network error
      errors[individual] = error ;              // and keep all errors

      if (error < besterror) {           // Keep track of best
         besterror = error ;             // as it is returned to user
         best_individual = individual ;  // This is its index in pool1
         }

      if (error > maxerr)   // Max and average error are
         maxerr = error ;   // for progress display only
      avgerr += error ;

      if (error <= fquit)
         break ;

      progress_message ( "." ) ;
      }

   sprintf ( msg , "\nInitial pop: Min err=%7.4lf Max=%7.4lf Avg=%7.4lf",
             100. * besterror, 100. * maxerr,
             100.0 * avgerr / (double) popsize ) ;
   progress_message ( msg ) ;

/*
   The initial population has been built in pool1.  Copy its best member to
   'best' in case it never gets beat (unlikely but possible!).  Also, we will
   need best if the climb option is true.
*/

   popptr = pool1 + best_individual * chromsize ; // Point to best
   memcpy ( best , popptr , chromsize ) ;         // and save it
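/*
   The body of decode is not included in this excerpt.  A minimal sketch of
   the genotype-to-phenotype step it performs, assuming one gray-coded byte
   per weight mapped linearly onto a symmetric range (the -4..4 range and
   the name decode_sketch are our assumptions, not the book's values):
*/

static void decode_sketch ( char *popptr , int nin , int nh1 , int nh2 ,
                            double *w1 , double *w2 )
{
   int i, n ;
   unsigned char b ;

   if (! gray_initialized)   // Build the translation table on first call
      build_gray_table () ;

   n = nh1 * (nin+1) ;       // First hidden layer: nh1 neurons, nin inputs + bias
   for (i=0 ; i<n ; i++) {
      b = gray_code_table[(unsigned char) popptr[i]] ; // Gray -> binary
      w1[i] = 8.0 * ((double) b / 255.0 - 0.5) ;       // Map 0..255 to -4..4
      }

   if (nh2) {                // Second hidden layer, if present
      popptr += n ;
      n = nh2 * (nh1+1) ;
      for (i=0 ; i<n ; i++) {
         b = gray_code_table[(unsigned char) popptr[i]] ;
         w2[i] = 8.0 * ((double) b / 255.0 - 0.5) ;
         }
      }
}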
/*
   This is the main generation loop.  There are two areas for population
   pool storage: pool1 and pool2.  At any given time, oldpop will be set to
   one of them, and newpop to the other.  This avoids a lot of copying.
*/

   oldpop = pool1 ;  // This is the initial population
   newpop = pool2 ;  // The next generation is created here

   for (generation=0 ; generation<gens ; generation++) {

      if (error <= fquit) // We may have satisfied this in init pop
         break ;          // So we test at start of generation loop

      error_to_fitness ( popsize , favor_best , fitfac , errors , fitness ) ;
      fitness_to_choices ( popsize , fitness , choices ) ;

      nchoices = popsize ;         // Will count down as choices array emptied
      n_cross = pcross * popsize ; // Number crossing over
      first_child = 1 ;            // Generating first of parent's 2 children?
      improved = 0 ;               // Flags if we beat best

      if (climb) {                              // If we are to hill climb
         memcpy ( newpop , best , chromsize ) ; // start with best
         errors[0] = besterror ;                // Record its error
         istart = 1 ;                           // and start children past it
         }
      else
         istart = 0 ;

/*
   Generate the children
*/

      maxerr = avgerr = 0.0 ;  // For progress display only
      minerr = 1.0 ;           // Ditto

      for (individual=istart ; individual<popsize ; individual++) {

         popptr = newpop + individual * chromsize ; // Will put this child here

         if (first_child)  // If this is the first of 2 children, pick parents
            pick_parents ( &nchoices , choices , &parent1 , &parent2 ) ;

         if (n_cross-- > 0)    // Do crossovers first
            reproduce ( oldpop + parent1 * chromsize ,
                        oldpop + parent2 * chromsize ,
                        first_child , chromsize , popptr , &crosspt , &split ) ;
         else if (first_child) // No more crossovers, so just copy parent
            memcpy ( popptr , oldpop + parent1 * chromsize , chromsize ) ;
         else
            memcpy ( popptr , oldpop + parent2 * chromsize , chromsize ) ;

         if (pmutate > 0.0)
            mutate ( popptr , chromsize , pmutate ) ;

         decode ( popptr , nin , nhid1 , nhid2 , hid1_coefs , hid2_coefs ) ;
         error = regress ( tptr , sptr ) ; // Evaluate child's error
         errors[individual] = error ;      // and keep each

         if (error < besterror) {           // Keep track of best
            besterror = error ;             // It will be returned to user
            best_individual = individual ;  // This is its index in newpop
            improved = 1 ;                  // Flag so we copy it later
            }

         if (error > maxerr)   // Min, max and average error
            maxerr = error ;   // for progress display only
         if (error < minerr)
            minerr = error ;
         avgerr += error ;

         if (error <= fquit)
            break ;

         first_child = ! first_child ;
         } // For all genes in population

/*
   We finished generating all children.  If we improved (one of these
   children beat the best so far) then copy that child to the best.
   Swap oldpop and newpop for the next generation.
*/

      if (improved) {
         popptr = newpop + best_individual * chromsize ; // Point to best
         memcpy ( best , popptr , chromsize ) ;          // and save it
         }

      temppop = oldpop ;  // Switch old and new pops for next generation
      oldpop = newpop ;
      newpop = temppop ;

      sprintf ( msg ,
                "\nGeneration %3d: Min err=%7.4lf Max=%7.4lf Avg=%7.4lf",
                generation+1, 100. * minerr, 100. * maxerr,
                100.0 * avgerr / (double) popsize ) ;
      progress_message ( msg ) ;
      }

/*
   We are all done.
*/

   decode ( best , nin , nhid1 , nhid2 , hid1_coefs , hid2_coefs ) ;
   besterror = regress ( tptr , sptr ) ; // Evaluate network error

   MEMTEXT ( "GEN_INIT: errors, fitness, choices, best, pool1, pool2" ) ;
   FREE ( errors ) ;
   FREE ( fitness ) ;
   FREE ( choices ) ;
   FREE ( best ) ;
   FREE ( pool1 ) ;
   FREE ( pool2 ) ;
   MEMTEXT ( "GEN_INIT: delete sptr" ) ;
   delete sptr ;
}
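/*
   The genetic operators themselves (fitness_to_choices, pick_parents,
   reproduce, mutate) are declared above, but their bodies are not in this
   excerpt.  The sketches below (names ending in _sketch are ours) show the
   usual forms: single-point crossover of the byte string, and bit-flip
   mutation.  The real reproduce evidently also splits within a byte, since
   it reports both a byte index (crosspt) and a bit position (split); this
   sketch cuts only at byte boundaries.
*/

static void reproduce_sketch ( char *p1 , char *p2 , int first_child ,
                               int nvars , char *child ,
                               int *crosspt , int *split )
{
   int i ;
   char *t ;

   if (first_child) {                     // First child picks the cut point;
      *crosspt = 1 + rand() % (nvars-1) ; // the second child reuses it with
      *split = 0 ;                        // the parents' roles reversed
      }
   else {
      t = p1 ;
      p1 = p2 ;
      p2 = t ;
      }

   for (i=0 ; i<nvars ; i++)   // Head from one parent, tail from the other
      child[i] = (i < *crosspt)  ?  p1[i]  :  p2[i] ;
}

static void mutate_sketch ( char *child , int nvars , double pmutate )
{
   int i ;

   for (i=0 ; i<nvars ; i++) {  // With probability pmutate, flip one
      if ((double) rand() / RAND_MAX < pmutate)  // random bit of this byte
         child[i] ^= (char) (1 << (rand() % 8)) ;
      }
}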
/*
--------------------------------------------------------------------------------

   error_to_fitness - Convert the objective function value of each
                      individual to a scaled fitness value.  The scaled
                      fitness may be considered an expected frequency of
                      choice.

--------------------------------------------------------------------------------
*/

static void error_to_fitness (
   int popsize ,        // Length of errors, fitness vectors
   double favor_best ,  // Factor for favoring best over average (2-3 is good)
   double fitfac ,      // Factor for converting error to raw fitness (-20 good)
   double *errors ,     // Input popsize vector of values of objective function
   double *fitness      // Output popsize vector of scaled fitnesses
   )
{
   int individual ;
   double fit, avgfitness, minfitness, maxfitness, ftemp, tmult, tconst ;

/*
   The first step is to convert the objective function value (which is to be
   minimized) into a raw (unscaled) fitness value.  The best method can be
   problem dependent.  Certainly, the mapping function must be decreasing,
   as we want smaller values of the objective function to map to larger
   values of fitness.  Also, later calculations are simplified if the
   fitness is always positive.

   The conversion function used here is f(v) = exp ( k * v ), where k is a
   negative number.  For objective functions which range from zero to one,
   as would be the case of a relative error function, a constant of about
   -20 is appropriate.  This would map .001 to .98, .01 to .82 and .1 to .14.
*/

   avgfitness = 0.0 ;
   maxfitness = -1.e30 ;
   minfitness = 1.e30 ;

   for (individual=0 ; individual<popsize ; individual++) {
      fitness[individual] = fit = exp ( fitfac * errors[individual] ) ;
      avgfitness += fit ;
      if (fit > maxfitness)
         maxfitness = fit ;
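/*
   The numbers in the comment above check out: exp(-20 * .001) = .980,
   exp(-20 * .01) = .819, and exp(-20 * .1) = .135.  The remainder of
   error_to_fitness is not included in this listing, but the declared
   tmult, tconst, and favor_best strongly suggest classic linear fitness
   scaling: choose tmult and tconst so the average fitness is preserved
   while the best individual maps to favor_best times the average.  A
   sketch of that step (the function name and the clamp at zero are our
   additions, not the book's code):
*/

static void scale_fitness_sketch ( int popsize , double favor_best ,
                                   double avgfitness , double maxfitness ,
                                   double *fitness )
{
   int individual ;
   double tmult, tconst ;

   if (maxfitness > avgfitness) { // Guard against a uniform population
      tmult = (favor_best - 1.0) * avgfitness / (maxfitness - avgfitness) ;
      tconst = avgfitness * (1.0 - tmult) ;
      }
   else {
      tmult = 1.0 ;   // All fitnesses equal: leave them unchanged
      tconst = 0.0 ;
      }

   for (individual=0 ; individual<popsize ; individual++) {
      fitness[individual] = tmult * fitness[individual] + tconst ;
      if (fitness[individual] < 0.0)  // Steep scaling can drive very unfit
         fitness[individual] = 0.0 ;  // members negative; clamp at zero
      }
}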