
📄 xcs.c

📁 Simple GA code (Pascal code from Goldberg, D. E. (1989), Genetic Algorithms in Search, Optimization, and Machine Learning)
💻 C
📖 Page 1 of 3
/*
/       (XCS-C 1.2)
/       ------------------------------------
/       Learning Classifier System based on accuracy
/
/     by
/     Martin V. Butz
/     Illinois Genetic Algorithms Laboratory (IlliGAL)
/     University of Illinois at Urbana/Champaign
/     butz@illigal.ge.uiuc.edu
/
/     Last modified: 09-30-2003
/
/     Main program
*/

#include <assert.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <math.h>
#include <unistd.h>
#include <sys/resource.h>

#include "classifierList.h"
#include "actionSelection.h"
#include "xcs.h"
#include "env.h"
#include "xcsMacros.h"

int main(int args, char *argv[])
{
  FILE *env_file=NULL;
  FILE *parameterFile=NULL;
  struct XCS *xcsDefault;

  /* set the priority */
  /* setpriority(PRIO_PROCESS, getpid(), 19); */

  /* randomize the pseudo-number generator */
  randomize();

  if(isMultiStep()) {
    /* initialize multi-step environment (additional input file required) */
    if(args<2) {
      printf("Usage: xcs.out MAZEFILE [PARAMETERFILE]\n");
      return 0;
    }
    if((env_file = fopen(argv[1], "rt")) == NULL) {
      fprintf(stderr, "Cannot open file %s.\n", argv[1]);
      return 0;
    }
    if(!initEnv(env_file))
      return 0;
    fclose(env_file);

    /* open parameter file */
    if(args>2) {
      if((parameterFile = fopen(argv[2], "rt")) == NULL) {
        fprintf(stderr, "Cannot open parameter file %s.\n", argv[2]);
        return 0;
      }
    }
  }else{
    /* initialize single-step environment */
    if(!initEnv(0))
      return 0;

    /* open parameter file */
    if(args>1) {
      if((parameterFile = fopen(argv[1], "rt")) == NULL) {
        fprintf(stderr, "Cannot open parameter file %s.\n", argv[1]);
        return 0;
      }
    }
  }

  /* initialize default parameter settings */
  assert((xcsDefault = (struct XCS *)calloc(1, sizeof(struct XCS)))!=NULL);
  if(parameterFile != NULL) {
    setParameterValues(parameterFile, xcsDefault);
    fclose(parameterFile);
  }else{
    setMacroValues(xcsDefault);
  }

  /* print parameter settings */
  fprintXCS(stdout, xcsDefault);
  fprintEnv(stdout);

  /* start the experiments */
  startExperiments(xcsDefault);

  /* free allocated memory */
  freeXCS(xcsDefault);
  freeEnv();

  return 1;
}

/**
 * Successively starts the chosen number of experiments and records learning progress,
 * averages, and standard deviations.
 */
void startExperiments(struct XCS *xcsDefault)
{
  int expcounter;
  struct xClassifierSet *pop;
  struct XCS *xcsActual;
  FILE *tabFile, *tabFile2=NULL;
  double **averages;
  char *tabFileName;

  /* make room to record all data to derive averages and standard deviations */
  assert((averages = (double **)(calloc(xcsDefault->nrExps, sizeof(double *))))!=0);
  for(expcounter=0; expcounter < xcsDefault->nrExps; expcounter++) {
    assert((averages[expcounter] = (double *)(calloc((8 + getConditionLength()) * ((int)(xcsDefault->maxNrSteps/xcsDefault->testFrequency)+1), sizeof(double))))!=0);
  }
  assert((tabFileName = (char *)(calloc(strlen(xcsDefault->tabOutFile)+5, sizeof(char))))!=0);

  /* create data file with ending .txt monitoring online learning progress */
  strcpy(tabFileName, xcsDefault->tabOutFile);
  tabFileName[strlen(xcsDefault->tabOutFile)]='.';
  tabFileName[strlen(xcsDefault->tabOutFile)+1]='t';
  tabFileName[strlen(xcsDefault->tabOutFile)+2]='x';
  tabFileName[strlen(xcsDefault->tabOutFile)+3]='t';
  if((tabFile = fopen(tabFileName, "wt")) == NULL) {
    fprintf(stderr, "Cannot open file %s.\n", tabFileName);
    return;
  }
  free(tabFileName);

  /* write parameter settings to file */
  fprintXCS(tabFile, xcsDefault);
  fprintEnv(tabFile);

  /* start the experiments */
  for(expcounter=0; expcounter < xcsDefault->nrExps; expcounter++) {
    fprintf(tabFile, "Next Experiment\n");
    xcsActual = copyXCS(xcsDefault);

    /* initialize the population */
    pop=NULL;
    if(xcsActual->initializePopulation)
      pop=createRandomClassifierSet(getConditionLength(), xcsActual->maxPopSize, xcsActual->dontCareProb);

    /* start one experiment */
    if(isMultiStep())
      startOneMultiStepExperiment(xcsActual, tabFile, &pop, averages, expcounter);
    else
      startOneSingleStepExperiment(xcsActual, tabFile, &pop, averages, expcounter, tabFile2);

    /* print sorted population */
    /*
     * fprintf(tabFile, "\nNum Sorted Classifier List:\n");
     * pop=sortClassifierSet(&pop, 0);
     * fprintClassifierSet(tabFile,pop);
     */

    /* free population and parameter settings for this experiment */
    freeClassifierSet(&pop);
    freeXCS(xcsActual);
  }
  fclose(tabFile);

  /* determine and record averages and standard deviations */
  writeAveragePerformance(xcsDefault, averages);

  /* free space */
  for(expcounter=0; expcounter < xcsDefault->nrExps; expcounter++) {
    free(averages[expcounter]);
  }
  free(averages);
}

/*########################## in a single step environment ##########################*/

/**
 * Starts one single step experiment.
 */
void startOneSingleStepExperiment(struct XCS *xcs, FILE *tabFile, struct xClassifierSet **pop,
				  double **averages, int expnr, FILE *tabFile2)
{
  int trialCounter, exploit=1;
  int correct[xcs->testFrequency];
  double sysError[xcs->testFrequency];
  char state[getConditionLength()+1];

  /* set the \0 char at the end of the problem instance coded in 'state' */
  state[getConditionLength()]='\0';

  /* Start one experiment; trialCounter counts the number of learning problems (trials) */
  for(trialCounter=0; trialCounter<xcs->maxNrSteps; trialCounter+=exploit) {
    /* change from explore to exploit and back */
    exploit = (exploit+1)%2;

    /* get the next problem instance */
    resetState(state);

    if(!exploit)
      /* learn from the problem instance */
      doOneSingleStepProblemExplore(xcs, pop, state, trialCounter);
    else
      /* test on the problem instance */
      doOneSingleStepProblemExploit(xcs, pop, state, trialCounter,
				    &correct[trialCounter%(xcs->testFrequency)],
				    &sysError[trialCounter%(xcs->testFrequency)]);

    /* monitor performance in data file every 'testFrequency' steps */
    if(trialCounter%xcs->testFrequency==0 && exploit && trialCounter>0) {
      writePerformance(xcs, tabFile, *pop, correct, sysError, trialCounter, averages, expnr);
    }
  }

  /* check if final performance should be written */
  if(trialCounter%xcs->testFrequency==0 && exploit && trialCounter>0) {
    writePerformance(xcs, tabFile, *pop, correct, sysError, trialCounter, averages, expnr);
  }
}

/**
 * Executes one explore step in a single step problem.
 */
void doOneSingleStepProblemExplore(struct XCS *xcs, struct xClassifierSet **pop, char *state, int trialCounter)
{
  struct xClassifierSet *mset, *aset, *killset=NULL;
  int action;
  double reward=0., *predictionArray;
  int correct;

  /* we need space for the prediction array */
  assert((predictionArray = (double *)calloc(getNumberOfActions(), sizeof(double)))!=0);

  /* get the match set */
  mset = getMatchSet(xcs, state, pop, &killset, trialCounter);
  /* no updates are necessary in this case */
  freeSet(&killset);

  /* generate the prediction array */
  getPredictionArray(mset, predictionArray, xcs);

  /* get the action that wins considering the prediction array (usually this is random action selection) */
  action = learningActionWinner(predictionArray, xcs->exploreProb);

  /* execute the action and get the reward;
   * correct represents a boolean for the right or wrong action */
  reward = doAction(state, action, &correct);

  /* get the action set according to the chosen action */
  aset = getActionSet(action, mset);

  /* give immediate reward */
  adjustActionSet(xcs, &aset, 0, reward, pop, &killset);
  /* no update of other sets necessary here */
  freeSet(&killset);

  /* execute the discovery mechanism (GA) */
  discoveryComponent(&aset, pop, &killset, trialCounter, state, xcs, reward);
  /* no update of other sets necessary here */
  freeSet(&killset);

  /* clean up */
  freeSet(&mset);
  freeSet(&aset);
  free(predictionArray);
}

/**
 * Executes one exploit step in a single step problem (for performance monitoring).
 */
void doOneSingleStepProblemExploit(struct XCS *xcs, struct xClassifierSet **pop, char *state,
				   int trialCounter, int *correct, double *sysError)
{
  struct xClassifierSet *mset, *killset=NULL;
  int action;
  double reward=0., *predictionArray;

  /* we need room for the prediction array */
  assert((predictionArray = (double *)calloc(getNumberOfActions(), sizeof(double)))!=0);

  /* get the match set */
  mset = getMatchSet(xcs, state, pop, &killset, trialCounter);
  /* no updates are necessary in this case */
  freeSet(&killset);

  /* generate the prediction array */
  getPredictionArray(mset, predictionArray, xcs);

  /* get the action that has the highest value in the prediction array */
  action = deterministicActionWinner(predictionArray);
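
The two selection calls above come from actionSelection.h: the explore path uses learningActionWinner with xcs->exploreProb, which the comment describes as usually random action selection, while the exploit path takes the highest-valued entry of the prediction array via deterministicActionWinner. As a rough, self-contained illustration of that split (not the actual XCS-C implementations; the function names below are hypothetical stand-ins), an epsilon-greedy-style pair might look like this:

#include <stdlib.h>

/* Hypothetical stand-in for deterministicActionWinner: return the index of the
 * largest prediction-array entry (ties resolved toward the lower index). */
static int exampleDeterministicWinner(const double *predictionArray, int numActions)
{
  int best = 0, a;
  for(a = 1; a < numActions; a++) {
    if(predictionArray[a] > predictionArray[best])
      best = a;
  }
  return best;
}

/* Hypothetical stand-in for learningActionWinner: with probability exploreProb
 * pick a uniformly random action, otherwise fall back to the deterministic
 * winner. The real XCS-C routine may select differently. */
static int exampleLearningWinner(const double *predictionArray, int numActions, double exploreProb)
{
  if((double)rand() / ((double)RAND_MAX + 1.0) < exploreProb)
    return rand() % numActions;
  return exampleDeterministicWinner(predictionArray, numActions);
}

With exploreProb set to 1.0 this sketch reduces to pure random selection during explore trials, which matches the "usually this is random action selection" remark in doOneSingleStepProblemExplore.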
