📄 bayes.cc
/*--------------------------------------------------------------------------
@COPYRIGHT  : Copyright 1997, John Sled
              McConnell Brain Imaging Centre,
              Montreal Neurological Institute, McGill University.
              Permission to use, copy, modify, and distribute this software
              and its documentation for any purpose and without fee is hereby
              granted, provided that the above copyright notice appear in all
              copies.  The author and McGill University make no
              representations about the suitability of this software for any
              purpose.  It is provided "as is" without express or implied
              warranty.
----------------------------------------------------------------------------
$RCSfile: bayes.cc,v $
$Revision: 1.1.1.1 $
$Author: jason $
$Date: 2002/03/20 22:16:35 $
$State: Exp $
--------------------------------------------------------------------------*/
/* ----------------------------- MNI Header -----------------------------------
@NAME       : bayes.c
@INPUT      :
@OUTPUT     :
@RETURNS    :
@DESCRIPTION: bayesian classifier
@METHOD     :
@GLOBALS    :
@CALLS      :
@CREATED    : August 21, 1996 (John Sled)
@MODIFIED   : $Log: bayes.cc,v $
@MODIFIED   : Revision 1.1.1.1  2002/03/20 22:16:35  jason
@MODIFIED   : first autoconfiscated version that compiles under linux gcc 3
@MODIFIED   :
@MODIFIED   : Revision 1.2  2000/08/16 18:18:28  jgsled
@MODIFIED   : Fixed problem in which the normalization factors for the exponentials
@MODIFIED   : were not being initialized when the training data was read from a file.
@MODIFIED   :
@MODIFIED   : Revision 1.1.1.1  1997/02/11 00:06:42  alex
@MODIFIED   : Sources for classify, copied from Vasken Kollokian
@MODIFIED   :
 * Revision 1.1  1996/08/29  03:54:52  vasco
 * Initial revision
 *
---------------------------------------------------------------------------- */

extern "C" {
#include <volume_io.h>
#include <limits.h>
#include <math.h>
}

#include "../class_globals.h"

void bayesian_allocate_memory(void);
Real matrix_determinant(int dimension, Real **matrix);
void scale_matrices(Real ***matrix, Real scale);

/* locally defined global variables */
static Real **mean_feature_matrix;      /* matrix to reflect mean features */
static Real ***covariance_matrix;       /* covariance matrices for each class */
static Real ***inv_covariance_matrix;   /* inverses of the covariance matrices
                                           for each class */
static int  *mean_feature_class_vector; /* vector to reflect mean feature classes */
static Real *normalize_class_vector;    /* normalization factor for each class */
static Real **distance_vector;          /* temporary storage for x - u */
static Real *fuzzy_bayes_vector;
static Real stdev_scale_factor;

/* ----------------------------- MNI Header -----------------------------------
@NAME       : bayesian_init_training
@INPUT      :
@OUTPUT     :
@RETURNS    :
@DESCRIPTION:
@METHOD     :
@GLOBALS    :
@CALLS      :
@CREATED    : August 21, 1996 (John Sled)
@MODIFIED   :
---------------------------------------------------------------------------- */
void bayesian_init_training(char *param_filename /* may be NULL */)
{
  FILE *param_file;

  /* if load_training is not used, then allocate space for struct */
  if ( !load_train_filename ) {
    bayesian_allocate_memory();
  }

  /* check to see if the file is there */
  if ( param_filename && !file_exists(param_filename) ) {
    (void) fprintf(stderr, "File `%s' doesn't exist!\n", param_filename);
    exit(EXIT_FAILURE);
  }

  if ( !param_filename ) {
    stdev_scale_factor = 1.0;
  }
  else {
    if (verbose)
      fprintf(stdout, "Loading the parameter file %s\n", param_filename);

    /* open the parameter file, and read the values */
    param_file = fopen(param_filename, "r");
    if ( param_file == NULL ) {
      fprintf(stderr, "Cannot open %s\n", param_filename);
      exit(EXIT_FAILURE);
    }

    /* scan for the standard deviation scale factor;
       stdev_scale_factor is a Real (double), so "%lf" is required here
       rather than "%f" */
    fscanf( param_file, "scale=%lf\n", &stdev_scale_factor);
    fclose(param_file);

    if ( stdev_scale_factor <= 0 ) {
      fprintf(stderr, "Scale parameter cannot be zero or negative.\n");
      exit(EXIT_FAILURE);
    }
  }

  if ( debug > 2 ) {
    fprintf(stderr, "apriori flag = %s\n", (apriori) ? "TRUE" : "FALSE");
    fprintf(stderr, "scale factor for deviation = %g\n", stdev_scale_factor);
  }
}

/* ----------------------------- MNI Header -----------------------------------
@NAME       : bayesian_allocate_memory
@INPUT      :
@OUTPUT     :
@RETURNS    :
@DESCRIPTION:
@METHOD     :
@GLOBALS    :
@CALLS      :
@CREATED    : August 21, 1996 (John Sled)
@MODIFIED   :
---------------------------------------------------------------------------- */
void bayesian_allocate_memory(void)
{
  /* reserve area for the mean feature matrix */
  ALLOC2D(mean_feature_matrix, num_classes, num_features);

  /* reserve area for the mean_feature_class_vector */
  ALLOC(mean_feature_class_vector, num_classes);

  /* reserve area for the normalize_class_vector */
  ALLOC(normalize_class_vector, num_classes);

  /* reserve area for the fuzzy_bayes_vector */
  ALLOC( fuzzy_bayes_vector, num_classes );

  /* reserve area for the covariance matrices */
  ALLOC3D(covariance_matrix, num_classes, num_features, num_features);
  ALLOC3D(inv_covariance_matrix, num_classes, num_features, num_features);

  /* reserve working space for distance vectors */
  ALLOC2D(distance_vector, num_classes, num_features);
}

/* ----------------------------- MNI Header -----------------------------------
@NAME       : bayesian_train_samples
@INPUT      :
@OUTPUT     :
@RETURNS    : ?
@DESCRIPTION: takes a feature matrix and trains a classifier on it.
@METHOD     :
@GLOBALS    :
@CALLS      :
@CREATED    : August 21, 1996 (John Sled)
@MODIFIED   :
---------------------------------------------------------------------------- */
void bayesian_train_samples(void)
{
  int i, j, k, l;   /* counters - samples, features, classes */

  if (verbose)
    (void) fprintf(stderr, "Training samples ...\n");

  /* initialize mean and covariance feature matrices, feature class vector,
     and number of class samples */
  for_less( i, 0, num_classes) {

    mean_feature_class_vector[i] = INT_MAX;   /* big number to denote vacancy */

    /* check that there are enough samples */
    if (class_count[i] < num_features + 1) {
      (void) fprintf(stderr, "Not enough samples to train classifier\n");
      exit(EXIT_FAILURE);
    }

    for_less( j, 0, num_features) {
      mean_feature_matrix[i][j] = 0.0;
      for_less( k, 0, num_features)
        covariance_matrix[i][j][k] = 0.0;
    }
  }

  /* Compute sample means */
  /* repeat for the total number of samples */
  for_less( i, 0, num_samples) {

    /* repeat for the total number of classes */
    for_less( l, 0, num_classes ) {

      if ( mean_feature_class_vector[l] == INT_MAX ||   /* unoccupied spot */
           mean_feature_class_vector[l] == class_column[i] ) {

        for_less( j, 0, num_features )
          mean_feature_matrix[l][j] += feature_matrix[i][j];

        /* if unoccupied, then assign class */
        if ( mean_feature_class_vector[l] == INT_MAX )
          mean_feature_class_vector[l] = class_column[i];

        break;
      }
    } /* for l */
  } /* for i */

  if (verbose)
    (void) fprintf(stderr, "Generating mean feature matrix ...\n\n");

  for_less( i, 0, num_classes)
    for_less( j, 0, num_features)
      if (class_count[i] != 0)
        mean_feature_matrix[i][j] /= (Real) class_count[i];

  if (debug > 2 ) {
    fprintf( stderr, "Printing mean_feature_matrix ...\n");
    for_less( i, 0, num_classes) {
      for_less( j, 0, num_features)
        fprintf( stderr, "%lf ", mean_feature_matrix[i][j]);
      fprintf( stderr, "%d\n", mean_feature_class_vector[i]);
    }
    fprintf( stderr, "-----\n");
  }

  /* Compute sample covariances */
  if (verbose)
    (void) fprintf(stderr, "Generating covariance feature matrix ...\n\n");

  /* repeat for the total number of samples */
  for_less( i, 0, num_samples) {

    /* repeat for the total number of classes */
    for_less( l, 0, num_classes ) {

      if ( mean_feature_class_vector[l] == class_column[i] ) {

        for_less( j, 0, num_features )
          for_less( k, j, num_features )
            covariance_matrix[l][j][k] +=
              (feature_matrix[i][j] - mean_feature_matrix[l][j]) *
              (feature_matrix[i][k] - mean_feature_matrix[l][k]);
      }
    } /* for l */
  } /* for i */

  /* fill in lower half of covariance matrices */
  for_less( l, 0, num_classes )
    for_less( j, 0, num_features )
      for_less( k, j+1, num_features )
        covariance_matrix[l][k][j] = covariance_matrix[l][j][k];

  /* normalize for the number of samples, taking into account
     the uncertainty in the mean */
  for_less( l, 0, num_classes )
    for_less( j, 0, num_features )
      for_less( k, 0, num_features )
        covariance_matrix[l][j][k] /= ((Real) class_count[l] - 1);

  if (debug > 2 ) {
    fprintf( stderr, "Printing covariance_matrix ...\n");
    for_less( i, 0, num_classes) {
      fprintf( stderr, "\nCovariance for class %d\n",
               mean_feature_class_vector[i]);
      for_less( j, 0, num_features) {
        for_less( k, 0, num_features)
          fprintf( stderr, "%lf ", covariance_matrix[i][j][k]);
        fprintf(stderr, "\n");
      }
      fprintf( stderr, "-----\n");
    }
  }

  if (verbose)
    (void) fprintf(stderr, "Inverting covariance feature matrices ...\n\n");

  for_less( l, 0, num_classes )
    if (!invert_square_matrix(num_features, covariance_matrix[l],
                              inv_covariance_matrix[l])) {
      (void) fprintf(stderr, "Covariance matrix for class %d is singular;"
                     " training of classifier failed.\n", l);
      exit(EXIT_FAILURE);
    }

  /* compute normalization factors */
  for_less( l, 0, num_classes ) {
    normalize_class_vector[l] =
      sqrt(matrix_determinant(num_features, inv_covariance_matrix[l]) /
           pow(2.0 * M_PI, num_features));
  }

  if (debug > 6 ) {
    fprintf( stderr, "normalize_class_vector[] = ");
    for_less( l, 0, num_classes ) {
      fprintf( stderr, "%g ", normalize_class_vector[l]);
    }
    fprintf( stderr, "\n");
  }

  /* put in scale factor for standard deviation */
  scale_matrices(inv_covariance_matrix,
                 1.0 / (stdev_scale_factor * stdev_scale_factor));

  if (debug > 2 ) {
    fprintf( stderr, "Scaling inverse covariance by %g\n",
             1.0 / (stdev_scale_factor * stdev_scale_factor));
  }