
📄 info_gain.c

📁 Code from the machine learning textbook by Tom Mitchell
💻 C
📖 Page 1 of 2
/* Functions to calculate the information gain for each word in our corpus. */

/* Copyright (C) 1997, 1998, 1999 Andrew McCallum

   Written by:  Sean Slattery <slttery@cs.cmu.edu>
   and Andrew Kachites McCallum <mccallum@cs.cmu.edu>

   This file is part of the Bag-Of-Words Library, `libbow'.

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Library General Public License
   as published by the Free Software Foundation, version 2.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Library General Public License for more details.

   You should have received a copy of the GNU Library General Public
   License along with this library; if not, write to the Free Software
   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111, USA */

#include <bow/libbow.h>
#include <math.h>
#include <assert.h>

#if !HAVE_LOG2F
#define log2f log
#endif

/* Return the entropy given counts for each type of element. */
double
bow_entropy (float *counts, int num_counts)
{
  double total = 0;             /* How many elements we have in total */
  double entropy = 0.0;
  double fraction;
  int i;

  /* First total the array. */
  for (i = 0; i < num_counts; i++)
    total += counts[i];

  /* If we have no elements, then the entropy is zero. */
  if (total == 0)
    return 0.0;

  /* Now calculate the entropy. */
  for (i = 0; i < num_counts; i++)
    {
      if (counts[i] != 0)
        {
          fraction = counts[i] / total;
          entropy -= fraction * log2f (fraction);
        }
    }

  return entropy;
}

/* Return a malloc()'ed array containing an information-gain score for
   each word index. */
float *
bow_infogain_per_wi_new_document_event (bow_barrel *barrel, int num_classes,
                                        int *size)
{
  float grand_totals[num_classes];  /* Totals for each class. */
  float with_word[num_classes];     /* Totals for the set of model docs
                                       with this word. */
  float without_word[num_classes];  /* Totals for the set of model docs
                                       without this word. */
  int max_wi;                       /* The highest "word index" in WI2DVF. */
  bow_cdoc *doc;                    /* The working cdoc. */
  double total_entropy;             /* The entropy of the total collection. */
  double with_word_entropy;         /* The entropy of the set of docs with
                                       the word in question. */
  double without_word_entropy;      /* The entropy of the set of docs without
                                       the word in question. */
  float grand_total = 0;
  float with_word_total = 0;
  float without_word_total = 0;
  int i, j, wi, di;
  bow_dv *dv;
  float *ret;

  bow_verbosify (bow_progress,
                 "Calculating info gain... words ::          ");

  max_wi = MIN (barrel->wi2dvf->size, bow_num_words ());
  *size = max_wi;
  ret = bow_malloc (max_wi * sizeof (float));

  /* First set all the arrays to zero. */
  for (i = 0; i < num_classes; i++)
    {
      grand_totals[i] = 0;
      with_word[i] = 0;
      without_word[i] = 0;
    }

  /* Now set up the grand totals. */
  for (i = 0; i < barrel->cdocs->length; i++)
    {
      doc = bow_cdocs_di2doc (barrel->cdocs, i);
      if (doc->type == bow_doc_train)
        {
          grand_totals[doc->class] += doc->prior;
          grand_total += doc->prior;
        }
    }

  /* Calculate the total entropy. */
  total_entropy = bow_entropy (grand_totals, num_classes);

  /* Now loop over all words. */
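  /* For each word w the loop below computes the classic information
     gain under the "document event" model:
         IG(w) = H(C) - [ P(w)*H(C|w present) + P(!w)*H(C|w absent) ]
     where H is the class entropy computed by bow_entropy () above and
     P(w) is the fraction of training-document prior mass that falls
     on documents containing w. */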
  for (wi = 0; wi < max_wi; wi++)
    {
      /* Get this document vector. */
      dv = bow_wi2dvf_dv (barrel->wi2dvf, wi);
      if (dv == NULL)
        {
          ret[wi] = 0;
          continue;
        }

      with_word_total = 0;

      /* Create totals for this dv. */
      for (j = 0; j < dv->length; j++)
        {
          di = dv->entry[j].di;
          doc = bow_cdocs_di2doc (barrel->cdocs, di);
          if (doc->type == bow_doc_train)
            {
              with_word[doc->class] += doc->prior;
              with_word_total += doc->prior;
            }
        }

      /* Create without-word totals. */
      for (j = 0; j < num_classes; j++)
        without_word[j] = grand_totals[j] - with_word[j];
      without_word_total = grand_total - with_word_total;

      /* Calculate entropies. */
      with_word_entropy = bow_entropy (with_word, num_classes);
      without_word_entropy = bow_entropy (without_word, num_classes);

      /* Calculate and store the information gain. */
      ret[wi] = (total_entropy
                 - ((((double) with_word_total / (double) grand_total)
                     * with_word_entropy)
                    + (((double) without_word_total / (double) grand_total)
                       * without_word_entropy)));

      /* Not comparing with 0 here because of round-off error. */
      assert (ret[wi] >= -1e-7);
      if (ret[wi] < 0)
        ret[wi] = 0;

      /* Reset arrays to zero. */
      for (i = 0; i < num_classes; i++)
        {
          with_word[i] = 0;
          without_word[i] = 0;
        }

      if (wi % 100 == 0)
        bow_verbosify (bow_progress,
                       "\b\b\b\b\b\b\b\b\b%9d", max_wi - wi);
    }

  bow_verbosify (bow_progress, "\n");

  return ret;
}

/* Return a malloc()'ed array containing an information-gain score for
   each word index. */
float *
bow_infogain_per_wi_new_word_event (bow_barrel *barrel, int num_classes,
                                    int *size)
{
  float grand_totals[num_classes];  /* Totals for each class. */
  float with_word[num_classes];     /* Totals for the set of model docs
                                       with this word. */
  float without_word[num_classes];  /* Totals for the set of model docs
                                       without this word. */
  int max_wi;                       /* The highest "word index" in WI2DVF. */
  bow_cdoc *doc;                    /* The working cdoc. */
  double total_entropy;             /* The entropy of the total collection. */
  double with_word_entropy;         /* The entropy of the set of docs with
                                       the word in question. */
  double without_word_entropy;      /* The entropy of the set of docs without
                                       the word in question. */
  float grand_total;
  float with_word_total = 0;
  float without_word_total = 0;
  int i, j, wi, di;
  bow_dv *dv;
  float *ret;

  bow_verbosify (bow_progress,
                 "Calculating info gain... words ::          ");

  max_wi = MIN (barrel->wi2dvf->size, bow_num_words ());
  *size = max_wi;
  ret = bow_malloc (max_wi * sizeof (float));

  /* First set the arrays to zero. */
  for (i = 0; i < num_classes; i++)
    grand_totals[i] = 0;
  grand_total = 0;

  /* Now set up the grand totals. */
  for (wi = 0; wi < max_wi; wi++)
    {
      /* Get this document vector. */
      dv = bow_wi2dvf_dv (barrel->wi2dvf, wi);
      if (dv == NULL)
        continue;
      for (j = 0; j < dv->length; j++)
        {
          di = dv->entry[j].di;
          doc = bow_array_entry_at_index (barrel->cdocs, di);
          if (doc->type == bow_doc_train)
            {
              grand_totals[doc->class] += dv->entry[j].count;
              grand_total += dv->entry[j].count;
            }
        }
    }

  /* Calculate the total entropy. */
  total_entropy = bow_entropy (grand_totals, num_classes);

  /* Now calculate the information gain of each word. */
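  /* As in the document-event version above, the loop below computes
     IG(w) = H(C) - [ P(w)*H(C|w) + P(!w)*H(C|!w) ], but under the
     "word event" model: the counts fed to bow_entropy () are word
     occurrences (dv->entry[j].count) rather than document priors, so
     P(w) is the fraction of all training-set word occurrences that
     are occurrences of w. */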
  for (wi = 0; wi < max_wi; wi++)
    {
      /* Get this document vector. */
      dv = bow_wi2dvf_dv (barrel->wi2dvf, wi);
      if (dv == NULL)
        {
          ret[wi] = 0;
          continue;
        }

      /* Reset arrays to zero. */
      for (i = 0; i < num_classes; i++)
        {
          with_word[i] = 0;
          without_word[i] = 0;
        }
      with_word_total = 0;

      /* Create totals for this dv. */
      for (j = 0; j < dv->length; j++)
        {
          di = dv->entry[j].di;
          doc = bow_cdocs_di2doc (barrel->cdocs, di);
          if (doc->type == bow_doc_train)
            {
              with_word[doc->class] += dv->entry[j].count;
              with_word_total += dv->entry[j].count;
            }
        }

      /* Create without-word totals. */
      for (j = 0; j < num_classes; j++)
        without_word[j] = grand_totals[j] - with_word[j];
      without_word_total = grand_total - with_word_total;

      /* Calculate entropies. */
      with_word_entropy = bow_entropy (with_word, num_classes);
      without_word_entropy = bow_entropy (without_word, num_classes);
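Usage sketch (not part of the original listing, which continues on page 2): assuming a bow_barrel has already been built elsewhere with its training documents marked bow_doc_train, and assuming libbow's bow_malloc is a plain malloc wrapper so the returned array can be released with free(), the per-word scores could be retrieved like this:

#include <bow/libbow.h>
#include <stdio.h>
#include <stdlib.h>

/* Hypothetical helper: print the information-gain score of every word
   index in an already-built barrel.  Building the barrel (indexing the
   corpus, marking the training set) happens elsewhere. */
void
print_infogain (bow_barrel *barrel, int num_classes)
{
  int size, wi;
  float *ig = bow_infogain_per_wi_new_document_event (barrel, num_classes,
                                                      &size);
  for (wi = 0; wi < size; wi++)
    printf ("wi=%d  infogain=%f\n", wi, ig[wi]);
  free (ig);                    /* assumes bow_malloc wraps plain malloc */
}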
