
📄 knn.c

📁 Code from the book by machine-learning author Tom Mitchell
💻 C
📖 Page 1 of 2
/* Weight-setting and scoring implementation for K-nearest-neighbour
   classification */

/* Copyright (C) 1997, 1998, 1999 Andrew McCallum

   Written by:  Andrew Kachites McCallum <mccallum@cs.cmu.edu>

   This file is part of the Bag-Of-Words Library, `libbow'.

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Library General Public License
   as published by the Free Software Foundation, version 2.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Library General Public License for more details.

   You should have received a copy of the GNU Library General Public
   License along with this library; if not, write to the Free Software
   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111, USA */

#include <bow/libbow.h>
#include <math.h>
#include <argp/argp.h>
#include <assert.h>
#include <stdlib.h>
#include <string.h>

/* My reading of the SMART documentation and code makes me think that
   the various tf weight options - particularly 'l' and 'a' - apply
   only to words that occur in the document in question, which avoids
   taking the log of 0.  If you want to check this, look at the table at
   http://pi0959.kub.nl:2080/Paai/Onderw/Smart/examp_10.html
   (linked to by the "Advanced actions" part of the SMART tutorial at
   http://pi0959.kub.nl:2080/Paai/Onderw/Smart/hands-on-tekst.html#advanced)
   and also look at the tfwt_log function in the SMART source tree at
   src/libconvert/weights_tf.c.

   The weighting options implemented here are:

   Position 1 - TF.  If f == 0 then TF == 0.  Otherwise, for f > 0:
   'n' - none     - f
   'b' - binary   - 1
   'm' - max-norm - f / (max f in doc)
   'a' - aug-norm - 0.5 + 0.5 * (f / (max f in doc))
   'l' - log      - 1.0 + ln(f)

   Position 2 - IDF.
   'n' - none     - 1.0
   't' - tfidf    - ln (total docs / docs containing term)

   Position 3 - NORM.
   'n' - none     - 1.0
   'c' - cosine   - 1 / sqrt (sum (tf * idf)**2)  */
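/* Worked example of the codes above (added for illustration; the
   numbers are hypothetical, not from libbow).  Under scheme "atc", a
   word with raw frequency f = 3, in a document whose most frequent
   word occurs 4 times, appearing in 10 of 100 model documents, gets

       tf     = 0.5 + 0.5 * (3.0 / 4.0)  = 0.875
       idf    = ln (100.0 / 10.0)        = 2.303 (approx.)
       weight = tf * idf                 = 2.015 (approx.)

   and the trailing 'c' then divides every weight in the document by
   sqrt (sum of (tf * idf)^2 over the document's words).  */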
/* Command-line options specific to kNN */

/* Default value for option "knn_k", the number of neighbours to look
   at. */
static int knn_k = 30;

/* Default values for the weighting schemes */
static char query_weights[4] = "nnn";
static char doc_weights[4] = "nnn";

/* The integer or single char used to represent this command-line
   option.  Make sure it is unique across all libbow and rainbow. */
#define KNN_K_KEY 4001
#define KNN_WEIGHTING_KEY 4002

static struct argp_option knn_options[] =
{
  {0, 0, 0, 0,
   "K-nearest neighbor options, --method=knn:", 40},
  {"knn-k", KNN_K_KEY, "K", 0,
   "Number of neighbours to use for nearest neighbour.  Defaults to 30."},
  {"knn-weighting", KNN_WEIGHTING_KEY, "xxx.xxx", 0,
   "Weighting scheme to use, coded like SMART.  Defaults to nnn.nnn.  "
   "The first three chars describe how the model documents are "
   "weighted, the second three describe how the test document is "
   "weighted.  The codes for each position are described in knn.c.  "
   "Classification consists of summing the scores per class for the "
   "k nearest neighbour documents and sorting."},
  {0, 0}
};

error_t
knn_parse_opt (int key, char *arg, struct argp_state *state)
{
  switch (key)
    {
    case KNN_K_KEY:
      knn_k = atoi (arg);
      break;
    case KNN_WEIGHTING_KEY:
      /* Arg is a string that we need to split into two bits.  Per the
	 help text above, the three chars before the '.' weight the
	 model documents and the three after it weight the query. */
      strncpy (doc_weights, arg, 3);
      strncpy (query_weights, arg + 4, 3);
      break;
    default:
      return ARGP_ERR_UNKNOWN;
    }
  return 0;
}

static const struct argp knn_argp =
{
  knn_options,
  knn_parse_opt
};

static struct argp_child knn_argp_child =
{
  &knn_argp,			/* This child's argp structure */
  0,				/* flags for child */
  0,				/* optional header in help message */
  0				/* arbitrary group number for ordering */
};

/* End of command-line options specific to kNN */

/* Some useful macros for decoding a weighting string */
#define TF_M(x) ((x)[0] == 'm')
#define TF_B(x) ((x)[0] == 'b')
#define TF_A(x) ((x)[0] == 'a')
#define TF_L(x) ((x)[0] == 'l')
#define TF_N(x) ((x)[0] == 'n')
#define IDF_T(x) ((x)[1] == 't')
#define NORM_C(x) ((x)[2] == 'c')
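/* Example (illustrative only): with --knn-weighting=ltc.nnn the split
   in knn_parse_opt leaves doc_weights == "ltc" and
   query_weights == "nnn", so TF_L (doc_weights), IDF_T (doc_weights)
   and NORM_C (doc_weights) are all true, while the query keeps raw
   term frequencies.  */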
/* Function to assign tfidf weights to every word in the barrel
   according to the contents of doc_weights */
void
bow_knn_set_weights (bow_barrel *barrel)
{
  int di;
  bow_cdoc *cdoc;
  int wi;			/* a "word index" into WI2DVF */
  int max_wi;			/* the highest "word index" in WI2DVF. */
  bow_dv *dv;			/* the "document vector" at index WI */
  int dvi;			/* an index into the DV */
  int num_docs, total_model_docs;

  /* We assume we are dealing with the full document barrel - no
     wimpy vector-per-class stuff here. */
  assert (!strcmp (barrel->method->name, "knn"));

  max_wi = MIN (barrel->wi2dvf->size, bow_num_words ());

  /* Step one - calculate the number of documents in the model.  We'll
     use this for the idf calculation later on.  Also, reset each
     document's word_count - we're going to use this to store the max
     tf in the document, which is needed by some of the tf weighting
     methods. */
  total_model_docs = 0;
  for (di = 0; di < barrel->cdocs->length; di++)
    {
      cdoc = bow_cdocs_di2doc (barrel->cdocs, di);
      if (cdoc->type == bow_doc_train)
	{
	  total_model_docs++;
	  cdoc->word_count = 0;
	}
    }

  /* Step two - we can calculate weights for the b, l and n weighting
     schemes now.  For a and m we can calculate the max tf in each
     document.  We can also calculate the idf term and store it. */
  for (wi = 0; wi < max_wi; wi++)
    {
      /* Count the number of model docs this word occurs in */
      num_docs = 0;
      dv = bow_wi2dvf_dv (barrel->wi2dvf, wi);
      if (dv == NULL)
	continue;
      for (dvi = 0; dvi < dv->length; dvi++)
	{
	  cdoc = bow_cdocs_di2doc (barrel->cdocs, dv->entry[dvi].di);
	  if (cdoc->type == bow_doc_train)
	    {
	      num_docs++;
	      /* Set some weights */
	      if (TF_B (doc_weights))
		{
		  /* Binary counts */
		  dv->entry[dvi].weight = 1;
		}
	      else if (TF_L (doc_weights))
		{
		  /* 1 + ln(tf) */
		  dv->entry[dvi].weight = 1 + log (dv->entry[dvi].count);
		}
	      else if (TF_N (doc_weights))
		{
		  /* tf */
		  dv->entry[dvi].weight = dv->entry[dvi].count;
		}
	      else
		{
		  /* Update the max tf count */
		  if (cdoc->word_count < dv->entry[dvi].count)
		    cdoc->word_count = dv->entry[dvi].count;
		}
	    }
	}
      /* Set up the IDF for this word.  (Guard added: a word occurring
	 only in non-training documents leaves num_docs == 0, which
	 would divide by zero.) */
      if (num_docs > 0)
	dv->idf = log ((double) total_model_docs / (double) num_docs);
      else
	dv->idf = 0.0;
    }

  /* Final step - calculate weights for methods that use the max tf
     stuff.  Also multiply in the IDF terms. */
  for (wi = 0; wi < max_wi; wi++)
    {
      dv = bow_wi2dvf_dv (barrel->wi2dvf, wi);
      if (dv == NULL)
	continue;
      for (dvi = 0; dvi < dv->length; dvi++)
	{
	  cdoc = bow_cdocs_di2doc (barrel->cdocs, dv->entry[dvi].di);
	  if (cdoc->type == bow_doc_train)
	    {
	      if (TF_A (doc_weights))
		{
		  /* 0.5 + 0.5 * (tf / max_tf_in_doc) */
		  dv->entry[dvi].weight =
		    0.5 + 0.5 * ((double) dv->entry[dvi].count
				 / (double) cdoc->word_count);
		}
	      else if (TF_M (doc_weights))
		{
		  /* tf / max_tf_in_doc */
		  dv->entry[dvi].weight =
		    (double) dv->entry[dvi].count / (double) cdoc->word_count;
		}
	      /* Multiply in the IDF */
	      if (IDF_T (doc_weights))
		dv->entry[dvi].weight *= dv->idf;
	    }
	}
    }
  /* Now our barrel has the tf*idf weight for each term in each
     document in our model */
}

void
bow_knn_normalise_weights (bow_barrel *barrel)
{
  /* This puts the euclidean doc length in cdoc->normalizer for each
     document in the model. */
  if (NORM_C (doc_weights))
    bow_barrel_normalize_weights_by_vector_length (barrel);
}

bow_barrel *
bow_knn_classification_barrel (bow_barrel *barrel)
{
  /* Just use the doc barrel - set the weights, normalise and return. */
  bow_knn_set_weights (barrel);
  bow_knn_normalise_weights (barrel);
  return barrel;
}

/* Set the weights for the query word vector according to the
   weighting scheme in query_weights */
void
bow_knn_query_set_weights (bow_wv *query_wv, bow_barrel *barrel)
{
  bow_dv *dv;
  int wvi, max_tf;

  /* null statement to avoid a compilation warning */
  barrel = barrel;

  /* Pass one - set weights for b, l or n.  Figure out the maximum
     word frequency of the document. */
  max_tf = 0;
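The listing breaks off here; the rest of the query-weighting code and the scoring routine continue on page 2. The help text above does describe the classification step, though: sum the similarity scores per class over the k nearest neighbour documents, then sort. Below is a minimal self-contained sketch of just that step, using hypothetical stand-in types rather than libbow's real structures.

#include <stdio.h>
#include <stdlib.h>

#define NUM_CLASSES 3
#define K 5

/* Hypothetical stand-ins: a neighbour document with its class label
   and similarity score, and a per-class running total. */
struct neighbour { int class; double score; };
struct class_score { int class; double score; };

/* qsort comparator: order class totals by descending score. */
static int
by_score_desc (const void *a, const void *b)
{
  const struct class_score *x = a;
  const struct class_score *y = b;
  return (x->score < y->score) - (x->score > y->score);
}

int
main (void)
{
  /* Pretend these are the K nearest neighbours of some query. */
  struct neighbour knn[K] = {
    {0, 0.91}, {2, 0.87}, {0, 0.80}, {1, 0.55}, {2, 0.40}
  };
  struct class_score totals[NUM_CLASSES] = { {0, 0.0}, {1, 0.0}, {2, 0.0} };
  int i;

  /* Sum the scores per class over the K neighbours... */
  for (i = 0; i < K; i++)
    totals[knn[i].class].score += knn[i].score;

  /* ...then sort; the first entry is the predicted class. */
  qsort (totals, NUM_CLASSES, sizeof totals[0], by_score_desc);
  for (i = 0; i < NUM_CLASSES; i++)
    printf ("class %d: %.2f\n", totals[i].class, totals[i].score);
  return 0;
}

This prints class 0 (total 1.71) first. In the real code the per-neighbour scores would presumably come from comparing the weighted query vector against each weighted, normalised model document.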
