⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 copy of adaboost_common.cpp

📁 it is an adaboost weak learner
💻 CPP
📖 第 1 页 / 共 2 页
字号:
	// prediction=zeros(num(1),1);
	Matrix* prediction = new Matrix(train_row,value_one);
    free(size);
	// geting the train error
	/*****************************************************************************/
	/* for h=1:d(1)                                                              */
	/*   prediction=prediction-log(boost(h,1))*(train(:,boost(h,2))>=boost(h,3)) */
    /* end                                                                       */
	/*****************************************************************************/
	//for (int h=1; h <= boost_row; h++){
    for (int h=1; h < boost_row; h++){
		//boost(h,3)
		double val_ttt=0;	  
		Matrix* matrix_val_ttt = new Matrix(h,value_one);
		matrix_val_ttt->getValueSpecific(h,0,&val_ttt,*ttt);
		
		// boost(h,2)
		double val_iii=0;
		matrix_val_ttt->getValueSpecific(h,0,&val_iii,*iii);
		
		// train(:,boost(h,2))
		Matrix* partOfTrainMatrix = new Matrix(train_row,val_iii);
		partOfTrainMatrix->specificPartOfMatrix(*train_matrix,train_row,val_iii);
		
		// (train(:,boost(h,2))>=boost(h,3))
		Matrix train_error_boost = (*partOfTrainMatrix) > val_ttt;  
		
		// (boost(h,1))
		double val_boost;
		matrix_val_ttt->getValueSpecific(h,0,&val_boost,*boost);
		
		// log(boost(h,1))
		double log_val_boost = log(val_boost);
		
		// log(boost(h,1))*(train(:,boost(h,2))>=boost(h,3))
		Matrix* boostMultiTrain = new Matrix(train_row, value_one);
		boostMultiTrain->ScalarMultiMatrix(log_val_boost,train_error_boost);
		
		// prediction-log(boost(h,1))*(train(:,boost(h,2))>=boost(h,3))		
		prediction->zeros(train_row,value_one);
		prediction->matrixMinusMatrix(*prediction,*boostMultiTrain);

		// free help matrix
		//free(prediction);
		free(matrix_val_ttt);
		free(partOfTrainMatrix);
		free(boostMultiTrain);
	}
	/*********************************/
	/* temp=-sum(log(boost(:,1)))/2; */
	/*********************************/
	// log(boost(:,1))
	Matrix* log_boost = new Matrix(boost_row,boost_col);
	log_boost->matrixLog(*boost);
	// sum(log(boost(:,1)))
	double sum_log_boost = 0;
	Matrix* sum_log_boost_matrix = new Matrix(boost_row,boost_col);
	sum_log_boost_matrix->matrixSumRow(&sum_log_boost,*log_boost);
	double temp = 0;
	temp = -1 * (sum_log_boost/2);
	
	/********************************************************************/
	/* errorTrain=sum(abs((train_label>=5)-(prediction>=temp)))/num(1); */
	/********************************************************************/
	// (prediction>=temp)
	Matrix prediction_minus_temp = (*prediction) > temp;  
	
	// (train_label>=5)
	Matrix train_label_minus_value = (*train_label_matrix) > 3; 
	
	// ((train_label>=5)-(prediction>=temp))
	Matrix* train_label_minus_prediction = new Matrix(train_row,boost_col+1);
	train_label_minus_prediction->matrixMinusMatrix(train_label_minus_value,prediction_minus_temp);
	
	// abs((train_label>=5)-(prediction>=temp))
	Matrix* abs_train_label_minus_prediction = new Matrix(train_row,boost_col+1);
	abs_train_label_minus_prediction->matrixAbs(*train_label_minus_prediction);
	
	// sum(abs((train_label>=5)-(prediction>=temp)))
	double sum_abs_train_label_minus_prediction = 0;
	Matrix* sum_abs_train_label_minus_prediction_matrix = new Matrix(train_row,boost_col+1);
	sum_abs_train_label_minus_prediction_matrix->matrixSumRow(&sum_abs_train_label_minus_prediction,*abs_train_label_minus_prediction);
	
	
	// errorTrain=sum(abs((train_label>=5)-(prediction>=temp)))/num(1)
	errorTrain->scalarDivisonScalar(sum_abs_train_label_minus_prediction,train_row, *errorTrain);
	
	/*****************************/
	/* prediction=zeros(1000,1); */
	/*****************************/
    Matrix* prediction_test = new Matrix(test_row,value_one);
	prediction_test->zeros(test_row,value_one);
	
    // geting the test error
	/******************************************************************************/
    /* for h=1:d(1)																  */
    /*    prediction=prediction-log(boost(h,1))*(test(:,boost(h,2))>=boost(h,3)); */
    /* end                                                                        */
	/******************************************************************************/
    for (int hh=1; hh < boost_row; hh++){
		//boost(h,3)
		double val_ttt;	  
		Matrix* matrix_val_ttt = new Matrix(hh,value_one);
		matrix_val_ttt->getValueSpecific(hh,0,&val_ttt,*ttt);
		
		// boost(h,2)
		double val_iii;
		matrix_val_ttt->getValueSpecific(hh,0,&val_iii,*iii);
		
		// train(:,boost(h,2))
		Matrix* partOfTestMatrix = new Matrix(test_row,val_iii);
		partOfTestMatrix->specificPartOfMatrix(*test_matrix,test_row,val_iii);
		
		// (train(:,boost(h,2))>=boost(h,3))
		Matrix test_error_boost = (*partOfTestMatrix) > val_ttt;  
		
		// (boost(h,1))
		double val_boost;
		matrix_val_ttt->getValueSpecific(hh,0,&val_boost,*boost);
		
		// log(boost(h,1))
		double log_val_boost = log(val_boost);
		
		// log(boost(h,1))*(train(:,boost(h,2))>=boost(h,3))
		Matrix* boostMultiTest = new Matrix(test_row,value_one);
		boostMultiTest->ScalarMultiMatrix(log_val_boost,test_error_boost);
		
		// prediction-log(boost(h,1))*(train(:,boost(h,2))>=boost(h,3))
		prediction_test->matrixMinusMatrix(*prediction_test,*boostMultiTest);

		free(matrix_val_ttt);
		free(partOfTestMatrix);
		free(boostMultiTest);
	}
	
	/****************************************************************/
    /* errorTest=sum(abs((test_label>=5)-(prediction>=temp)))/1000; */
	/****************************************************************/
    // (prediction>=temp)
	Matrix prediction_minus_temp_test = (*prediction_test) > temp;  
	
	// (train_label>=5)
	Matrix test_label_minus_value = (*test_label_matrix) > 3; 
	
	// ((train_label>=5)-(prediction>=temp))
	Matrix* test_label_minus_prediction = new Matrix(test_row,boost_col+1);
	test_label_minus_prediction->matrixMinusMatrix(test_label_minus_value,prediction_minus_temp_test);
	
	// abs((train_label>=5)-(prediction>=temp))
	Matrix* abs_test_label_minus_prediction = new Matrix(test_row,boost_col+1);
	abs_test_label_minus_prediction->matrixAbs(*test_label_minus_prediction);
	
	// sum(abs((train_label>=5)-(prediction>=temp)))
	double sum_abs_test_label_minus_prediction = 0;
	Matrix* sum_abs_test_label_minus_prediction_matrix = new Matrix(test_row,boost_col+1);
	sum_abs_test_label_minus_prediction_matrix->matrixSumRow(&sum_abs_test_label_minus_prediction,*abs_test_label_minus_prediction);
	
	
	// errorTrain=sum(abs((train_label>=5)-(prediction>=temp)))/num(1)
	errorTest->scalarDivisonScalar(sum_abs_test_label_minus_prediction,test_row, *errorTest);
	
	//free
//	free(size);
	free(prediction);
    free(prediction_test);
//	free(log_boost);
	free(sum_log_boost_matrix);
	free(train_label_minus_prediction);
	free(abs_train_label_minus_prediction);
	free(sum_abs_train_label_minus_prediction_matrix);
	free(test_label_minus_prediction);
	free(abs_test_label_minus_prediction);
	return (0);
}

			 

/* Parse one line of a label-only dataset file.
 * Strips a trailing '#' comment in place, then reads the leading integer
 * label into *train_label.
 * Returns 1 on success, 0 if no label could be read from the line.
 * NOTE(review): numwords and train_max_words_doc are currently unused;
 * kept so the signature stays parallel to parse_document(). */
int parse_dataset(char *line,long *train_label,long*numwords,long train_max_words_doc)
{
	register long pos;

	/* cut off comments: terminate the line at the first '#'.
	 * Fixed: the original loop tested line[len] with a fixed index while
	 * only pos advanced, so pos could run past the end of the buffer. */
	pos=0;
	while(line[pos]) 
	{
		if(line[pos] == '#') 
		{
			line[pos]=0;
		}
		else 
		{
			pos++;
		}
	}
	/* read the label at the start of the (possibly truncated) line */
	if((sscanf(line,"%ld",train_label)) == EOF) return(0);
	return(1);   /* fixed: control previously fell off the end (UB) */
}

/* Read all examples from docfile into docs[]/label[].
 * docfile       : path of the data file, one example per line
 * docs          : caller-allocated array receiving one DOC per example
 * label         : caller-allocated array receiving each example's label
 * max_words_doc : maximum number of features expected on one line
 * ll            : length of the longest line (buffer size for fgets)
 * totwords      : out - highest feature number seen in the file
 * totdoc        : out - number of examples actually read
 * Returns 0 on success, -1 if the file cannot be opened.
 * Ownership: allocates docs[i].words for each example read; caller frees. */
int read_documents(char *docfile,
					DOC  *docs,
					long *label,
					long max_words_doc,
					long ll,
					long *totwords,
					long *totdoc)
{
	char *line;
	DOC doc;
	long dnum=0,wpos,i,dpos=0,dneg=0,dunlab=0;
	long doc_label;
	FILE *docfl;

	line = (char *)my_malloc(sizeof(char)*ll);
	if ((docfl = fopen (docfile, "r")) == NULL)
	{ 
		printe (docfile);  
		free(line);   /* fixed: line leaked on this error path */
		return -1;
	}
	
	/* scratch word buffer reused by parse_document for every line */
	doc.words = (SVM_WORD *)my_malloc(sizeof(SVM_WORD)*(max_words_doc+10));
	if (com_pro.show_readfile)
	{
		sprintf(temstr,"Reading examples into memory..."); 
		printm(temstr);
	}
	
	dnum=0;
	(*totwords)=0;
	while((!feof(docfl)) && fgets(line,(int)ll,docfl)) {
		if(line[0] == '#') continue;  /* line contains comments */
		if(!parse_document(line,&doc,&doc_label,&wpos,max_words_doc)) 
		{
			sprintf(temstr,"Parsing error in line %ld!",dnum);
			printm(temstr);
		}
		label[dnum]=doc_label;
		if(doc_label > 0) dpos++;
		if (doc_label < 0) dneg++;
		if (doc_label == 0) dunlab++;
		/* track the highest feature id; words[wpos-2] is the last real
		   feature (words[wpos-1] is the 0-terminator entry) */
		if((wpos>1) && ((doc.words[wpos-2]).wnum>(*totwords))) 
			(*totwords)=(doc.words[wpos-2]).wnum;
		/* copy the parsed words into a right-sized, per-document buffer */
		docs[dnum].words = (SVM_WORD *)my_malloc(sizeof(SVM_WORD)*wpos);
		docs[dnum].docnum=dnum;
		for(i=0;i<wpos;i++) 
			docs[dnum].words[i]=doc.words[i];
		docs[dnum].twonorm_sq=doc.twonorm_sq;
		dnum++;  
		if((dnum % 100) == 0&&com_pro.show_readfile) 
		{
			sprintf(temstr,"read %ld..",dnum); 
			printm(temstr);
		}
	} 
	
	fclose(docfl);
	free(line);
	free(doc.words);
	if (com_pro.show_action)
	{
		sprintf(temstr, "OK. (%ld examples read)", dnum);
		printm(temstr);
		sprintf(temstr,"%ld positive, %ld negative, and %ld unlabeled examples.",dpos,dneg,dunlab); 
		printm(temstr);
	}
	(*totdoc)=dnum;
	return 0;   /* fixed: control previously fell off the end (UB) */
}
/* Parse one line of a data file into *doc.
 * Expected line format: "<label> <wnum>:<weight> <wnum>:<weight> ...",
 * with optional trailing '#' comment; the literal token "NaN" as a weight
 * is mapped to 0. Feature numbers must be >= 1 and strictly increasing.
 * Outputs: *label, doc->words (0-terminated), *numwords = wpos+1.
 * Returns 1 on success, 0 on a malformed line. */
int parse_document(char *line,DOC *doc,long *label,long*numwords,long max_words_doc)
{
	register long wpos,pos;
	long wnum;
	double weight;
	
	pos=0;
	while(line[pos]) 
	{      /* cut off comments */
		if(line[pos] == '#') 
		{
			line[pos]=0;
		}
		else 
		{
			pos++;
		}
	}
	wpos=0;
	if((sscanf(line,"%ld",label)) == EOF) return(0);
	pos=0;
	while(line[pos]==' ') pos++;
	while(line[pos]>' ') pos++;
	char sring_weight[5];
	char NaN[5]="NaN";
	int result;
	/* NOTE(review): sscanf only returns EOF on empty input; a return of 0
	   (no conversion) would loop with stale wnum/weight — preserved as-is. */
	while((sscanf(line+pos,"%ld:%lf",&wnum,&weight)!= EOF) && (wpos<max_words_doc))
	{
		/* re-scan the weight as text to detect a literal "NaN" token.
		   Fixed: pass the array (decays to char*) instead of &array, which
		   had the wrong type for %s; %4s bounds the write to the 5-byte
		   buffer instead of an unbounded %s. */
		if(sscanf(line+pos,"%ld:%4s",&wnum,sring_weight)!= EOF)
		{
		     result=strcmp(NaN,sring_weight);
		     if(result == 0)
			 {
                weight = 0;   /* treat NaN weights as zero */
			 }	  
		}
		/* advance past the current token to the next "wnum:weight" pair */
		while(line[pos++]==' ');
		while(line[++pos]>' ');
		if(wnum<=0) 
		{ 
			printe ("Feature numbers must be larger or equal to 1!!!"); 
			sprintf(temstr,"LINE: %s",line);
			printm(temstr);
			return (0);
		 
		}
		if((wpos>0) && ((doc->words[wpos-1]).wnum >= wnum))
		{ 
			printe ("Features must be in increasing order!!!"); 
			sprintf(temstr,"LINE: %s",line);
			printm(temstr);
			return (0);
		 
		}
		(doc->words[wpos]).wnum=wnum;
 		(doc->words[wpos]).weight=weight; 
		wpos++;
	
	}
	/* terminate the word list with a 0 feature number */
	(doc->words[wpos]).wnum=0;
	(*numwords)=wpos+1;
	doc->docnum=-1;
	//doc->twonorm_sq=sprod_ss(doc->words,doc->words);
	return(1);
}
/* grep through file and count number of lines, 
maximum number of spaces per line, and 
longest line.
Outputs: *nol = number of lines (1 + newline count), *wol = max spaces
seen on any line, *ll = length of the longest line including its '\n'.
Returns 0 on success, -1 if the file cannot be opened.
NOTE(review): a final line without a trailing '\n' is not counted into
*ll / *wol — preserved as-is. */
int nol_ll(char *file,long *nol,long *wol,long *ll) 
{
	FILE *fl;
	int ic;
	char c;
	long current_length,current_wol;
	
	if ((fl = fopen (file, "r")) == NULL)
	{ 
		printe (file);   
		return -1;
	}
	current_length=0;
	current_wol=0;
	(*ll)=0;
	(*nol)=1;
	(*wol)=0;
	while((ic=getc(fl)) != EOF) 
	{
		c=(char)ic;
		current_length++;
		if(c == ' ') 
		{
			current_wol++;
		}
		if(c == '\n') 
		{
			(*nol)++;
			/* fold the finished line's stats into the running maxima */
			if(current_length>(*ll)) 
			{
				(*ll)=current_length;
			}
			if(current_wol>(*wol)) 
			{
				(*wol)=current_wol;
			}
			current_length=0;
			current_wol=0;
		}
	}
	fclose(fl);
	return 0;   /* fixed: control previously fell off the end (UB) */
}

/* Processor time consumed so far, in hundredths of a second. */
long get_runtime() 
{
	const clock_t now = clock();
	return (long)(100.0 * (double)now / (double)CLOCKS_PER_SEC);
}


// Portability shim: the MSVC runtime provides _isnan() rather than the
// C99 isnan() macro, so expose an isnan() function for the rest of the code.
// Returns nonzero when a is NaN, 0 otherwise.
// NOTE(review): this will clash with <math.h>'s isnan macro on C99+
// toolchains — confirm before porting off MSVC.
int isnan(double a)
{
	return(_isnan(a));
}

/* malloc() wrapper that reports allocation failure via printe().
 * Returns the new block, or NULL when out of memory (the error has
 * already been reported by then). */
void * my_malloc(long size) 
{
	void *block = malloc(size);
	if (block == NULL)
	{ 
		printe ("Out of memory!"); 
		return (NULL);
	}
	return (block);
}
//print error on screen
// Prefixes str with "--error--" and forwards it to the application's
// message sink.
// Fixed: the original strcat() into a fixed 200-byte buffer could
// overflow for long messages; snprintf bounds the write and always
// NUL-terminates (long messages are truncated instead of corrupting
// the stack).
void printe(char* str)
{
	char err[200];
	snprintf(err, sizeof err, "--error--%s", str);
	theApp.ShowM(err);
}

//print message on screen
// Thin wrapper: forwards str unchanged to the application's message sink.
void printm(char* str)
{
	theApp.ShowM(str);
}

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -