adaboost_common.cpp: an AdaBoost weak learner (C++)
(Page 1 of 3; the listing is truncated at the end of this page)
#include "stdafx.h"
#include "AdaBoost.h"
#include "Adaboost_common.h"
#include "math.h"
#include "matrix.h"
#include <time.h>

#include <stdlib.h>         /* For _MAX_PATH definition */
#include <stdio.h>
#include <malloc.h>
#include <memory.h>
#include <string.h>
#include <conio.h>
#include <dos.h>

#include <algorithm>
#include <iostream>
#include <vector>

#ifdef _WIN32
using namespace std;
#endif


#ifdef _DEBUG
#undef THIS_FILE
static char THIS_FILE[]=__FILE__;
#define new DEBUG_NEW
#endif

using namespace std ;
static char temstr[200];
typedef vector<int> INTVECTOR;

/* Read training examples and their labels from the training file */
int read_train_file(char *trainfile,
					long train_max_words_doc,
					long train_max_docs,
					long train_ll,
					Matrix* train_matrix,
					Matrix *train_label_matrix)
{
	FILE *docfl;
	char *line;
    line = (char *)my_malloc(sizeof(char)*train_ll);
	if ((docfl = fopen (trainfile, "r")) == NULL)
	{ 
		printe (trainfile);
		free(line);   /* assumes my_malloc() wraps malloc() */
		return -1;
	}
	
	if (com_pro.show_readfile)
	{
		sprintf(temstr,"Reading examples into memory..."); 
		printm(temstr);
	} 
	train_matrix->readFile(trainfile);
	int i=1;
	while((!feof(docfl)) && fgets(line,(int)train_ll,docfl))
	{
		if (line[0] == '#') continue;  /* skip comment lines */
		int len=0,train_label_end=0;
		len = strlen(line);
		len = len-2;   /* position of the label: the last character before the newline */
		/* "%d" matches the int argument */
		if ((sscanf(line+len,"%d",&train_label_end)) == EOF)
		{
			fclose(docfl);
			free(line);
			return 0;
		}
		double value = (double) train_label_end;
		int one = 0;
		train_label_matrix->set(i,one,value);
		i++;
	}
	fclose(docfl);
	free(line);
	return 0;
}

/* Read test examples and their labels from the test file */
int read_test_file(char *testfile,
				   long test_max_words_doc,
				   long test_max_docs,
				   long test_ll,
				   Matrix* test_matrix,
				   Matrix* test_label_matrix)
{
	FILE *docfl;
	char *line;
    line = (char *)my_malloc(sizeof(char)*test_ll);
	if ((docfl = fopen (testfile, "r")) == NULL)
	{ 
		printe (testfile);
		free(line);
		return -1;
	}	
	if (com_pro.show_readfile)
	{
		sprintf(temstr,"Reading examples into memory..."); 
		printm(temstr);
	} 
    // Load the test matrix from the test file
	test_matrix->readFile(testfile);
	int i=1;
	while((!feof(docfl)) && fgets(line,(int)test_ll,docfl))
	{
		if (line[0] == '#') continue;  /* skip comment lines */
		int len=0,test_label_end=0;
		len = strlen(line);
		len = len-2;   /* position of the label: the last character before the newline */
		if ((sscanf(line+len,"%d",&test_label_end)) == EOF)
		{
			fclose(docfl);
			free(line);
			return 0;
		}
		double value = (double) test_label_end;
		int one = 0;
		test_label_matrix->set(i,one,value);
		i++;
	}
	fclose(docfl);
	free(line);
	return 0;
}
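/*
 * Note on the input format, inferred from the two readers above: the feature
 * values are loaded by Matrix::readFile (not shown in this file), and only
 * the class label, assumed to be the single character before the newline, is
 * parsed here. A minimal sketch of that parse as a standalone helper
 * (parse_trailing_label is illustrative, not part of this project):
 */
static int parse_trailing_label(const char* line, int* label)
{
	int len = (int)strlen(line);
	if (len < 2)
		return -1;                 /* too short to hold "<label>\n" */
	/* step back over the newline to the label character */
	if (sscanf(line + (len-2), "%d", label) != 1)
		return -1;                 /* malformed line */
	return 0;                      /* *label now holds the trailing digit */
}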

/*************************************************************************************/
/* function boosted=adaBoost(train,train_label,cycles)                               */
/*  disp('running adaBoost algorithm');                                              */
/*  d=size(train);                                                                   */
/*	distribution=ones(1,d(1))/d(1);                                                  */
/*	error=zeros(1,cycles);                                                           */
/*	beta=zeros(1,cycles);                                                            */
/*	label=(train_label(:)>=5);% contain the correct label per vector                 */
/*	for j=1:cycles                                                                   */
/*        if(mod(j,10)==0)                                                           */
/*            disp([j,cycles]);                                                      */
/*        end                                                                        */
/*	  [i,t]=weakLearner(distribution,train,label);                                   */
/*    error(j)=distribution*abs(label-(train(:,i)>=t));                              */
/*    beta(j)=error(j)/(1-error(j));                                                 */
/*    boosted(j,:)=[beta(j),i,t];                                                    */
/*    distribution=distribution.* exp(log(beta(j))*(1-abs(label-(train(:,i)>=t))))'; */
/*    distribution=distribution/sum(distribution);                                   */
/* end                                                                               */
/*************************************************************************************/
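/*
 * Worked example of one round (numbers illustrative): if the weighted error
 * is error(j)=0.2, then beta(j)=0.2/0.8=0.25. The update multiplies each
 * example's weight by beta(j)^(1-miss): correctly classified examples
 * (miss=0) shrink by a factor of 0.25 while misclassified ones (miss=1)
 * keep their weight, so after renormalization the hard examples dominate
 * the next round's distribution.
 */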
int adaBoost(Matrix* train, 
			 Matrix* train_label,
			 long train_max_words_doc,
			 long train_max_docs,
			 int step,
			 int cycles,
			 Matrix* boost,
			 Matrix* iii,
			 Matrix* ttt,
			 double ave_delta)
{
	int r_train = train_max_docs+1;
	int value_one = 1;
	int cycles_num = 0;

	/* cap the number of boosting rounds at 100; for small inputs run one
	   round per step and widen step to the full training size */
	if (step < 100)
	{
		cycles_num = step;
		step = r_train;
	}
	else
	{
		cycles_num = 100;
	}

	/* allocate once step has its final value */
	Matrix* scalar_minus_error = new Matrix(value_one+1,step);
	Matrix* distribution = new Matrix(value_one+1,step);
	distribution->ones(value_one+1,step,value_one,step);
	Matrix* error= new Matrix(value_one+1,cycles);	
    error->zeros(value_one+1,cycles);
	Matrix* beta= new Matrix(value_one+1,cycles);	
    beta->zeros(value_one+1,cycles);

	// temporary matrices used by the boosting loop
	Matrix* train_error_part = new Matrix(step,value_one+1);
	Matrix* error_label_train = new Matrix(step,value_one+1);
	Matrix* error_abs_label_train = new Matrix(step,value_one+1);
	Matrix* train_error_part_1 = new Matrix(step,value_one+1);
	Matrix* error_label_train_1 = new Matrix(step,value_one+1);
	Matrix* error_abs_label_train_1 = new Matrix(step,value_one+1);
	Matrix* scalar_minus_error_1 = new Matrix(step,value_one+1);
	Matrix* val_beta = new Matrix(value_one+1,value_one+1);
	Matrix* beta_label_train = new Matrix(step,value_one+1);
	Matrix* expMatrix = new Matrix(step,value_one+1);
	Matrix* transposeExpMatrix = new Matrix(value_one+1,step);
	Matrix* sumMatrix = new Matrix(value_one+1,step);
	Matrix* i_all = new Matrix(step,value_one);
	Matrix* t_all = new Matrix(step,value_one);

    double Array_boost[100];   /* fixed-size buffers: safe only while cycles <= 100 */
	double Array_ttt[100];
	double Array_iii[100];
	Matrix* getArrayiii = new Matrix(cycles,value_one);
    Matrix* getArrayttt = new Matrix(cycles,value_one);
	Matrix* getArrayboost = new Matrix(cycles,value_one);

    // label_matrix contains the correct binary label for each vector;
    // note the MATLAB reference thresholds at >=5 while this port uses >3
    Matrix label_matrix = (*train_label)>3;

    if (com_pro.show_action)
		printm("running adaBoost algorithm");
	for (int j=1; j <= cycles_num; j++)
	{
		if ((j % 10) == 0) 
		{
			if (com_pro.show_compute_1)
			{
              sprintf(temstr,"%d %d\n",j,cycles);
              printm(temstr);
			}
		}
        /************************************************/
		/* [i,t]=weakLearner(distribution,train,label); */
        /************************************************/
		int i,t;
        weakLearner(distribution,train,i_all,t_all,label_matrix,train_max_words_doc,train_max_docs,step, &i, &t,cycles,j,ave_delta);
		/*****************************************************/
		/* error(j)=distribution*abs(label-(train(:,i)>=t)); */
		/*****************************************************/
		// train(:,i)>=t  (note: this port compares with strict >)
		train_error_part->specificPartOfMatrix(*train,step,i);
		Matrix train_error = (*train_error_part)>t;
		
        // label-(train(:,i)>=t)
		error_label_train->matrixMinusMatrix(train_error,label_matrix);
		// abs(label-(train(:,i)>=t))
		error_abs_label_train->matrixAbs(*error_label_train);
		
		// distribution*abs(label-(train(:,i)>=t))
        Matrix error_tmp = (*distribution)*(*error_abs_label_train);
		
		//error(j)=distribution*abs(label-(train(:,i)>=t));
		error->copy(j-1,error_tmp);

        /**********************************/
		/* beta(j)=error(j)/(1-error(j)); */
		/**********************************/
		double k = 1;
		scalar_minus_error->ScalarMinusMatrix(k,*error);
        Matrix beta = (*error) / (*scalar_minus_error); 
        
		/*******************************/
		/* boosted(j,:)=[beta(j),i,t]; */
		/*******************************/
         boost->copyToMatrix(j,beta);
		 iii->setValue(j,i);
		 ttt->setValue(j,t);
		
         getArrayboost->matrixToArray(Array_boost,*boost);
		 getArrayiii->matrixToArray(Array_iii,*iii);
         getArrayttt->matrixToArray(Array_ttt,*ttt);
		/*********************************************************************************/
        /* distribution=distribution.* exp(log(beta(j))*(1-abs(label-(train(:,i)>=t))))' */
        /*********************************************************************************/
		// (train(:,i)>=t)
		train_error_part_1->specificPartOfMatrix(*train,step,i);
		Matrix train_error_1 = (*train_error_part_1)>t;  
			
		// label-(train(:,i)>=t)
		error_label_train_1->matrixMinusMatrix(train_error_1,label_matrix);
		// abs(label-(train(:,i)>=t))
		error_abs_label_train_1->matrixAbs(*error_label_train_1);

		// 1-abs(label-(train(:,i)>=t))
		scalar_minus_error_1->ScalarMinusMatrix(k,*error_abs_label_train_1);
		
		// log(beta(j))
        double y;
		double val;
        val_beta->getValue(j,&val,beta);
        y = log(val);
		
		// log(beta(j)) * (1-abs(label-(train(:,i)>=t))))
		beta_label_train->ScalarMultiMatrix(y,*scalar_minus_error_1);
        
		// exp(log(beta(j))*(1-abs(label-(train(:,i)>=t))))'
		expMatrix->matrixExp(*beta_label_train);
		transposeExpMatrix->matrixTranspose(step,value_one,*expMatrix);
		
		// distribution=distribution.* exp(log(beta(j))*(1-abs(label-(train(:,i)>=t))))';
		distribution->MatrixMultiMatrix(*distribution,*transposeExpMatrix);
		
		//distribution=distribution/sum(distribution);
		double sum_distribution=0;
		sumMatrix->matrixSumCol(&sum_distribution, *distribution);
		distribution->matrixdDivisonScalar(sum_distribution,*distribution);

	}
	// release the temporary matrices
    delete train_error_part;
	delete error_label_train;
	delete error_abs_label_train;
	delete scalar_minus_error;
	delete train_error_part_1;
	delete error_label_train_1;
	delete error_abs_label_train_1;
	delete scalar_minus_error_1;
	delete val_beta;
	delete beta_label_train;
	delete sumMatrix;
	delete transposeExpMatrix;
	delete expMatrix;
	delete distribution;
	delete error;
	delete beta;
	delete i_all;
	delete t_all;
	delete getArrayiii;
    delete getArrayttt;
	delete getArrayboost;
	
	return(0);
}
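/*
 * A minimal self-contained sketch of the same algorithm on plain C arrays,
 * written directly from the MATLAB reference comments in this file. All
 * names here (weakLearnerSketch, adaBoostSketch) are illustrative only and
 * are not part of this project's API; the Matrix-based adaBoost() above and
 * weakLearner() below are the real implementations. Labels are assumed to
 * be 0/1 and features to lie in [0,256], matching the 16-step threshold
 * grid of the weakLearner spec below.
 */
static void weakLearnerSketch(const double* dist, const double* const* train,
                              const int* label, int n, int dim,
                              int* best_i, double* best_t)
{
	double best = -1.0;
	for (int f = 0; f < dim; f++)
	{
		for (int k = 1; k <= 16; k++)            /* thresholds 16,32,...,256 */
		{
			double thr = 16.0*k;
			double err = 0.0;                    /* weighted error of this stump */
			for (int s = 0; s < n; s++)
				err += dist[s] * (label[s] != (train[s][f] >= thr ? 1 : 0));
			if (fabs(err - 0.5) > best)          /* keep the stump farthest from chance */
			{
				best = fabs(err - 0.5);
				*best_i = f;
				*best_t = thr;
			}
		}
	}
}

static void adaBoostSketch(const double* const* train, const int* label,
                           int n, int dim, int cycles,
                           double* beta_out, int* i_out, double* t_out)
{
	double* dist = (double*)malloc(sizeof(double)*n);
	for (int s = 0; s < n; s++)
		dist[s] = 1.0/n;                         /* distribution=ones(1,d(1))/d(1) */
	for (int j = 0; j < cycles; j++)
	{
		int i = 0;
		double t = 0.0;
		weakLearnerSketch(dist, train, label, n, dim, &i, &t);
		double err = 0.0;                        /* error(j)=distribution*abs(label-pred) */
		for (int s = 0; s < n; s++)
			err += dist[s] * (label[s] != (train[s][i] >= t ? 1 : 0));
		double beta = err/(1.0 - err);           /* beta(j)=error(j)/(1-error(j)) */
		beta_out[j] = beta;  i_out[j] = i;  t_out[j] = t;
		double sum = 0.0;
		for (int s = 0; s < n; s++)
		{
			int miss = (label[s] != (train[s][i] >= t ? 1 : 0));
			dist[s] *= exp(log(beta)*(1 - miss)); /* beta^(1-miss): shrink correct ones */
			sum += dist[s];
		}
		for (int s = 0; s < n; s++)
			dist[s] /= sum;                      /* renormalize */
	}
	free(dist);
}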


/******************************************************************************************/
/*  function [i,t] = weakLearner(distribution,train,label)                                */
/*	 %disp('run weakLearner');                                                            */
/*   for tt=1:(16*256-1)                                                                  */
/*        error(tt)=distribution*abs(label-(train(:,floor(tt/16)+1)>=16*(mod(tt,16)+1))); */
/*   end                                                                                  */
/*  [val,tt]=max(abs(error-0.5));                                                         */
/*  i=floor(tt/16)+1;                                                                     */
/*  t=16*(mod(tt,16)+1);                                                                  */                                                
/******************************************************************************************/
int weakLearner(Matrix* distribution,
				Matrix* train,
				Matrix* i_all,
				Matrix* t_all,
				Matrix label_matrix,
				long train_max_words_doc,
				long train_max_docs,
				int step,
				int *i,
				int *t,
				int cycles,
				int j,
				double ave_delta)
{
	int r_train = train_max_docs+1;
	int c_train = train_max_words_doc;
	float sqrt_c_train_tmp = (float)sqrt((double)c_train);  /* explicit cast: sqrt(int) is ambiguous in C++ */
	int sqrt_c_train = (int)floor(sqrt_c_train_tmp);
	int value_one = 1;
	int modulus_number = 0;
	int floor_number = 1;
	int sizeOfsqrt_c_train = 2*sqrt_c_train*(c_train-1);
	int counter = 1;
