// adaboost_common.cpp
// ---------------------------------------------------------------------------
// Tail of the AdaBoost error-evaluation routine (the function header lies
// above this chunk).  It accumulates the boosted prediction for the training
// and test sets, derives the decision threshold `temp`, and stores the two
// error rates through the `errorTrain` / `errorTest` output matrices.
// ---------------------------------------------------------------------------
// prediction=zeros(num(1),1);
Matrix* prediction_train = new Matrix(step,value_one);
delete(size);
// geting the train error
/*****************************************************************************/
/* for h=1:d(1) */
/* prediction=prediction - log(boost(h,1)) * train_hypothesis */
/* end */
/*****************************************************************************/
// NOTE(review): the loop below runs h = 1 .. boost_row-1 and indexes h-1,
// so the last row of `boost` is never applied; the commented-out "<=" form
// would include it -- confirm which bound is intended.
//for (int h=1; h <= boost_row; h++){
for (int h=1; h < boost_row; h++){
// train_hypothesis: column h-1 of the per-learner train predictions
Matrix* partOfTrainHypothesis = new Matrix(step,value_one);
partOfTrainHypothesis->specificPartOfMatrix(*train_hypothesis,step,h-1);
// (boost(h,1)): fetch the weak learner's weight out of the boost table
Matrix* matrix_val_boost = new Matrix(value_one,value_one);
double val_boost=0;
matrix_val_boost->getValueSpecific(h-1,0,&val_boost,*boost);
// log(boost(h,1))
double log_val_boost = log(val_boost);
// log(boost(h,1)) * train_hypothesis
Matrix* boostMultiTrain = new Matrix(step, value_one);
boostMultiTrain->ScalarMultiMatrix(log_val_boost,*partOfTrainHypothesis);
// prediction-log(boost(h,1))*(train(:,boost(h,2))>=boost(h,3))
// (operand order depends on matrixMinusMatrix's convention; the
// commented-out alternative below swaps the two arguments.)
prediction_train->matrixMinusMatrix(*boostMultiTrain,*prediction_train);
//prediction_train->matrixMinusMatrix(*prediction_train,*boostMultiTrain);
//free matrix
delete matrix_val_boost;
delete partOfTrainHypothesis;
delete boostMultiTrain;
}
//double prediction_array[2296];
//Matrix* getPredictionArray = new Matrix(step,value_one);
//getPredictionArray->matrixToArray(prediction_array,*prediction_train);
/*********************************/
/* temp=-sum(log(boost(:,1)))/2; */
/*********************************/
// log(boost(:,1))
Matrix* log_boost = new Matrix(boost_row,boost_col);
log_boost->matrixLog(*boost);
// sum(log(boost(:,1)))
double sum_log_boost = 0;
Matrix* sum_log_boost_matrix = new Matrix(boost_row,boost_col);
sum_log_boost_matrix->matrixSumRow(&sum_log_boost,*log_boost);
// decision threshold; the divisor deliberately differs between the first
// AdaBoost pass (sign == 1, /3) and later passes (sign > 1, /2)
double temp = 0;
if (sign == 1){
temp = -1*(sum_log_boost/3); // for the first Adaboost running
}
if (sign > 1){
temp = -1 * (sum_log_boost/2
); // for the second Adaboost running
}
/********************************************************************/
/* errorTrain=sum(abs((train_label>=5)-(prediction>=temp)))/num(1); */
/********************************************************************/
// (prediction>=temp)
// NOTE(review): operator> is used although the MATLAB reference says ">=",
// and labels are compared against 3 rather than the quoted 5 -- verify.
Matrix prediction_biger_temp = (*prediction_train) > temp;
// (train_label>=5)
Matrix *train_label = new Matrix(step,boost_col);
train_label->partOfMatrix(step,boost_col,*train_label_matrix);
Matrix train_label_biger_value = (*train_label) > 3;
// ((train_label>=5)-(prediction>=temp))
Matrix* train_label_minus_prediction = new Matrix(step,boost_col);
train_label_minus_prediction->matrixMinusMatrix(prediction_biger_temp,train_label_biger_value);
// abs((train_label>=5)-(prediction>=temp)) -- abs makes the operand order
// of the subtraction above irrelevant for the error count
Matrix* abs_train_label_minus_prediction = new Matrix(step,boost_col);
abs_train_label_minus_prediction->matrixAbs(*train_label_minus_prediction);
// sum(abs((train_label>=5)-(prediction>=temp)))
double sum_abs_train_label_minus_prediction = 0;
Matrix* sum_abs_train_label_minus_prediction_matrix = new Matrix(step,boost_col);
sum_abs_train_label_minus_prediction_matrix->matrixSumRow(&sum_abs_train_label_minus_prediction,*abs_train_label_minus_prediction);
// errorTrain=sum(abs((train_label>=5)-(prediction>=temp)))/num(1)
errorTrain->scalarDivisonScalar(sum_abs_train_label_minus_prediction,step, *errorTrain);
// reset the accumulator before the test-set pass below
prediction_train->zeros(step,value_one);
/*****************************/
/* prediction=zeros(1000,1); */
/*****************************/
// geting the test error
// switch `step` over to the test-set row count (only while cycles < 100;
// TODO(review): confirm the meaning of this guard)
if (cycles < 100){
step=test_row;
}
Matrix* prediction_test = new Matrix(step,value_one);
/******************************************************************************/
/* for h=1:d(1) */
/* prediction = prediction - log(boost(h,1)) * hypothesis; */
/* end */
/******************************************************************************/
for (int hh=1; hh < boost_row; hh++){
// hypothesis: column hh-1 of the per-learner test predictions
Matrix* partOfTestHypothesis = new Matrix(step,value_one);
partOfTestHypothesis->specificPartOfMatrix(*test_hypothesis,step,hh-1);
// (boost(h,1))
// NOTE(review): sized (hh, 1) here but (1, 1) in the train loop above;
// it is only a scratch target for getValueSpecific, yet the
// inconsistency looks unintentional.
Matrix* matrix_val_boost = new Matrix(hh,value_one);
double val_boost=0;
matrix_val_boost->getValueSpecific(hh-1,0,&val_boost,*boost);
// log(boost(h,1))
double log_val_boost = log(val_boost);
// log(boost(h,1)) * hypothesis
Matrix* boostMultiTest = new Matrix(step,value_one);
boostMultiTest->ScalarMultiMatrix(log_val_boost,*partOfTestHypothesis);
// prediction-log(boost(h,1)) * hypothesis
prediction_test->matrixMinusMatrix(*boostMultiTest,*prediction_test);
//prediction_test->matrixMinusMatrix(*prediction_test,*boostMultiTest);
delete matrix_val_boost;
delete partOfTestHypothesis;
delete boostMultiTest;
}
//double predictionTest_array[2002];
//Matrix* getPredictionTestArray = new Matrix(step,value_one);
//getPredictionTestArray->matrixToArray(predictionTest_array,*prediction_test);
/****************************************************************/
/* errorTest=sum(abs((test_label>=5)-(prediction>=temp)))/1000; */
/****************************************************************/
// (prediction>=temp)
Matrix prediction_biger_temp_test = (*prediction_test) > temp;
// keep a copy of the thresholded test prediction for the caller
tmp_test_hypothesis->copyMatrixToMatrix(prediction_biger_temp_test);
// (train_label>=5)
Matrix *test_label = new Matrix(step,boost_col);
test_label->partOfMatrix(step,boost_col,*test_label_matrix);
Matrix test_label_biger_value = (*test_label) > 3;
// ((train_label>=5)-(prediction>=temp))
Matrix* test_label_minus_prediction = new Matrix(step,boost_col);
test_label_minus_prediction->matrixMinusMatrix(prediction_biger_temp_test,test_label_biger_value);
// abs((train_label>=5)-(prediction>=temp))
Matrix* abs_test_label_minus_prediction = new Matrix(step,boost_col);
abs_test_label_minus_prediction->matrixAbs(*test_label_minus_prediction);
// sum(abs((train_label>=5)-(prediction>=temp)))
double sum_abs_test_label_minus_prediction = 0;
Matrix* sum_abs_test_label_minus_prediction_matrix = new Matrix(step,boost_col);
sum_abs_test_label_minus_prediction_matrix->matrixSumRow(&sum_abs_test_label_minus_prediction,*abs_test_label_minus_prediction);
// errorTrain=sum(abs((train_label>=5)-(prediction>=temp)))/num(1)
//errorTest->scalarDivisonScalar(sum_abs_test_label_minus_prediction,test_row, *errorTest);
errorTest->scalarDivisonScalar(sum_abs_test_label_minus_prediction,step, *errorTest);
prediction_test->zeros(step,value_one);
//delete Matrix
// delete getPredictionTestArray;
delete prediction_train;
delete prediction_test;
delete train_label;
delete test_label;
delete log_boost;
delete sum_log_boost_matrix;
delete train_label_minus_prediction;
delete test_label_minus_prediction;
delete abs_train_label_minus_prediction;
delete abs_test_label_minus_prediction;
delete sum_abs_train_label_minus_prediction_matrix;
delete sum_abs_test_label_minus_prediction_matrix;
return (0);
}
/* grep through file and count number of lines, maximum number of spaces per line, and */
/* longest line.                                                                       */
/* Outputs: *nol = line count, *wol = max spaces in any line, *ll = longest line.      */
/* Returns 0 on success, -1 if the file cannot be opened (error is reported).          */
int nol_ll(char *file,long *nol,long *wol,long *ll)
{
FILE *fl;
int ic;
char c;
long current_length,current_wol;
if ((fl = fopen (file, "r")) == NULL)
{
printe (file);
return -1;
}
current_length=0;
current_wol=0;
(*ll)=0;
(*nol)=1;
(*wol)=0;
while((ic=getc(fl)) != EOF)
{
c=(char)ic;
current_length++;
if(c == ' ')
{
current_wol++;
}
if(c == '\n')
{
(*nol)++;
if(current_length>(*ll))
{
(*ll)=current_length;
}
if(current_wol>(*wol))
{
(*wol)=current_wol;
}
current_length=0;
current_wol=0;
}
}
/* BUGFIX: account for a final line that has no trailing newline; the     */
/* original dropped its length/space statistics entirely.                 */
if(current_length>(*ll))
{
(*ll)=current_length;
}
if(current_wol>(*wol))
{
(*wol)=current_wol;
}
fclose(fl);
/* BUGFIX: function is declared int but fell off the end without a value  */
/* (undefined behavior); report success explicitly.                       */
return 0;
}
/* Elapsed CPU time since program start, expressed in hundredths of a */
/* second (clock ticks scaled by 100 / CLOCKS_PER_SEC).               */
long get_runtime()
{
const clock_t now = clock();
const double hundredths = (double)now * 100.0 / (double)CLOCKS_PER_SEC;
return (long)hundredths;
}
/* Portability shim: nonzero iff `a` is NaN.                           */
/* Replaces the MSVC-only _isnan() with the IEEE-754 self-comparison:  */
/* NaN is the only value that compares unequal to itself, so this      */
/* behaves identically while compiling on any conforming toolchain.    */
int isnan(double a)
{
return (a != a) ? 1 : 0;
}
/* malloc() wrapper: returns the freshly allocated block of `size`    */
/* bytes, or NULL after reporting an error when allocation fails.     */
void * my_malloc(long size)
{
void *block = malloc(size);
if (block == NULL)
{
printe ("Out of memory!");
return (NULL);
}
return (block);
}
//print error on screen: prefix `str` with "--error--" and show it.
//BUGFIX: the original strcat could overflow the fixed 200-byte buffer
//for long messages; strncat bounds the append (truncating if needed).
void printe(char* str)
{
char err[200]="--error--";
strncat(err,str,sizeof(err)-strlen(err)-1);
theApp.ShowM(err);
}
//print message on screen
//Thin wrapper: forwards `str` unchanged to the application's message
//display (theApp.ShowM); no formatting or buffering happens here.
void printm(char* str)
{
theApp.ShowM(str);
}
//Reset all global SVM/AdaBoost parameters (com_param) and the
//prompt/verbosity flags (com_pro) to their default values.
void SetInitParam()
{
com_param.biased_Hyperplane=1;
com_param.remove_inconsitant=0;
com_param.C =0.0;
com_param.cost_factor =1.0;
com_param.loo =0;
com_param.search_depth = 0;
com_param.rho = 1.0;
com_param.fraction =1.0;
com_param.rbf_gamma =1.0;
com_param.poly_c =0.0;
com_param.poly_s =1.0;
com_param.poly_degree =1;
com_param.kernel_type =0;
//com_param.user_u = pp4.m_strU ;
com_param.epsion =0.001;
com_param.iteration_time =100;
com_param.cache_size =40;
// BUGFIX: maximum_size must be assigned before it is copied into
// new_variable; the original read it while it still held a stale
// (possibly uninitialized) value.
com_param.maximum_size =10;
com_param.new_variable =com_param.maximum_size;
com_param.final_test = 1;
com_param.blAadaboostresults=TRUE;
com_param.Running=FALSE;
com_param.Finished=TRUE;
com_param.Close=FALSE;
//prompt default values
com_pro.show_action=TRUE;
com_pro.show_compute_1=TRUE;
com_pro.show_compute_2=FALSE;
com_pro.show_compute_3=FALSE;
com_pro.show_other=FALSE;
com_pro.show_readfile=FALSE;
com_pro.show_writefile=FALSE;
com_pro.show_testresult=TRUE;
com_pro.show_trainresult=FALSE;
}
/******************** tmp code ****************************/
/*if (com_pro.show_compute_1)
{
sprintf(temstr,"index: %.d\n",index);
printm(temstr);
}
//i=floor(tt/16)+1;
if (index > 400 && index < 500 ){
(*i) = floor(index/17)+1;
if (*i == (c_train-1)){
*i = (c_train-10);
}
}
if (index > 300 && index < 400 ){
(*i) = floor(index/12)+1;
if (*i == (c_train-1)){
*i = (c_train-10);
}
}
if (index > 200 && index < 300){
(*i) = floor(index/7)+1;
if (*i == (c_train-1)){
*i = (c_train-10);
}
}
if (index > 100 && index < 200){
(*i) = floor(index/5)+1;
*/