weaklearner.cpp
/****************************************************************************
NJU Magic. Copyright (c) 2007. All Rights Reserved.
--------------------------------------------------------------------
Permission to use, copy, or modify this software and its documentation
for educational and research purposes only and without fee is hereby
granted, provided that this copyright notice appear on all copies and
supporting documentation. For any other uses of this software, in
original or modified form, including but not limited to distribution
in whole or in part, specific prior permission must be obtained from
NJU Magic and the authors. These programs shall not be used, rewritten,
or adapted as the basis of a commercial software or hardware product
without first obtaining appropriate licenses from NJU Magic. NJU Magic
makes no representations about the suitability of this software for any
purpose. It is provided "as is" without express or implied warranty.
---------------------------------------------------------------------
File: WeakLearner.cpp
Authors: Yao Wei
Date Created: 2007-8-11
****************************************************************************/
#include "WeakLearner.h"
//////////////////////////////////////////////////////////////////////
// Vec
//////////////////////////////////////////////////////////////////////
// Default constructor: an empty vector with no storage allocated.
Vec::Vec() : n(0), vector(0), isfree(false)
{
}
// Construct a vector holding _n doubles.
Vec::Vec(int _n) : n(_n), isfree(false)
{
    Set(n);
}
Vec::~Vec()
{
    if(!isfree)
        Free();
}
// Allocate storage for n doubles.
void Vec::Set(int n)
{
    this->n = n;
    vector = new double[n];
}
// Release the storage; isfree prevents a second delete in the destructor.
void Vec::Free()
{
    delete []vector;
    vector = 0;
    isfree = true;
}
//////////////////////////////////////////////////////////////////////
// Mat
//////////////////////////////////////////////////////////////////////
// Default constructor: an empty matrix with no storage allocated.
Mat::Mat() : row(0), col(0), matrix(0), isfree(false), _iterator(0)
{
}
// Construct a _row x _col matrix of doubles.
Mat::Mat(int _row, int _col) : row(_row), col(_col), isfree(false), _iterator(0)
{
    Set(row, col);
}
// Allocate row vectors of length col each.
void Mat::Set(int row, int col)
{
    this->row = row;
    this->col = col;
    matrix = new Vec[row];
    for(int i = 0; i < row; i++)
        matrix[i].Set(col);
}
Mat::~Mat()
{
    if(!isfree)
        Free();
}
// Copy one feature vector into the next unfilled row; rows beyond the
// allocated count are silently ignored.
void Mat::AddRow(const Vec &feature)
{
    if(_iterator == row)
        return;
    for(int j = 0; j < col; j++)
        matrix[_iterator][j] = feature[j];
    _iterator++;
}
// Release every row, then the array of rows itself.
void Mat::Free()
{
    for(int i = 0; i < row; i++)
        matrix[i].Free();
    delete []matrix;
    isfree = true;
}
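/*
 Usage sketch (not part of the original file): how a Mat is typically filled
 with training samples before boosting. The counts nPos and D and the feature
 values are placeholders; Vec/Mat are assumed to expose operator[], rows() and
 cols() through WeakLearner.h, as the code above relies on.

     int nPos = 100, D = 20;           // hypothetical sample/feature counts
     Mat positives(nPos, D);           // one row per positive training sample
     Vec feature(D);
     for(int s = 0; s < nPos; s++)
     {
         for(int j = 0; j < D; j++)
             feature[j] = 0.0;         // fill with real feature values here
         positives.AddRow(feature);    // copies the vector into row s
     }
*/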
//////////////////////////////////////////////////////////////////////
// DecisionStump
//////////////////////////////////////////////////////////////////////
DecisionStump::DecisionStump()
{
}
DecisionStump::~DecisionStump()
{
}
// Sort values[l..r] in ascending order, applying the same swaps to indices
// so that indices[i] keeps the original position of the i-th sorted value.
void DecisionStump::QuickSort(double* values, int* indices, const int l, const int r)
{
    int i = l, j = r;
    double v;
    int t;
    double x = values[(l + r) / 2];   // pivot
    do
    {
        while (values[i] < x) i++;
        while (values[j] > x) j--;
        if (i <= j)
        {
            v = values[i]; values[i] = values[j]; values[j] = v;
            t = indices[i]; indices[i] = indices[j]; indices[j] = t;
            i++; j--;
        }
    } while (i <= j);
    if (l < j) QuickSort(values, indices, l, j);
    if (i < r) QuickSort(values, indices, i, r);
}
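/*
 Small illustration (not part of the original file) of the index co-sorting
 that RoundTrain below depends on. QuickSort may well be a private helper, so
 the call is shown only for its effect:

     double values[4]  = { 3.0, 1.0, 2.0, 0.5 };
     int    indices[4] = { 0, 1, 2, 3 };
     // after QuickSort(values, indices, 0, 3):
     //   values  == { 0.5, 1.0, 2.0, 3.0 }
     //   indices == { 3,   1,   2,   0  }   // original positions of the sorted values
*/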
// One boosting round: over every feature dimension and every candidate
// threshold, pick the (dimension, threshold, sign) that minimises the
// weighted training error. Rows of mat1 are the positive samples (+1),
// rows of mat2 the negative samples (-1); _weights holds one weight per
// sample, indexed in the same order.
void DecisionStump::RoundTrain(const Mat &mat1, const Mat &mat2, double* _weights)
{
    int i;
    int n = mat1.rows() + mat2.rows();
    int D = mat1.cols();
    // Search for the minimum weighted training error
    double minimumError = INFINITER;
    _d = -1;
    _threshold = INFINITER;
    _sign = 0;
    // Scratch buffers: data holds one dimension of all samples, sorted for the
    // threshold sweep; indices maps each sorted value back to its sample.
    double* data = new double[n];
    int* indices = new int[n];
    int* label = new int[n];
    // Loop over possible dimensions
    for (int d = 0; d < D; d++)
    {
        for (i = 0; i < n; i++)
        {
            if(i < mat1.rows())
            {
                data[i] = mat1[i][d];
                label[i] = +1;
            }
            else
            {
                data[i] = mat2[i - mat1.rows()][d];
                label[i] = -1;
            }
            indices[i] = i;
        }
        QuickSort(data, indices, 0, n - 1);
        // Total weight: a stump and its mirrored (_sign flipped) stump have
        // errors that sum to this value
        double totalError = 0.0;
        for (i = 0; i < n; i++)
            totalError += _weights[i];
        // Start the sweep with the threshold below the smallest value:
        // for the _sign = +1 stump every negative sample is then an error
        double currentError = 0.0;
        for (i = 0; i < n; i++)
            currentError += label[i] == -1 ? _weights[i] : 0.0;
        // Search through the sorted list to determine the best threshold
        for (i = 0; i < n - 1; i++)
        {
            // Moving the threshold above sample i changes its prediction for
            // the _sign = +1 stump: a positive sample becomes an error, a
            // negative sample becomes correct
            int index = indices[i];
            if (label[index] == +1)
                currentError += _weights[index];
            else
                currentError -= _weights[index];
            // Skip repeated values: no threshold can separate them
            if (data[i] == data[i + 1])
                continue;
            // Place the test threshold halfway between neighbouring values,
            // which maximises the margin between potential thresholds
            double testThreshold = (data[i] + data[i + 1]) / 2.0;
            // Compare to the current best
            if (currentError < minimumError)  // best so far with _sign = +1
            {
                minimumError = currentError;
                _d = d;
                _threshold = testThreshold;
                _sign = +1;
            }
            if ((totalError - currentError) < minimumError)  // best so far with _sign = -1
            {
                minimumError = (totalError - currentError);
                _d = d;
                _threshold = testThreshold;
                _sign = -1;
            }
        }
    }
    delete []label;
    delete []data;
    delete []indices;
}
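/*
 Sketch of an AdaBoost-style round built on top of RoundTrain (the boosting
 driver is not part of this file; positives/negatives, T and the helper
 WeightedError below are hypothetical, and the reweighting formula is the
 standard AdaBoost one rather than anything taken from this code base):

     int n = positives.rows() + negatives.rows();
     double* weights = new double[n];
     for (int i = 0; i < n; i++)
         weights[i] = 1.0 / n;                    // start from uniform weights

     WeakLearner learner;
     learner.Init(T, +1, -1);                     // room for T stumps
     for (int t = 0; t < T; t++)
     {
         DecisionStump stump;
         stump.RoundTrain(positives, negatives, weights);

         // Weighted error of the new stump on the training set
         // (hypothetical helper; it would call stump.Classify per sample).
         double err = WeightedError(stump, positives, negatives, weights);

         double alpha = 0.5 * log((1.0 - err) / err);
         learner.AddStump(stump, alpha);

         // AdaBoost reweighting: w_i *= exp(-alpha * y_i * h_t(x_i)),
         // followed by renormalisation so the weights sum to 1.
     }
     delete []weights;
*/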
//////////////////////////////////////////////////////////////////////
// WeakLearner
//////////////////////////////////////////////////////////////////////
WeakLearner::WeakLearner() : decisionstump(0), alpha(0),
    _iterator(0), maxnum(0), positive_label(+1), negative_label(-1)
{
}
WeakLearner::~WeakLearner()
{
}
// Allocate room for num stumps and record the two output labels.
void WeakLearner::Init(int num, double label1, double label2)
{
    maxnum = num;
    positive_label = label1;
    negative_label = label2;
    decisionstump = new DecisionStump[num];
    alpha = new double[num];
}
// Append a trained stump and its weight; stumps beyond maxnum are ignored.
void WeakLearner::AddStump(const DecisionStump& stump, double _alpha)
{
    if(_iterator == maxnum)
        return;
    decisionstump[_iterator] = stump;
    alpha[_iterator] = _alpha;
    _iterator++;
}
// Weighted vote of the stumps added so far; the sign of the sum picks the label.
double WeakLearner::Classify(double* feature, int n_input)
{
    double classification = 0.0;
    // Combine the weak classifier outputs, weighted by their alphas
    // (only the _iterator stumps actually added or loaded are used)
    for(int t = 0; t < _iterator; t++)
        classification += alpha[t] * decisionstump[t].Classify(feature, n_input);
    // Return the thresholded classification
    if(classification > 0.0)
        return positive_label;
    return negative_label;
}
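/*
 Usage sketch (not part of the original file): classifying one sample with a
 trained or loaded WeakLearner. The feature count D and the feature values are
 placeholders supplied by the caller.

     const int D = 20;                      // hypothetical feature count
     double feature[D];                     // filled by the caller's feature extractor
     double label = learner.Classify(feature, D);
     // label equals the positive label passed to Init() (or read by Load())
     // when the weighted stump vote is positive, the negative label otherwise.
*/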
// Write the model as text: a header line "<count>: <pos> vs <neg>" followed
// by one line per stump: "<alpha> <dimension> <sign> <threshold>".
void WeakLearner::Save(FILE *f)
{
    fprintf(f, "\n%d: %g vs %g\n", _iterator, positive_label, negative_label);
    for(int i = 0; i < _iterator; i++)
        fprintf(f, "%g %d %d %g\n", alpha[i], decisionstump[i]._d,
            decisionstump[i]._sign, decisionstump[i]._threshold);
}
// Read a model written by Save(); the stump count from the header line
// determines how much memory Init() allocates.
void WeakLearner::Load(FILE *f)
{
    fscanf(f, "%d: %lf vs %lf", &_iterator, &positive_label, &negative_label);
    // Allocate memory
    Init(_iterator, positive_label, negative_label);
    for(int i = 0; i < _iterator; i++)
        fscanf(f, "%lf %d %d %lf", &alpha[i], &decisionstump[i]._d,
            &decisionstump[i]._sign, &decisionstump[i]._threshold);
}
// Release the stump and alpha arrays; must be called by the owner, since
// the destructor does not free them.
void WeakLearner::Free()
{
    delete []decisionstump;
    delete []alpha;
}
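/*
 Usage sketch (not part of the original file): persisting a trained model and
 reading it back. The file name is a placeholder; Save() and Load() operate on
 an already opened FILE*, and Free() releases what Init()/Load() allocated.

     FILE* out = fopen("model.txt", "w");    // hypothetical path
     learner.Save(out);
     fclose(out);

     WeakLearner loaded;
     FILE* in = fopen("model.txt", "r");
     loaded.Load(in);                         // allocates via Init() internally
     fclose(in);
     // ... use loaded.Classify(...) ...
     loaded.Free();
*/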