// geneticweaklearner.cpp
//LAB_LicenseBegin==============================================================
// Copyright (c) 2005-2006, Hicham GHORAYEB < ghorayeb@gmail.com >
// All rights reserved.
//
// This software is a Library for Adaptive Boosting. It provides a generic
// framework for the study of the Boosting algorithms. The framework provides
// the different tasks for boosting: Learning, Validation, Test, Profiling and
// Performance Analysis Tasks.
//
// This Library was developped during my PhD studies at:
// Ecole des Mines de Paris - Centre de Robotique( CAOR )
// http://caor.ensmp.fr
// under the supervision of Pr. Claude Laurgeau and Bruno Steux
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the distribution.
// * Neither the name of the Ecole des Mines de Paris nor the names of
// its contributors may be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
//================================================================LAB_LicenseEnd
// Own header first, then project headers, then the C++ standard library.
#include "GeneticWeakLearner.h"

#include "samples/learningsets/ILearningSet.h"
#include "samples/Sample.h"
#include "classifiers/features/ImageFeature.h"
#include "classifiers/features/ControlPointFeature.h"
#include "classifiers/features/IRandomWeakClassifierGenerator.h"
#include "LibAdaBoost/modules/options/GeneticWeakLearnerOptions.h"
#include "wlinterfaces/GeneticWeakLearnerInterface.h"

#include <cassert>
#include <cstdlib>
#include <iostream>
#include <algorithm>
#include <iomanip>
#include <string>
#include <sstream>

using namespace std;
using namespace samples;
using namespace classifiers;
using namespace classifiers::features;
using namespace learners;
using namespace modules::options;
using namespace learners::wlinterfaces;
namespace learners{
// Strict-weak-ordering predicate for std::sort: orders two weak classifiers
// by ascending classification error (smaller error ranks first).
bool lessErr(const WeakClassifier *i1,const WeakClassifier *i2)
{
    const double leftErr  = i1->GetError();
    const double rightErr = i2->GetError();
    return leftErr < rightErr;
}
}
// Default constructor: every GA parameter starts at its GA_DEFAULT_* value
// and no learning set is attached yet. A learning set must be supplied
// before the learner runs — Learn() asserts m_LearningSet != NULL.
GeneticWeakLearner::GeneticWeakLearner()
:m_LearningSet ( NULL )
,m_RandomGenerator( NULL )
,m_nb_indiv ( GA_DEFAULT_NBR_INDIV )
,m_winner_to_keep ( GA_DEFAULT_WINNER_TO_KEEP )
,m_looser_to_keep ( GA_DEFAULT_LOOSER_TO_KEEP )
,m_max_generation ( GA_DEFAULT_MAX_GENERATION )
,m_nb_mutation ( GA_DEFAULT_NBR_MUTATION )
,m_interface ( NULL )
,m_bestwc ( t_indiv(NULL, 1.0) ) // sentinel: no best classifier yet; 1.0 presumably marks the worst error — selection code not visible here
{
// Pre-size the population vector to avoid reallocations while it is filled.
m_population.reserve( m_nb_indiv );
m_StringLog.clear();
}
// Convenience constructor: identical to the default constructor except the
// learning set is attached immediately.
// @param learningSet  sample set the GA evaluates candidates against
//                     (ownership is NOT taken — the destructor never frees it)
GeneticWeakLearner::GeneticWeakLearner(ILearningSet *learningSet)
:m_LearningSet ( learningSet )
,m_RandomGenerator( NULL )
,m_nb_indiv ( GA_DEFAULT_NBR_INDIV )
,m_winner_to_keep ( GA_DEFAULT_WINNER_TO_KEEP )
,m_looser_to_keep ( GA_DEFAULT_LOOSER_TO_KEEP )
,m_max_generation ( GA_DEFAULT_MAX_GENERATION )
,m_nb_mutation ( GA_DEFAULT_NBR_MUTATION )
,m_interface ( NULL )
,m_bestwc ( t_indiv(NULL, 1.0) ) // sentinel: no best classifier yet; 1.0 presumably marks the worst error — selection code not visible here
{
// Pre-size the population vector to avoid reallocations while it is filled.
m_population.reserve( m_nb_indiv );
m_StringLog.clear();
}
// Destructor: releases every individual owned by the GA population, plus the
// stored best classifier. m_LearningSet and m_RandomGenerator are NOT freed
// here — this class does not own them.
GeneticWeakLearner::~GeneticWeakLearner()
{
// size_t index matches std::vector::size() — the original used a plain int,
// which triggers a signed/unsigned comparison.
for(size_t i = 0; i < m_population.size(); i++)
{
delete m_population[i];
m_population[i] = NULL;
}
m_population.clear();
// m_bestwc.first is a separate allocation (GetNewWeakClassifier clones it
// rather than handing it out), so deleting it alongside the population
// does not double-free. NOTE(review): assumes it is never aliased into
// m_population — the assignment happens in code not visible here; confirm.
if( m_bestwc.first != NULL ){
delete m_bestwc.first;
m_bestwc.first = NULL;
}
}
// IWeakLearner
// Run one full GA optimisation and hand back a clone of the best weak
// classifier found.
// @param[out] error  weighted classification error of the returned classifier
// @return newly allocated clone — ownership passes to the caller
WeakClassifier *
GeneticWeakLearner::GetNewWeakClassifier(double &error)
{
// Initialize the Learner: random generation of the initial population.
Reset();
// Run the Genetic Algorithm.
Learn();
// Guard against dereferencing a null best individual: m_bestwc.first is
// initialised to NULL in the constructors, and cloning through it would
// crash if Learn() never produced a winner. Same assert style as Learn().
assert( m_bestwc.first != NULL );
// Get the result of the genetic algorithm.
WeakClassifier *wc = (WeakClassifier *)m_bestwc.first->Clone();
// Report the learning result to the caller.
error = m_bestwc.second;
return wc;
}
// GA Protected Methods
// Evaluate one candidate weak classifier over the whole learning set:
// accumulate the weighted error of every misclassified sample and record
// the confusion-matrix counters on the classifier itself.
void
GeneticWeakLearner::TestWeakClassifier(WeakClassifier *feature, const ILearningSet *learningSet)
{
double weightedError = 0.0;
int truePos  = 0;
int trueNeg  = 0;
int falsePos = 0;
int falseNeg = 0;
// Positive samples: anything not classified POSITIVE_CLASS_ID is a false
// negative and contributes the sample's weight to the error.
for(unsigned i=0; i < learningSet->NbrOfPositiveSamples(); i++)
{
const Sample *sample = learningSet->GetPositiveSampleConstAt( i );
if( feature->Classify( sample ) == POSITIVE_CLASS_ID ){
truePos++;
}else{
falseNeg++;
weightedError += sample->GetWeight();
}
}
// Negative samples: symmetric treatment — a non-NEGATIVE_CLASS_ID answer
// is a false positive and adds the sample's weight to the error.
for(unsigned i=0; i < learningSet->NbrOfNegativeSamples(); i++)
{
const Sample *sample = learningSet->GetNegativeSampleConstAt( i );
if( feature->Classify( sample ) == NEGATIVE_CLASS_ID ){
trueNeg++;
}else{
falsePos++;
weightedError += sample->GetWeight();
}
}
// Publish the results on the classifier for later sorting/selection.
feature->SetError( weightedError );
feature->SetTruePositive( truePos );
feature->SetTrueNegative( trueNeg );
feature->SetFalsePositive( falsePos );
feature->SetFalseNegative( falseNeg );
}
// IWeakLearner
void
GeneticWeakLearner::Learn(void)
{
assert( m_LearningSet != NULL );
const GeneticWeakLearnerInterface *myInterface = (const GeneticWeakLearnerInterface *)GetWeakLearnerInterface();
ostringstream oStrLog;
unsigned generation = 0;
unsigned nbr_generation_no_improvement = 0;
oStrLog.clear();
/* TODO:: SAVE THE BEST FEATURE IN THE EVOLUTION POPULATION
* from one iteration to another
*/
generation = 0;
while( true ){
// ITERATES OVER ALL THE INDIV IN THE POPULATION
// APPLY EACH WEAK CLASSIFIER ON THE LEARNING SET
// AND GET THE RESULT ERROR.
GeneticWeakLearner::t_population::iterator it;
for(it = m_population.begin(); it != m_population.end(); it++){
// EVALUATE THE CURENT WEAK CLASSIFIER ON THE LEARNING SET
WeakClassifier *wc = (*it);
TestWeakClassifier(wc, m_LearningSet);
}
// SORT THE WEAK CLASSIFIERS IN THE POPULATION RESPECT TO ERROR
std::sort(m_population.begin(), m_population.end(), lessErr );
for(unsigned t=0; t < (m_population.size() - 1); t++)
{
//std::cerr << t << "-->"<< m_population[t]->GetError() << ", " << m_population[t+1]->GetError() << std::endl;
assert( m_population[t]->GetError() <= m_population[t+1]->GetError());
}
// Clone The Best WeakClassifier
WeakClassifier *cur_bestwc = (WeakClassifier *)m_population.front()->Clone();
double cur_error = m_population.front()->GetError();
// DO SOME LOG ON THAT STEP: Report results of this generation
std::cout
//<< "Current Error = "
<< setprecision( 5 )
<< setw( 7 )
<< left
<< cur_error << " c, "
<< setprecision( 5 )
<< setw( 7 )
<< left
<< m_bestwc.second << " l, "
<< setfill(' ')
<< setw( 4 )
<< nbr_generation_no_improvement
<< "th time"
<< "("
<< setw( 4 )
<< cur_bestwc->GetTruePositive()
<< "/"
<< setw( 4 )
<< m_LearningSet->NbrOfPositiveSamples()
<< " pos"
<< ", "
<< setw( 4 )
<< cur_bestwc->GetTrueNegative()
<< "/"
<< setw( 4 )
<< m_LearningSet->NbrOfNegativeSamples()
<< " neg"
<< ", "
<< setw( 4 )
<< cur_bestwc->GetTrueNegative() + cur_bestwc->GetTruePositive()
<< "/"
<< setw( 4 )
// [NOTE: source truncated here — the remainder of GeneticWeakLearner::Learn
//  (and any following code) was lost to code-hosting page residue that was
//  removed from this spot. Recover the missing tail from the original
//  LibAdaBoost distribution before compiling.]