geneticweaklearner.cpp
<< m_LearningSet->NbrOfSamples()
<< " all"
<< ")"
<< std::endl;
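// Per-generation log line: current error, best error so far, generations without
// improvement, true positives vs. positive samples, true negatives vs. negative
// samples, and total correct classifications vs. total samples.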
oStrLog<< setprecision( 5 )
<< setw( 7 )
<< left
<< cur_error << " "
<< setprecision( 5 )
<< setw( 7 )
<< left
<< m_bestwc.second << " "
<< setfill(' ')
<< setw( 4 )
<< nbr_generation_no_improvement << " "
<< setw( 4 )
<< cur_bestwc->GetTruePositive() << " "
<< setw( 4 )
<< m_LearningSet->NbrOfPositiveSamples()<< " "
<< setw( 4 )
<< cur_bestwc->GetTrueNegative() << " "
<< setw( 4 )
<< m_LearningSet->NbrOfNegativeSamples() << " "
<< setw( 4 )
<< cur_bestwc->GetTrueNegative() + cur_bestwc->GetTruePositive() << " "
<< setw( 4 )
<< m_LearningSet->NbrOfSamples() << " "
<< std::endl;
// Update the number of generations with no improvement.
// Compare this Weak Classifier to the previous best; if it is better, then swap.
if( cur_error < m_bestwc.second )
{
// Store this new Weak Classifier
m_bestwc.second = cur_error;
if(m_bestwc.first != NULL){
delete m_bestwc.first;
m_bestwc.first = NULL;
}
m_bestwc.first = cur_bestwc;
cur_bestwc = NULL;
// Reset Counter to Zero
nbr_generation_no_improvement = 0;
}else{
// Delete the Temporary Cur Best Weak Classifier
delete cur_bestwc;
cur_bestwc = NULL;
nbr_generation_no_improvement++;
}
// IF STOP CONDITION IS TRUE THEN BREAK
// THE STOP CONDITION COULD BE THE TRADITIONAL ONE AS BELOW
// OR BASED ON THE MAX ALLOWED ITERATIONS WITHOUT IMPROVEMENT
// IF ERROR IS ZERO THEN THIS IS THE PERFECT CLASSIFIER
if( nbr_generation_no_improvement >= m_max_generation ){
break;
}
// SELECT BEST RANKING : A PERCENTAGE, A FIXED NUMBER, A SCORE ?
unsigned winner_to_keep = m_population.size() * m_winner_to_keep / 100;
unsigned looser_to_keep = (m_population.size() - winner_to_keep) * m_looser_to_keep / 100;
unsigned to_keep = winner_to_keep + looser_to_keep;
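// The population is then reworked in three groups: the winners are refined by
// mutation (the first individual is left untouched), a shuffled subset of the
// losers is re-randomized through the learner interface, and the remainder is
// replaced with brand-new random weak classifiers.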
// DO MUTATE
for(int index = 1; index < winner_to_keep; index++)
{
for(int m=0; m < m_nb_mutation; m++)
{
WeakClassifier *mut= NULL;
//mut = m_population[ index ]->Mutate();
mut = myInterface->Mutate( *(m_population[index]) );
if( mut != NULL)
{
// Test The Mutate Result
double err = 0.0;
//std::cout << "[in]" <<std::endl;
TestWeakClassifier(mut, m_LearningSet);
//std::cout << "[out]" <<std::endl;
err = mut->GetError();
// If The error is less than the actual one
if( err < m_population[ index ]->GetError() )
{
// then replace the current Weak Classifier with the Mutate Result
WeakClassifier *tmp = m_population[ index ];
m_population[ index ] = mut;
mut = tmp;
}
delete mut;
}
}
}
// SHUFFLE LOSERS
std::random_shuffle(m_population.begin() + winner_to_keep, m_population.end());
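// Re-randomize the first looser_to_keep of the shuffled losers in place through the learner interface.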
for( it = (m_population.begin()+ winner_to_keep); it != (m_population.begin()+ to_keep) ;it++)
{
WeakClassifier *wc = (*it);
WeakClassifier *randWC= NULL;
// Randomize the Weak Classifier
//randWC = wc->Randomize();
randWC = myInterface->Randomize( *wc );
if( randWC != NULL)
{
// then replace the current Weak Classifier with the Randomize Result
WeakClassifier *tmp = *it;
*it = randWC;
randWC = tmp;
delete randWC;
}
}
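// Every individual beyond the kept winners and losers is deleted and replaced
// by a freshly generated random weak classifier.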
for( it = (m_population.begin()+ to_keep); it != m_population.end() ;it++)
{
WeakClassifier *wc = (*it);
WeakClassifier *randWC= NULL;
// Randomize the Weak Classifier
randWC = m_RandomGenerator->GetNewRandomWeakClassifier();
if( randWC != NULL)
{
// then replace the current Weak Classifier with the new random Weak Classifier
WeakClassifier *tmp = *it;
*it = randWC;
randWC = tmp;
delete randWC;
}
}
// INC GENERATION
generation++;
}
m_StringLog = oStrLog.str();
}
void GeneticWeakLearner::InitializePopulation( void )
{
// Clean Population
for(int i=0; i < m_population.size(); i++)
{
delete m_population[i];
m_population[i] = NULL;
}
m_population.clear();
m_RandomGenerator->Init();
// RANDOM GENERATION OF WEAK CLASSIFIERS
for(int i=0; i < m_nb_indiv; i++)
{
m_population.push_back( m_RandomGenerator->GetNewRandomWeakClassifier() );
}
}
void
GeneticWeakLearner::Reset(void)
{
assert( m_LearningSet != NULL);
assert( m_RandomGenerator != NULL);
assert( m_interface != NULL);
if( m_bestwc.first != NULL)
delete m_bestwc.first;
m_bestwc.first = NULL;
m_bestwc.second= 1.0;
m_StringLog.clear();
InitializePopulation();
}
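// Returns the best weak classifier found so far. Note that Reset() deletes the
// stored pointer, so the learner retains ownership of the returned classifier.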
IClassifier *
GeneticWeakLearner::GetResult(void)
{
return m_bestwc.first;
}
double
GeneticWeakLearner::GetResultError(void) const
{
return m_bestwc.second;
}
ILearningSet *
GeneticWeakLearner::GetLearningSet(void)
{
return m_LearningSet;
}
void
GeneticWeakLearner::SetLearningSet(ILearningSet *learningSet)
{
assert( learningSet != NULL );
m_LearningSet = learningSet;
}
void
GeneticWeakLearner::SetRandomWeakClassifierGenerator(IRandomWeakClassifierGenerator *randomGen)
{
m_RandomGenerator = randomGen;
}
IRandomWeakClassifierGenerator *
GeneticWeakLearner::GetRandomWeakClassifierGenerator(void)
{
return m_RandomGenerator;
}
const WeakLearnerInterface *
GeneticWeakLearner::GetWeakLearnerInterface(void)
{
return m_interface;
}
void
GeneticWeakLearner::SetWeakLearnerInterface(const WeakLearnerInterface *wli)
{
m_interface = wli;
}
const std::string &
GeneticWeakLearner::GetWeakLearnerLog() const
{
return m_StringLog;
}
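// GetLearnerLog below exposes the same log buffer as GetWeakLearnerLog.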
const std::string &
GeneticWeakLearner::GetLearnerLog() const
{
return m_StringLog;
}
void
GeneticWeakLearner::SetOptions(const modules::options::ModuleOptions &options)
{
const modules::options::GeneticWeakLearnerOptions &opts = static_cast<const modules::options::GeneticWeakLearnerOptions &>(options);
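// The cast assumes the caller passed a GeneticWeakLearnerOptions instance;
// static_cast performs no runtime check.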
// get attributes
int nb_indiv = opts.GetNbrIndiv();
int winner_to_keep = opts.GetWinnerToKeep();
int looser_to_keep = opts.GetLooserToKeep();
int max_generation = opts.GetMaxGeneration();
int nb_mutation = opts.GetNbrMutation();
// set the attributes
m_nb_indiv = nb_indiv ;
m_winner_to_keep = winner_to_keep ;
m_looser_to_keep = looser_to_keep ;
m_max_generation = max_generation ;
m_nb_mutation = nb_mutation ;
m_population.reserve( m_nb_indiv ) ;
m_StringLog.clear();
}