
main.cpp

MultiBoost is a multi-class AdaBoost algorithm implemented in C++. Unlike the traditional AdaBoost algorithm, which mainly addresses two-class (binary) classification, MultiBoost handles multi-class problems directly via the AdaBoost.MH scheme.
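For readers unfamiliar with the multi-class reduction, here is a minimal sketch of the AdaBoost.MH idea that MultiBoost is built around: each (example, class) pair carries a weight, each round trains a weak hypothesis that votes +1/-1 on "example i belongs to class l", and the final score of a class is the alpha-weighted sum of those votes. This is an illustrative sketch only; the names below (adaboostMH, trainWeakLearner, Round, predict) are hypothetical placeholders and are not part of the MultiBoost API, whose real implementation lives in AdaBoostMHLearner and the WeakLearners classes used by main.cpp further down.

   // Hedged sketch of AdaBoost.MH (not MultiBoost code).
   // Weak hypotheses are indexed by example for brevity; a real learner
   // would take the feature vector. Degenerate edges (r = +/-1) are not guarded.
   #include <vector>
   #include <cmath>
   #include <functional>

   using WeakHyp = std::function<int(int exampleIdx, int classIdx)>; // returns +1 or -1

   struct Round { WeakHyp h; double alpha; };

   // Y[i][l] = +1 if example i has label l, otherwise -1.
   std::vector<Round> adaboostMH(const std::vector<std::vector<int>>& Y,
                                 int numRounds,
                                 const std::function<WeakHyp(const std::vector<std::vector<double>>&)>& trainWeakLearner)
   {
      const int n = static_cast<int>(Y.size());
      const int k = Y.empty() ? 0 : static_cast<int>(Y[0].size());

      // Uniform initial distribution over (example, class) pairs.
      std::vector<std::vector<double>> w(n, std::vector<double>(k, 1.0 / (n * k)));
      std::vector<Round> rounds;

      for (int t = 0; t < numRounds; ++t)
      {
         WeakHyp h = trainWeakLearner(w);

         // Weighted edge r = sum_{i,l} w[i][l] * Y[i][l] * h(i, l).
         double r = 0.0;
         for (int i = 0; i < n; ++i)
            for (int l = 0; l < k; ++l)
               r += w[i][l] * Y[i][l] * h(i, l);

         double alpha = 0.5 * std::log((1.0 + r) / (1.0 - r));
         rounds.push_back({h, alpha});

         // Reweight: misclassified (example, class) pairs gain weight, then normalize.
         double z = 0.0;
         for (int i = 0; i < n; ++i)
            for (int l = 0; l < k; ++l)
            {
               w[i][l] *= std::exp(-alpha * Y[i][l] * h(i, l));
               z += w[i][l];
            }
         for (auto& row : w)
            for (double& v : row)
               v /= z;
      }
      return rounds;
   }

   // Predicted class = argmax_l sum_t alpha_t * h_t(x, l).
   int predict(const std::vector<Round>& rounds, int exampleIdx, int numClasses)
   {
      int best = 0;
      double bestScore = -1e300;
      for (int l = 0; l < numClasses; ++l)
      {
         double score = 0.0;
         for (const Round& r : rounds)
            score += r.alpha * r.h(exampleIdx, l);
         if (score > bestScore) { bestScore = score; best = l; }
      }
      return best;
   }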
/** This file is part of MultiBoost, a multi-class
* AdaBoost learner/classifier
*
* Copyright (C) 2005-2006 Norman Casagrande
* For information write to nova77@gmail.com
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
*/

/*! \mainpage MultiBoost Doxygen Documentation
*
* \section intro_sec Introduction
*
* MultiBoost is a multi-class AdaBoost learner, very flexible and
* well commented. Feel free to use it and to modify it!
*
* Copyright (C) 2005-2006 Norman Casagrande.
* This software is covered by the
* <a href="http://www.gnu.org/copyleft/lesser.html">LGPL</a> licence.
*
* \section install_sec Installation
*
* Just run these two simple commands to compile:
*
* \code
* % tmake multiboost.pro -o Makefile
* % make
* \endcode
*
* Note: you need <a href="http://www.trolltech.com/download/freebies.html">tmake</a> to create
* the Makefile. If you don't have it, and you are using g++ on a Linux machine, you can use
* <a href="http://www.iro.umontreal.ca/~casagran/mb/support/Makefile">this Makefile</a>.
*
* To run the program, just type:
* \code
* % multiboost
* \endcode
*
* To get some help, type:
* \code
* % multiboost -help
* \endcode
*
* \section References
*
* Here's the \b bibtex reference:
\verbatim
@misc{multiboost,
      author   = {Norman Casagrande},
      title    = {MultiBoost: An open source multi-class AdaBoost learner},
      note     = {http://www.iro.umontreal.ca/~casagran/multiboost.html},
      year     = {2005-2006}
}
\endverbatim
*/

/**
* \file main.cpp
* The file that contains the main() function.
* @date 10/11/2005
*/

#include <vector>
#include <string>
#include <map>
#include <iostream>

#include "Defaults.h"
#include "Utils/Args.h"
#include "AdaBoostMHLearner.h"
#include "Classifier.h"
#include "WeakLearners/BaseLearner.h" // To get the list of the registered weak learners
#include "IO/ClassMappings.h" // for -classmap option

using namespace std;
using namespace MultiBoost;

//////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////

/**
* Show the basic output. Called when no argument is provided.
* @date 11/11/2005
*/
void showBase()
{
   cout << "MultiBoost (v" << CURRENT_VERSION << "). An obvious name for a multi-class AdaBoost learner." << endl;
   cout << "---------------------------------------------------------------------------" << endl;
   cout << "Build: " << __DATE__ << " (" << __TIME__ << ") (C) Norman Casagrande 2005-2006" << endl << endl;
   cout << "===> Type -help for help or -static to show the static options" << endl;

   exit(0);
}

//---------------------------------------------------------------------------

/**
* Show the help. Called when the -help argument is provided.
* @date 11/11/2005
*/
void showHelp(nor_utils::Args& args, const vector<string>& learnersList)
{
   cout << "MultiBoost (v" << CURRENT_VERSION << "). An obvious name for a multi-class AdaBoost learner." << endl;
   cout << "------------------------ HELP SECTION --------------------------" << endl;

   args.printGroup("Parameters");

   cout << endl;
   cout << "For specific help options type:" << endl;
   cout << "   -h general: General options" << endl;
   cout << "   -h io: I/O options" << endl;
   cout << "   -h algo: Basic algorithm options" << endl;

   cout << endl;
   cout << "For weak learner-specific options type:" << endl;

   vector<string>::const_iterator it;
   for (it = learnersList.begin(); it != learnersList.end(); ++it)
      cout << "   -h " << *it << endl;

   exit(0);
}

//---------------------------------------------------------------------------

/**
* Show the help for the options.
* @param args The arguments structure.
* @date 28/11/2005
*/
void showOptionalHelp(nor_utils::Args& args)
{
   string helpType = args.getValue<string>("h", 0);

   cout << "MultiBoost (v" << CURRENT_VERSION << "). An obvious name for a multi-class AdaBoost learner." << endl;
   cout << "---------------------------------------------------------------------------" << endl;

   if (helpType == "general")
      args.printGroup("General Options");
   else if (helpType == "io")
      args.printGroup("I/O Options");
   else if (helpType == "algo")
      args.printGroup("Basic Algorithm Options");
   else if ( BaseLearner::RegisteredLearners().hasLearner(helpType) )
      args.printGroup(helpType + " Options");
   else
      cerr << "ERROR: Unknown help section <" << helpType << ">" << endl;
}

//---------------------------------------------------------------------------

/**
* Show the default values.
* @date 11/11/2005
*/
void showStaticConfig()
{
   cout << "MultiBoost (v" << CURRENT_VERSION << "). An obvious name for a multi-class AdaBoost learner." << endl;
   cout << "------------------------ STATIC CONFIG -------------------------" << endl;

   cout << "- Sort type = ";
#if CONSERVATIVE_SORT
   cout << "CONSERVATIVE (slow)" << endl;
#else
   cout << "NON CONSERVATIVE (fast)" << endl;
#endif

   cout << "Comment: " << COMMENT << endl;
#ifndef NDEBUG
   cout << "Important: NDEBUG not active!!" << endl;
#endif

#if MB_DEBUG
   cout << "MultiBoost debug active (MB_DEBUG=1)!!" << endl;
#endif

   exit(0);
}

//---------------------------------------------------------------------------
//---------------------------------------------------------------------------

/**
* The main function. Everything starts here!
* @param argc The number of arguments.
* @param argv The arguments.
* @date 11/11/2005
*/
int main(int argc, char* argv[])
{
   // no need to synchronize with C style streams
   std::ios_base::sync_with_stdio(false);

   //////////////////////////////////////////////////////////////////////////
   // Standard arguments
   nor_utils::Args args;

   args.setArgumentDiscriminator("-");

   args.declareArgument("help");
   args.declareArgument("static");

   args.declareArgument("h", "Help", 1, "<optiongroup>");

   //////////////////////////////////////////////////////////////////////////
   // Basic Arguments

   args.setGroup("Parameters");

   args.declareArgument("train", "Performs training.", 2, "<dataFile> <nIterations>");
   args.declareArgument("traintest", "Performs training and test at the same time.", 3, "<trainingDataFile> <testDataFile> <nIterations>");
   args.declareArgument("test", "Test the model.", 2, "<dataFile> <shypFile>");
   args.declareArgument("test", "Test the model and output the results.", 3, "<datafile> <shypFile> <outFile>");
   args.declareArgument("cmatrix", "Print the confusion matrix for the given model.", 2, "<dataFile> <shypFile>");
   args.declareArgument("cmatrixfile", "Print the confusion matrix with the class names to a file.", 3, "<dataFile> <shypFile> <outFile>");
   args.declareArgument("ssfeatures", "Print matrix data for SingleStump-based weak learners (if numIters=0 it means all of them).", 4, "<dataFile> <shypFile> <outFile> <numIters>");

   //////////////////////////////////////////////////////////////////////////
   // Options

   args.setGroup("General Options");

   args.declareArgument("verbose", "Set the verbose level 0, 1 or 2 (0=no messages, 1=default, 2=all messages).", 1, "<val>");
   args.declareArgument("outputinfo", "Output information on the algorithm performance during training, to file <filename>.", 1, "<filename>");

   //////////////////////////////////////////////////////////////////////////
   // Show the list of available learners
   string learnersComment = "Available learners are:";

   vector<string> learnersList;
   BaseLearner::RegisteredLearners().getList(learnersList);

   vector<string>::const_iterator it;
   for (it = learnersList.begin(); it != learnersList.end(); ++it)
   {
      learnersComment += "\n ** " + *it;
      // defaultLearner is defined in Defaults.h
      if ( *it == defaultLearner )
         learnersComment += " (DEFAULT)";
   }

   args.declareArgument("learnertype", "Change the type of weak learner. " + learnersComment, 1, "<learner>");

   //////////////////////////////////////////////////////////////////////////
   // Declare arguments that belong to all weak learners
   BaseLearner::declareBaseArguments(args);

   //////////////////////////////////////////////////////////////////////////
   // Weak learners (and input data) arguments
   for (it = learnersList.begin(); it != learnersList.end(); ++it)
   {
      args.setGroup(*it + " Options");
      // add weaklearner-specific options
      BaseLearner::RegisteredLearners().getLearner(*it)->declareArguments(args);
   }

   //////////////////////////////////////////////////////////////////////////
   //////////////////////////////////////////////////////////////////////////

   switch ( args.readArguments(argc, argv) )
   {
   case nor_utils::AOT_NO_ARGUMENTS:
      showBase();
      break;
   case nor_utils::AOT_UNKOWN_ARGUMENT:
      exit(1);
      break;
   case nor_utils::AOT_INCORRECT_VALUES_NUMBER:
      exit(1);
      break;
   }

   //////////////////////////////////////////////////////////////////////////
   //////////////////////////////////////////////////////////////////////////

   if ( args.hasArgument("help") )
      showHelp(args, learnersList);

   if ( args.hasArgument("static") )
      showStaticConfig();

   //////////////////////////////////////////////////////////////////////////
   //////////////////////////////////////////////////////////////////////////

   if ( args.hasArgument("h") )
      showOptionalHelp(args);

   //////////////////////////////////////////////////////////////////////////
   //////////////////////////////////////////////////////////////////////////

   int verbose = 1;

   if ( args.hasArgument("verbose") )
      args.getValue("verbose", 0, verbose);

   //////////////////////////////////////////////////////////////////////////
   //////////////////////////////////////////////////////////////////////////

   if ( args.hasArgument("train") )
   {
      // -train <dataFile> <nIterations>
      string trainFileName = args.getValue<string>("train", 0);
      int numIterations = args.getValue<int>("train", 1);

      AdaBoostMHLearner learner(args, verbose);
      learner.run(numIterations, trainFileName);
   }
   //////////////////////////////////////////////////////////////////////////
   //////////////////////////////////////////////////////////////////////////
   else if ( args.hasArgument("traintest") )
   {
      // -traintest <trainingDataFile> <testDataFile> <nIterations>
      string trainFileName = args.getValue<string>("traintest", 0);
      string testFileName = args.getValue<string>("traintest", 1);
      int numIterations = args.getValue<int>("traintest", 2);

      AdaBoostMHLearner learner(args, verbose);
      learner.run(numIterations, trainFileName, testFileName);
   }
   //////////////////////////////////////////////////////////////////////////
   //////////////////////////////////////////////////////////////////////////
   else if ( args.hasArgument("test") )
   {
      // -test <dataFile> <shypFile>
      string testFileName = args.getValue<string>("test", 0);
      string shypFileName = args.getValue<string>("test", 1);
      string outResFileName;
      if ( args.getNumValues("test") > 2 )
         args.getValue("test", 2, outResFileName);

      Classifier classifier(args, verbose);
      classifier.run(testFileName, shypFileName, outResFileName);
   }
   //////////////////////////////////////////////////////////////////////////
   //////////////////////////////////////////////////////////////////////////
   else if ( args.hasArgument("cmatrix") )
   {
      // -cmatrix <dataFile> <shypFile>
      string testFileName = args.getValue<string>("cmatrix", 0);
      string shypFileName = args.getValue<string>("cmatrix", 1);

      Classifier classifier(args, verbose);
      classifier.printConfusionMatrix(testFileName, shypFileName);
   }
   //////////////////////////////////////////////////////////////////////////
   //////////////////////////////////////////////////////////////////////////
   else if ( args.hasArgument("cmatrixfile") )
   {
      // -cmatrixfile <dataFile> <shypFile> <outFileName>
      string testFileName = args.getValue<string>("cmatrixfile", 0);
      string shypFileName = args.getValue<string>("cmatrixfile", 1);
      string outFileName = args.getValue<string>("cmatrixfile", 2);

      Classifier classifier(args, verbose);
      classifier.saveConfusionMatrix(testFileName, shypFileName, outFileName);
   }
   //////////////////////////////////////////////////////////////////////////
   //////////////////////////////////////////////////////////////////////////
   else if ( args.hasArgument("ssfeatures") )
   {
      // -ssfeatures <dataFile> <shypFile> <outFile> <numIters>
      string testFileName = args.getValue<string>("ssfeatures", 0);
      string shypFileName = args.getValue<string>("ssfeatures", 1);
      string outFileName = args.getValue<string>("ssfeatures", 2);
      int numIterations = args.getValue<int>("ssfeatures", 3);

      Classifier classifier(args, verbose);
      classifier.saveSingleStumpFeatureData(testFileName, shypFileName, outFileName, numIterations);
   }

   return 0;
}
