
📄 testHaarStump.cc

📁 Torch3vision test program for a linear combination of Haar-like stump classifiers: it loads a trained model, scores a dataset of image patterns, and writes one score per pattern plus a histogram of the scores.
💻 C++
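
Before the full listing, a minimal self-contained sketch of the idea the program exercises may help: an integral image gives constant-time rectangle sums, a Haar-like stump thresholds the difference of two rectangles, and the final score is a weighted sum of the stump decisions. The names below (IntegralImage, rectSum, HaarStump) are illustrative only and are not the Torch3vision classes used in the listing (ipIntegralImage, HaarStumpMachine, ImageWeightedSumMachine).

#include <cstdio>
#include <vector>

// Integral image: ii(x, y) holds the sum of all pixels in [0, x) x [0, y).
struct IntegralImage {
    int w, h;
    std::vector<double> ii;  // (w + 1) * (h + 1) entries

    IntegralImage(const std::vector<double>& img, int width, int height)
        : w(width), h(height), ii((width + 1) * (height + 1), 0.0)
    {
        for (int y = 0; y < h; y++)
            for (int x = 0; x < w; x++)
                ii[(y + 1) * (w + 1) + (x + 1)] =
                    img[y * w + x]
                    + ii[y * (w + 1) + (x + 1)]
                    + ii[(y + 1) * (w + 1) + x]
                    - ii[y * (w + 1) + x];
    }

    // Sum of the rectangle with top-left (x, y) and size rw x rh, in O(1).
    double rectSum(int x, int y, int rw, int rh) const {
        return ii[(y + rh) * (w + 1) + (x + rw)]
             - ii[y * (w + 1) + (x + rw)]
             - ii[(y + rh) * (w + 1) + x]
             + ii[y * (w + 1) + x];
    }
};

// A single Haar-like stump: difference of two adjacent rectangles,
// thresholded into {-1, +1}.
struct HaarStump {
    int x, y, rw, rh;   // left rectangle; the right one is adjacent to it
    double threshold;
    double alpha;       // weight of this stump in the combination

    double feature(const IntegralImage& ii) const {
        return ii.rectSum(x, y, rw, rh) - ii.rectSum(x + rw, y, rw, rh);
    }
    double decision(const IntegralImage& ii) const {
        return feature(ii) > threshold ? 1.0 : -1.0;
    }
};

int main() {
    const int w = 4, h = 4;
    // Toy 4x4 pattern: bright left half, dark right half.
    std::vector<double> img = {
        9, 9, 1, 1,
        9, 9, 1, 1,
        9, 9, 1, 1,
        9, 9, 1, 1,
    };
    IntegralImage ii(img, w, h);

    // Two toy stumps with hand-picked weights; a real model is learned.
    std::vector<HaarStump> stumps = {
        {0, 0, 2, 4, 0.0, 0.7},
        {1, 0, 1, 4, 0.0, 0.3},
    };

    double score = 0.0;
    for (const HaarStump& s : stumps)
        score += s.alpha * s.decision(ii);

    std::printf("weighted-sum score = %g\n", score);  // prints 1 for this toy pattern
    return 0;
}

The real program delegates all of this to ipIntegralImage, HaarStumpMachine / HaarRealStumpMachine and ImageWeightedSumMachine, as the listing below shows.
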
const char *help = "\
progname: testHaarStump.cc\n\
code2html: This program tests a linear combination of Haar-like classifiers.\n\
version: Torch3 vision2.0, 2004-2005\n\
(c) Sebastien Marcel (marcel@idiap.ch) and Yann Rodriguez (rodrig@idiap.ch)\n";

// Torch
// command-lines
#include "FileListCmdOption.h"
#include "CmdLine.h"

/** Torch3vision */
// Stump machines
#include "HaarStumpMachine.h"
#include "HaarRealStumpMachine.h"
#include "ImageWeightedSumMachine.h"

// datasets
//#include "FileBinDataSet.h"
#include "DiskBinDataSet.h"

// image processing
#include "ipIntegralImage.h"
#include "ipNormMeanStdvLight.h"

using namespace Torch;

int main(int argc, char **argv)
{
    // pattern geometry
    int width;
    int height;

    // model to test
    char *model_filename;

    // processing options
    bool image_normalize;
    bool haar_normalize;
    bool realstump;

    // output options
    char *output_basename;
    int n_histo_bins;

    bool verbose;

    Allocator *allocator = new Allocator;

    FileListCmdOption filelist_class("file name", "the list files or one data file of patterns");
    filelist_class.isArgument(true);

    //
    // Prepare the command-line
    CmdLine cmd;
    cmd.setBOption("write log", false);
    cmd.info(help);

    cmd.addText("\nArguments:");
    cmd.addSCmdArg("model", &model_filename, "model filename");
    cmd.addCmdOption(&filelist_class);
    cmd.addICmdArg("width", &width, "width");
    cmd.addICmdArg("height", &height, "height");

    cmd.addText("\nOptions:");
    cmd.addBCmdOption("-real", &realstump, false, "uses real stump");
    cmd.addBCmdOption("-imagenorm", &image_normalize, false, "considers the input pattern as an image and performs a photometric normalization");
    cmd.addBCmdOption("-haarnorm", &haar_normalize, false, "normalize feature outputs");
    cmd.addICmdOption("-nbins", &n_histo_bins, 100, "number of histogram bins");
    cmd.addBCmdOption("-verbose", &verbose, false, "verbose");
    cmd.addSCmdOption("-o", &output_basename, "test", "output basename");

    //
    // Read the command-line
    cmd.read(argc, argv);

    if((image_normalize && haar_normalize) || (!image_normalize && !haar_normalize))
    {
        warning("choose between image and haar normalization.\n");
        return 0;
    }

    // list the pattern files
    print("n_filenames = %d\n", filelist_class.n_files);
    for(int i = 0 ; i < filelist_class.n_files ; i++)
        print("   filename[%d] = %s\n", i, filelist_class.file_names[i]);

    //
    int n_trainers;
    int n_inputs = width * height;

    // build the dataset from the file list
    //FileBinDataSet *data = NULL;
    DiskBinDataSet *data = NULL;

    //data = new(allocator) FileBinDataSet(filelist_class.file_names, filelist_class.n_files, n_inputs);
    data = new(allocator) DiskBinDataSet(filelist_class.file_names, filelist_class.n_files, n_inputs, -1);

    data->info(false);

    // prepare the pre-processing machines
    print("Pre-processing ...\n");

    ipCore *inorm_machine = NULL;
    ipCore *i_machine = NULL;
    real *integralimage2 = NULL;

    if(haar_normalize)
    {
        // second output plane of the integral image, used to normalize feature outputs
        i_machine = new(allocator) ipIntegralImage(width, height, "gray", true);
        integralimage2 = i_machine->seq_out->frames[1];
    }
    else i_machine = new(allocator) ipIntegralImage(width, height, "gray");

    if(image_normalize)
    {
        inorm_machine = new(allocator) ipNormMeanStdvLight(width, height, "float");
        inorm_machine->setBOption("verbose", verbose);
    }

    // load the model header
    print("Loading model %s ...\n", model_filename);

    DiskXFile *model = new(allocator) DiskXFile(model_filename, "r");

    model->taggedRead(&n_inputs, sizeof(int), 1, "N_INPUTS");
    model->taggedRead(&n_trainers, sizeof(int), 1, "N_TRAINERS");

    print(" + n_inputs = %d\n", n_inputs);
    print(" + n_trainers = %d\n", n_trainers);

    // allocate one stump machine per weak classifier and combine them
    Machine **machines = (Machine **)allocator->alloc(n_trainers*sizeof(Machine *));
    for(int j = 0 ; j < n_trainers ; j++)
        if(realstump)
            machines[j] = new(allocator) HaarRealStumpMachine(width, height, integralimage2);
        else machines[j] = new(allocator) HaarStumpMachine(width, height, integralimage2);

    ImageWeightedSumMachine *iwsm = new(allocator) ImageWeightedSumMachine(machines, n_trainers);

    // load the stump parameters and weights
    iwsm->loadXFile(model);

    if(verbose)
    {
        for(int j = 0 ; j < n_trainers ; j++)
        {
            if(realstump)
            {
                HaarRealStumpMachine *h_ = (HaarRealStumpMachine *) machines[j];
                h_->the_mask->info();
            }
            else
            {
                HaarStumpMachine *h_ = (HaarStumpMachine *) machines[j];
                h_->the_mask->info();
            }
        }
    }

    //
    real min_ = 1000.0;
    real max_ = -1000.0;

    //
    real *outputs = (real *) allocator->alloc(data->n_examples * sizeof(real));

    // open the output file for the scores
    char output_filename[250];
    sprintf(output_filename, "%s.output", output_basename);

    DiskXFile *pf_output = new(allocator) DiskXFile(output_filename, "w");

    // score every example
    for(int i = 0 ; i < data->n_examples ; i++)
    {
        data->setExample(i);

        real *input_ = data->inputs->frames[0];

        Sequence *seqinput = data->inputs;

        if(image_normalize)
        {
            inorm_machine->process(seqinput);

            input_ = inorm_machine->seq_out->frames[0];

            seqinput = inorm_machine->seq_out;
        }

        // computes the integral image
        i_machine->process(seqinput);

        // replaces the input by its integral image
        real *output_ = i_machine->seq_out->frames[0];
        for(int j = 0 ; j < width * height ; j++)
            data->inputs->frames[0][j] = output_[j];

        // forward
        iwsm->forward(data->inputs);

        outputs[i] = iwsm->outputs->frames[0][0];

        if(verbose)
            print(" -> %g\n", outputs[i]);

        pf_output->printf("%g\n", outputs[i]);

        // track the score range
        if(i == 0)
        {
            min_ = outputs[i];
            max_ = outputs[i];
        }
        else
        {
            if(outputs[i] < min_) min_ = outputs[i];
            if(outputs[i] > max_) max_ = outputs[i];
        }
    }

    print("min = %g\n", min_);
    print("max = %g\n", max_);

    // build the score histogram
    int *histo = (int *) allocator->alloc(n_histo_bins * sizeof(int));
    for(int i = 0 ; i < n_histo_bins ; i++) histo[i] = 0;

    real n_1 = n_histo_bins - 1;
    for(int i = 0 ; i < data->n_examples ; i++)
    {
        int index = FixI(n_1 * (outputs[i] - min_) / (max_ - min_));
        histo[index]++;
    }

    // normalize by the largest bin
    real histo_max_ = 0.0;
    for(int i = 0 ; i < n_histo_bins ; i++)
        if(histo[i] > histo_max_) histo_max_ = histo[i];

    // write the histogram
    char histo_filename[250];
    sprintf(histo_filename, "%s.histo", output_basename);

    DiskXFile *pf_histo = new(allocator) DiskXFile(histo_filename, "w");

    for(int i = 0 ; i < n_histo_bins ; i++)
        if(histo[i] != 0)
        {
            real output_ = (real) i * (max_ - min_) / n_1 + min_;

            real histo_ = (real) histo[i] / histo_max_;
            //real histo_ = (real) histo[i] / (real) data->n_examples;

            pf_histo->printf("%g %g\n", output_, histo_);
        }

    //
    delete allocator;

    return(0);
}
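
Judging from the argument parsing above, a run might look like: testHaarStump face.model patterns.list 19 19 -haarnorm -verbose -o test (the filenames and the 19x19 pattern size are only illustrative). Exactly one of -imagenorm and -haarnorm must be given, otherwise the program stops with a warning. The scores are then written one per line to test.output, and test.histo receives up to -nbins lines of "score frequency" pairs, with frequencies normalized by the largest bin.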
