📄 ctestsuit.cpp

📁 Reinforcement learning algorithm (R-Learning): rare and valuable reference material
💻 CPP
📖 Page 1 of 4
	{
		loadEvaluationData(testSuite, getEvaluationFileName(testSuite).c_str());
	}
	printf("Loaded %d Trials for Evaluation!\n", (int) values->size());

	FILE *evaluationFile = fopen(getEvaluationFileName(testSuite).c_str(), "a");
	while ((int) values->size() < nTrials && !exception)
	{
		doEvaluationTrial(testSuite, evaluationFile, getLearnDataFileName(testSuite).c_str());
	}
	fclose(evaluationFile);

	rlt_real evaluationValue = 0.0;

	if(exception)
	{
		evaluationValue = getParameter("DivergentEvaluationValue");
	}
	else
	{
		evaluationValue = getEvaluationValue(values);
	}

	return evaluationValue;
}

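// Ensures the evaluator's output directory exists: builds the platform-specific
// directory path and delegates the actual check/creation to an external
// checkdir.bat script invoked via system(). Note that the same batch script is
// also called on the non-WIN32 branch.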
void CTestSuiteEvaluator::checkDirectories()
{
	char evaluationFileDirectory[255];
	char checkDirSystemCall[255];

#ifdef WIN32
	sprintf(evaluationFileDirectory, "%s\\", getEvaluatorDirectory().c_str());
#else
	sprintf(evaluationFileDirectory, "%s/", getEvaluatorDirectory().c_str());
#endif
	sprintf(checkDirSystemCall, "checkdir.bat %s", evaluationFileDirectory);

	system(checkDirSystemCall);

}
 
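// Evaluator that measures how many simulation steps the agent needs per block
// of learning episodes. Results are written to (and reloaded from) files in a
// directory named "ST_<totalLearnEpisodes>_<stepsLearnEpisode>".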
CTestSuiteNeededStepsEvaluator::CTestSuiteNeededStepsEvaluator(CAgent *agent, string testSuiteCollectionName, int totalLearnEpisodes, int stepsLearnEpisode, int episodesBeforeEvaluate, int nTrials, bool maxStepsSucceded) : CTestSuiteEvaluator(agent, testSuiteCollectionName, nTrials, (int) ceil((rlt_real)totalLearnEpisodes / (rlt_real) episodesBeforeEvaluate))
{
	this->totalLearnEpisodes = totalLearnEpisodes;
	this->stepsLearnEpisode = stepsLearnEpisode;
	this->episodesBeforeEvaluate = episodesBeforeEvaluate;
	this->maxStepsSucceded = maxStepsSucceded;

	succeded = new std::list<rlt_real *>();

	char evaluatorDirectoryChar[80];
	sprintf(evaluatorDirectoryChar, "ST_%d_%d", totalLearnEpisodes, stepsLearnEpisode);
	evaluatorDirectory = string(evaluatorDirectoryChar);

	nValues = (int) ceil((rlt_real)totalLearnEpisodes / (rlt_real) episodesBeforeEvaluate);
}
	
CTestSuiteNeededStepsEvaluator::~CTestSuiteNeededStepsEvaluator()
{
	std::list<rlt_real *>::iterator it = succeded->begin();
	for (; it!= succeded->end(); it++)
	{
		delete *it;
	}
	delete succeded;
}

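// Discards all stored evaluation results, including the per-trial success
// buffers owned by this subclass in addition to the base-class values.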
void CTestSuiteNeededStepsEvaluator::clearValues()
{
	CTestSuiteEvaluator::clearValues();	

	std::list<rlt_real *>::iterator it = succeded->begin();
	for (; it!= succeded->end(); it++)
	{
		delete *it;
	}
	succeded->clear();
}


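// Reloads previously written evaluation data. The file consists of repeated
// <testsuiteevaluation> blocks: the test-suite parameters (XML), followed by an
// <evaluationdata> section with one "episode, steps: value succeeded" line per
// evaluation interval. Only blocks whose parameters match the given test suite
// are parsed; a missing field or a value at or below DivergentEvaluationValue
// flags the trial as divergent.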
void CTestSuiteNeededStepsEvaluator::loadEvaluationData(CParameters *testSuite, const char *fileName)
{

	FILE *parameterFile = fopen(fileName, "r");

	if (parameterFile == NULL)
	{
		return;
	}

	while (!feof(parameterFile))
	{
		char buffer[256];

		int results = fscanf(parameterFile, "%s\n", buffer);
		while (results == 1 && strcmp(buffer, "<testsuiteevaluation>") != 0 && !feof(parameterFile)) 
		{
			results =  fscanf(parameterFile, "%s\n", buffer);
		}

		if (feof(parameterFile))
		{
			break;
		}

		CParameters *parameters = new CParameters();
		parameters->loadParametersXML(parameterFile);

		if ((*testSuite) == (*parameters))
		{
			fscanf(parameterFile, "<evaluationdata>\n");
			fscanf(parameterFile, "%s\n", buffer);

			rlt_real time = 0.0;

			int bufEpisode;
			int bufSteps;

			rlt_real *trialValues = new rlt_real[nValues];
			memset(trialValues, 0, sizeof(rlt_real) * nValues);
			values->push_back(trialValues);

			rlt_real *trialSucceded = new rlt_real[nValues];
			memset(trialSucceded, 0, sizeof(rlt_real) * nValues);
			succeded->push_back(trialSucceded);

			results = 1;
			bool bOk = true;
			int i = 0;
			while (strcmp(buffer, "</evaluationdata>") != 0 )
			{
				if (buffer[0] == '<' || results <= 0 || feof(parameterFile))
				{
					bOk = false;
					break;
				}

				sscanf(buffer, "%d,", &bufEpisode);

				int n_result = fscanf(parameterFile, "%d: %lf %lf\n", &bufSteps, &trialValues[i], &trialSucceded[i]);

				if (n_result != 3 || trialValues[i] < getParameter("DivergentEvaluationValue") + 1)
				{
					exception = true;
				}

				results = fscanf(parameterFile, "%s\n", buffer);
				i ++;
			}
			if (bOk)
			{
				while (results == 1 && strcmp(buffer, "</testsuiteevaluation>") != 0 && !feof(parameterFile)) 
				{
					results =  fscanf(parameterFile, "%s\n", buffer);
				}
			}
		}
		delete parameters;	
	}
	fclose(parameterFile);
}

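// Runs a single evaluation trial: resets the test suite, learns
// totalLearnEpisodes episodes in blocks of episodesBeforeEvaluate, and records
// for each block the number of simulation steps taken and the fraction of
// episodes that ended successfully (reaching the step limit counts as success
// only when maxStepsSucceded is set). Results are streamed to the evaluation
// file as a <testsuiteevaluation> block; learned data is optionally dumped to
// learnDataFileName.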
void CTestSuiteNeededStepsEvaluator::doEvaluationTrial(CTestSuite *testSuite, FILE *stream, const char *learnDataFileName)
{
	fprintf(stream, "<testsuiteevaluation>\n");

	testSuite->resetLearnedData();
	testSuite->saveParametersXML(stream);

	rlt_real *trialValues = new rlt_real[nValues];
	rlt_real *trialSucceded = new rlt_real[nValues];

	memset(trialValues, 0, sizeof(rlt_real) * nValues);
	values->push_back(trialValues);


	memset(trialSucceded, 0, sizeof(rlt_real) * nValues);
	succeded->push_back(trialSucceded);

	printf("Evaluating TestSet, Trial %d\n", (int) values->size());

	int nLearnEpisodes = 0;

	rlt_real maxValue = 0;
	int maxIndex = 0;

	time_t startTime = time(NULL);
	time_t endTime;
	int totalSteps = 0;

	rlt_real average = 0.0;

	int nSucc = 0;

	try
	{
		fprintf(stream, "<evaluationdata>\n");
		for (int i = 0; i < nValues; i ++)
		{
			int nEpisodes = 0;
			
			int oldSteps = totalSteps;
			while (nEpisodes < episodesBeforeEvaluate && nLearnEpisodes < totalLearnEpisodes)
			{
				nEpisodes ++;
				nLearnEpisodes ++;

				printf("Learning Episode %d\n", nLearnEpisodes);

				int actualSteps = agent->getTotalSteps();
				testSuite->learn(1, stepsLearnEpisode);
				totalSteps += agent->getTotalSteps() - actualSteps;
				CEnvironmentModel *model = agent->getEnvironmentModel();

				

				if (model->isFailed() || (agent->getTotalSteps() - actualSteps >= stepsLearnEpisode && !maxStepsSucceded))
				{
					printf("Finished Learning (%d steps, failed: ", agent->getTotalSteps() - actualSteps);
					agent->getCurrentState()->getState()->saveASCII(stdout);
					printf(")\n");
				}
				else
				{
					printf("Finished Learning (%d steps, succeeded: ", agent->getTotalSteps() - actualSteps);
					agent->getCurrentState()->getState()->saveASCII(stdout);
					printf(")\n");

					trialSucceded[i] ++;
					nSucc ++;
				}

			}
			trialSucceded[i] /= nEpisodes;
			trialValues[i] = totalSteps - oldSteps;

			fprintf(stream, "%d, %d: %f %f\n", nLearnEpisodes, totalSteps, trialValues[i], trialSucceded[i]);
		}
		fprintf(stream, "</evaluationdata>\n");
		fprintf(stream, "<totalsteps> %d </totalsteps>\n", totalSteps);
		fprintf(stream, "<percentsucceded> %f </percentsucceded>\n", ((rlt_real) nSucc) / totalLearnEpisodes);


		printf("Evaluated Value: %d\n", totalSteps);
		endTime = time(NULL);
		printf("Time needed for Evaluation: %f\n", difftime(endTime, startTime));
		fprintf(stream, "<evaluationtime> %f </evaluationtime>\n", difftime(endTime, startTime));

		fprintf(stream, "<learndatafile> %s </learndatafile>\n", learnDataFileName);

		if (learnDataFileName)
		{
			FILE *learnDataFile = fopen(learnDataFileName, "w");

			if (learnDataFile != NULL)
			{
				testSuite->saveLearnedData(learnDataFile);
				fclose(learnDataFile);
			}
		}
	}
	catch (CMyException *E)
	{
		printf("%s", E->getErrorMsg().c_str());
		fprintf(stream, "%d, %d: %f 0.0\n", nLearnEpisodes, totalSteps, getParameter("DivergentEvaluationValue"));
		fprintf(stream, "</evaluationdata>\n");
		fprintf(stream, "<totalsteps> %f </totalsteps>\n", getParameter("DivergentEvaluationValue"));
		fprintf(stream, "<exception> %s </exception>\n", E->getErrorMsg().c_str());

		exception = true;

	}
	fprintf(stream, "</testsuiteevaluation>\n\n");
	fflush(stream);	
}

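// Overall evaluation value: the total number of learning steps recorded in
// each trial, averaged over all recorded trials.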
rlt_real CTestSuiteNeededStepsEvaluator::getEvaluationValue(std::list<rlt_real *> *values)
{
	std::list<rlt_real *>::iterator it = values->begin();

	rlt_real steps = 0;

	for (; it != values->end();it++)
	{
		for (int i = 0; i < nValues; i++)
		{
			steps += (*it)[i];
		}
	}
	
	return steps / values->size();
}

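// Overall success rate: the mean success fraction within each trial, averaged
// over all trials.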
rlt_real CTestSuiteNeededStepsEvaluator::getPercentageSucceded()
{
	std::list<rlt_real *>::iterator it = succeded->begin();

	rlt_real succ = 0;

	for (; it != succeded->end(); it++)
	{
		// Average the success fractions within this trial before adding it to
		// the overall sum, so earlier trials are not divided repeatedly.
		rlt_real trialSucc = 0;
		for (int i = 0; i < nValues; i++)
		{
			trialSucc += (*it)[i];
		}
		succ += trialSucc / nValues;
	}
	succ /= succeded->size();
	
	return succ;
}



/*
CTestSuiteEpisodesToLearnEvaluator::CTestSuiteEpisodesToLearnEvaluator(CAgent *agent, int neededSuccEpisodes, int maxEpisodes, int stepsPerEpisode) : CTestSuiteEvaluator(agent)
{
	this->neededSuccEpisodes = neededSuccEpisodes;
	this->maxEpisodes = maxEpisodes;
	this->stepsPerEpisode = stepsPerEpisode;

	char evaluatorDirectoryChar[80];
	sprintf(evaluatorDirectoryChar, "ETL_%d_%d_%d", neededSuccEpisodes, maxEpisodes, stepsPerEpisode);
	evaluatorDirectory = string(evaluatorDirectoryChar);
}

rlt_real CTestSuiteEpisodesToLearnEvaluator::evaluateTestSuite(CTestSuite *testSuite, FILE *stream, char *learnDataFileName)
{
	int nEpisodes = 0;
	int nSuccEpisodes = 0;
	
	if (stream)
	{
		fprintf(stream, "Evaluating new Testsuite\n");
	}

	try
	{
		while (nEpisodes < maxEpisodes && nSuccEpisodes < neededSuccEpisodes)
		{
			if (stream)
			{
				fprintf(stream, "%d ", nEpisodes);
			}
			testSuite->learn(1, stepsPerEpisode);

			if (isEpisodeSuccessFull(stream))
			{
				nSuccEpisodes ++;
				printf("Episode %d Ended SuccessFull (%d)\n",nEpisodes, nSuccEpisodes );
			}
			else
			{
				nSuccEpisodes = 0;
				printf("Episode %d failed\n",nEpisodes );

			}
			nEpisodes ++;
		}
	}
	catch (CDivergentVFunctionException *E)
	{
		printf(E->getErrorMsg().c_str());
		return getParameter("DivergentEvaluationValue");
	}

	if (learnDataFileName)
	{
		FILE *learnDataFile = fopen(learnDataFileName, "w");

		testSuite->saveLearnedData(learnDataFile);
		fclose(learnDataFile);

	}
	

	return maxEpisodes - nEpisodes;
}
*/

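// Evaluator that periodically measures the average reward of the learned
// policy. After every episodesBeforeEvaluate learning episodes the policy is
// evaluated with the given CPolicyEvaluator; results are stored in a directory
// named "AR_<totalLearnEpisodes>_<episodesBeforeEvaluate>_<stepsLearnEpisode>".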
CAverageRewardTestSuiteEvaluator::CAverageRewardTestSuiteEvaluator(CAgent *agent, string testSuiteCollectionName, CPolicyEvaluator *evaluator, int totalLearnEpisodes, int episodesBeforeEvaluate, int stepsLearnEpisode, int nTrials) : CTestSuiteEvaluator(agent, testSuiteCollectionName, nTrials, (int) ceil((rlt_real)totalLearnEpisodes / (rlt_real) episodesBeforeEvaluate))
{
	this->totalLearnEpisodes = totalLearnEpisodes;
	this->episodesBeforeEvaluate = episodesBeforeEvaluate;
	this->evaluators = new std::list<CPolicyEvaluator *>();
	evaluators->push_back(evaluator);
	this->stepsLearnEpisode = stepsLearnEpisode;

	char evaluatorDirectoryChar[80];
	sprintf(evaluatorDirectoryChar, "AR_%d_%d_%d", totalLearnEpisodes, episodesBeforeEvaluate, stepsLearnEpisode);
	evaluatorDirectory = string(evaluatorDirectoryChar);

	nAverageRewards = (int) ceil((rlt_real)totalLearnEpisodes / (rlt_real) episodesBeforeEvaluate);

	evaluationFunction = ARCF_AVERAGE;
}

CAverageRewardTestSuiteEvaluator::~CAverageRewardTestSuiteEvaluator()
{
	delete evaluators;
}


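// Label for the i-th evaluation point, presumably used as an x-axis value when
// plotting: the number of learning episodes completed (i * episodesBeforeEvaluate).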
void CAverageRewardTestSuiteEvaluator::getXLabel(char *xLabel, int i)
{
	sprintf(xLabel, "%d", i* episodesBeforeEvaluate);
}

void CAverageRewardTestSuiteEvaluator::addPolicyEvaluator(CPolicyEvaluator *evaluator)
{
	evaluators->push_back(evaluator);
}

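// Single evaluation trial for the average-reward evaluator: alternates blocks
// of learning episodes with policy evaluations. For each block the agent is
// temporarily switched to the test suite's evaluation controller, the policy
// value is measured with the primary evaluator (additional evaluators are only
// printed), and the per-block values plus the overall average and best value
// are written to the evaluation file.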
void CAverageRewardTestSuiteEvaluator::doEvaluationTrial(CTestSuite *testSuite, FILE *stream, const char *learnDataFileName)
{
	fprintf(stream, "<testsuiteevaluation>\n");

	testSuite->resetLearnedData();
	testSuite->saveParametersXML(stream);

	rlt_real *trialValues = new rlt_real[nAverageRewards];

	memset(trialValues, 0, sizeof(rlt_real) * nAverageRewards);
	values->push_back(trialValues);

	printf("Evaluating TestSet, Trial %d\n", (int) values->size());

	int nLearnEpisodes = 0;

	rlt_real maxValue = 0;
	int maxIndex = 0;

	time_t startTime = time(NULL);
	time_t endTime;
	int totalSteps = 0;

	rlt_real average = 0.0;

	CPolicyEvaluator *evaluator = *evaluators->begin();
	try
	{
		fprintf(stream, "<evaluationdata>\n");
		for (int i = 0; i < nAverageRewards; i ++)
		{
			int nEpisodes = 0;
			

			while (nEpisodes < episodesBeforeEvaluate && nLearnEpisodes < totalLearnEpisodes)
			{
				nEpisodes ++;
				nLearnEpisodes ++;

				printf("Learning Episode %d\n", nLearnEpisodes);

				int actualSteps = agent->getTotalSteps();
				testSuite->learn(1, stepsLearnEpisode);
				totalSteps += agent->getTotalSteps() - actualSteps;
				CEnvironmentModel *model = agent->getEnvironmentModel();

				if (model->isFailed())
				{
					printf("Finished Learning (%d steps, failed: ", agent->getTotalSteps() - actualSteps);
					agent->getCurrentState()->getState()->saveASCII(stdout);
					printf(")\n");
				}
				else
				{
					printf("Finished Learning (%d steps, succeeded: ", agent->getTotalSteps() - actualSteps);
					agent->getCurrentState()->getState()->saveASCII(stdout);
					printf(")\n");

				}

			}

			agent->setController(testSuite->getEvaluationController());
			
			trialValues[i] = evaluator->evaluatePolicy();
			average += trialValues[i];
			agent->setController(testSuite->getController());

			if ((i == 0) || maxValue < trialValues[i])
			{
				maxValue = trialValues[i];
				maxIndex = i;
			}

			printf("Value after %d Episodes (%d Steps): %f", nLearnEpisodes, totalSteps, trialValues[i]);
			if (evaluators->size() > 1)
			{
				std::list<CPolicyEvaluator *>::iterator it = evaluators->begin();
				it++;

				for (int i = 1; it != evaluators->end(); it++, i++)
				{
					rlt_real value = (*it)->evaluatePolicy();
					printf(" Evaluator %d: %f ", i,value);
				}
			}
			printf("\n");

			fprintf(stream, "%d, %d: %f\n", nLearnEpisodes, totalSteps, trialValues[i]);
		}
		fprintf(stream, "</evaluationdata>\n");
		fprintf(stream, "<averagevalue> %f </averagevalue>\n", average / nAverageRewards);
		fprintf(stream, "<bestvalue> %f </bestvalue>\n", maxValue);

		printf("Evaluated Value: %f, Best Value: %f at Evaluation Index %d\n", average / nAverageRewards, maxValue, maxIndex);
		endTime = time(NULL);
		printf("Time needed for Evaluation: %f\n", difftime(endTime, startTime));
		fprintf(stream, "<evaluationtime> %f </evaluationtime>\n", difftime(endTime, startTime));

		fprintf(stream, "<learndatafile> %s </learndatafile>\n", learnDataFileName);

		if (learnDataFileName)
		{
			FILE *learnDataFile = fopen(learnDataFileName, "w");

			if (learnDataFile != NULL)
			{
				testSuite->saveLearnedData(learnDataFile);
				fclose(learnDataFile);
			}
		}
	}
	catch (CMyException *E)
	{
		printf("%s", E->getErrorMsg().c_str());
		fprintf(stream, "%d, %d: %f\n", nLearnEpisodes, totalSteps, getParameter("DivergentEvaluationValue"));
		fprintf(stream, "</evaluationdata>\n");
		fprintf(stream, "<averagevalue> %f </averagevalue>\n", getParameter("DivergentEvaluationValue"));
		fprintf(stream, "<bestvalue> %f </bestvalue>\n", getParameter("DivergentEvaluationValue"));
		fprintf(stream, "<exception> %s </exception>\n", E->getErrorMsg().c_str());

		exception = true;

	}
	fprintf(stream, "</testsuiteevaluation>\n\n");
	fflush(stream);	
}

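// Counterpart to CTestSuiteNeededStepsEvaluator::loadEvaluationData for the
// average-reward evaluator; reloads previously stored evaluation data for
// matching test-suite parameters (function body continues on the next page of
// the listing).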
void CAverageRewardTestSuiteEvaluator::loadEvaluationData(CParameters *testSuite, const char  *fileName)
{
	FILE *parameterFile = fopen(fileName, "r");
