⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 adaptiveoptimaser.cpp

📁 路径规划源程序
💻 CPP
字号:
//#include "StdAfx.h"
#include "AdaptiveOptimaser.h"
#ifndef USE_TRADITIONAL_AKF


// Default optimiser: 2x2 observation covariances, 4x4 model noise, a
// 4-sample residual-covariance window and a 3-deep deviation stack.
CAdaptiveOptimaser::CAdaptiveOptimaser(void)
	: m_iCovEstNum(4)
	, m_mResidualCovStack(4)
	, m_calResidualCoviarance(2,2)
	, m_mTherioticalResCov(2,2)
	, m_SumOfAllResCov(2,2)
	, m_mDif_ResCov2TherResCov(2,2)
	, m_mCovDifStack(3)
	, m_mPreDeltaStep(2,2)
	, m_mAlterStepVal(2,2)
	, m_mPreErDif(2,2)
	, m_mNewModelNoise(4,4)
	, m_mCurModeNoiseModStep(4,4)
{
	m_iNumOfInputResidual = 0;
	m_iNumOfCovDif = 0;
	// Per-element stability flags start out unstable.
	m_bStableTag[0] = false;
	m_bStableTag[1] = false;

	// Covariance accumulator starts empty; the step-seek state starts with a
	// 0.9 step and a unit error difference so the first iteration has
	// non-degenerate inputs.
	for (int row = 0; row < 2; ++row)
	{
		for (int col = 0; col < 2; ++col)
		{
			m_SumOfAllResCov(row, col) = 0;
			m_mPreDeltaStep(row, col) = 0.9;
			m_mPreErDif(row, col) = 1;
		}
	}
	// The Q-modification step matrix is fully zeroed before first use.
	for (int row = 0; row < 4; ++row)
	{
		for (int col = 0; col < 4; ++col)
		{
			m_mCurModeNoiseModStep(row, col) = 0;
		}
	}
}
// Parameterised optimiser.
// CovEstNum   : number of residual samples in the covariance-estimation window.
// ObserveSize : dimension of the observation (residual) vector.
// StateSize   : dimension of the state vector; the model-noise matrices are
//               StateSize x StateSize.
// NOTE(review): the step/error matrices (m_mPreDeltaStep, m_mAlterStepVal,
// m_mPreErDif) were previously sized CovEstNum x CovEstNum, but every consumer
// — the fill loops below, the range projections in ModelNoiseAdapter, and the
// default constructor (which sizes them 2x2 = ObserveSize) — treats them as
// ObserveSize x ObserveSize, so they are sized with ObserveSize here.
CAdaptiveOptimaser::CAdaptiveOptimaser(int CovEstNum,size_t ObserveSize,size_t StateSize):m_iCovEstNum(CovEstNum),m_mResidualCovStack(CovEstNum),
m_calResidualCoviarance(ObserveSize,ObserveSize),m_mTherioticalResCov(ObserveSize,ObserveSize),m_SumOfAllResCov(ObserveSize,ObserveSize),
m_mDif_ResCov2TherResCov(ObserveSize,ObserveSize),m_mCovDifStack(3),
m_mPreDeltaStep(ObserveSize,ObserveSize),m_mAlterStepVal(ObserveSize,ObserveSize),m_mPreErDif(ObserveSize,ObserveSize),
m_mNewModelNoise(StateSize,StateSize),m_mCurModeNoiseModStep(StateSize,StateSize)
{
	m_iNumOfInputResidual=0;
	m_iNumOfCovDif=0;
	m_bStableTag[0]=m_bStableTag[1]=false;
	// size_t counters avoid signed/unsigned comparison against the size params.
	for (size_t i=0;i<ObserveSize;i++)
	{
		for (size_t j=0;j<ObserveSize;j++)
		{
			m_SumOfAllResCov(i,j)=0;   // covariance accumulator starts empty
			m_mPreDeltaStep(i,j)=0.9;  // initial seek step
			m_mPreErDif(i,j)=1;        // non-zero so the first error difference is accepted
		}
	}
	// Zero the whole Q-modification matrix. It is StateSize x StateSize; the
	// previous bound of ObserveSize+2 only coincided with that when
	// StateSize == ObserveSize + 2.
	for (size_t i=0;i<StateSize;i++)
	{
		for (size_t j=0;j<StateSize;j++)
		{
			m_mCurModeNoiseModStep(i,j)=0;
		}
	}
}
#else
// Default optimiser for the traditional-AKF build: 2x2 observation
// covariances, 4x4 model noise, a 4-sample residual window and a 3-deep
// deviation stack.
CAdaptiveOptimaser::CAdaptiveOptimaser(void)
	: m_iCovEstNum(4)
	, m_mResidualCovStack(4)
	, m_calResidualCoviarance(2,2)
	, m_mTherioticalResCov(2,2)
	, m_SumOfAllResCov(2,2)
	, m_mCovDifStack(3)
	, m_mNewModelNoise(4,4)
{
	m_iNumOfInputResidual = 0;
	// The residual-covariance accumulator starts empty.
	for (int row = 0; row < 2; ++row)
	{
		for (int col = 0; col < 2; ++col)
		{
			m_SumOfAllResCov(row, col) = 0;
		}
	}
}
#endif

// Nothing to release explicitly: every member cleans up via its own destructor.
CAdaptiveOptimaser::~CAdaptiveOptimaser()
{
}
#ifdef USE_TRADITIONAL_AKF
// Traditional adaptive-Kalman path: once enough residuals have been gathered
// to form the theoretical residual covariance, run the conventional adapter
// to refresh the model noise; until then the current model noise is returned
// unchanged.
VARIANCE& CAdaptiveOptimaser::ModelNoiseAdapter(RESIDUAL_STYLE Residual,VARIANCE& PredictVar,VARIANCE& StateVar,matrix_type& KalmanGain)
{
	const bool haveTheoreticalCov = CalculateTheroticalCovOfResidual(Residual);
	if (haveTheoreticalCov)
	{
		ConventionalAdapter(PredictVar, StateVar, m_mTherioticalResCov, KalmanGain);
	}
	// Otherwise: residual window still filling, nothing to do yet.
	return m_mNewModelNoise;
}
#endif
#ifndef USE_TRADITIONAL_AKF
// Adaptive model-noise tuner (RPPOP-style step seek).
// Residual           : latest innovation vector.
// ResidualCoviarance : externally computed covariance of that residual.
// Returns a reference to m_mNewModelNoise, updated in place when both the
// theoretical residual covariance and the error derivatives are available
// and at least one element is still unstable.
VARIANCE& CAdaptiveOptimaser::ModelNoiseAdapter(RESIDUAL_STYLE Residual, VARIANCE ResidualCoviarance)
{
	//VARIANCE QStep();
	// Sub-ranges used to replicate the 2x2 step matrix into both diagonal
	// blocks of the 4x4 Q-modification matrix below.
	range Line(0,2);
	range Row(0,2); 
	range Line2(2,4);
	range Row2(2,4); 
#ifdef  DEBUG_USE_FLAG
	ViewMatrix(m_mNewModelNoise);
	m_mSigma_Viewer;
#endif
	if(CalculateTheroticalCovOfResidual(Residual,ResidualCoviarance))// theoretical residual covariance is available
	{
		if(CalculateDifOfError())// error derivatives are available
		{
//			__asm int 3;
			if ((!m_bStableTag[0])||(!m_bStableTag[1]))// stability is checked element by element: keep seeking while any element is unstable
			{
				// NOTE(review): this loop indexes m_bStableTag[i] (2 entries),
				// so it assumes m_mCurrentErDif.size1() == 2 — confirm.
				for (unsigned i=0;i<m_mCurrentErDif.size1();i++)
				{
					if (!m_bStableTag[i])// this element has not stabilised yet
					{
					
					//for (int j=0;j<m_mCurrentErDif.size2();j++)// removed: m_mNewModelNoise is diagonal, only (i,i) matters
					//{
						unsigned j=i;
						m_mAlterStepVal(i,j)=1+RPPOP_DeltaStepSeek(m_mCurrentErDif(i,j),m_mPreErDif(i,j),m_mPreDeltaStep(i,j));// the Matlab code's "1 + step" is folded in here for efficiency
					}
#ifdef  DEBUG_USE_FLAG

				ViewMatrix(m_mAlterStepVal);
				m_mSigma_Viewer;
#endif
					//}
				}
				
				project(m_mCurModeNoiseModStep, Line, Row)=m_mAlterStepVal;// replicate the step matrix into both diagonal blocks of the Q-modification matrix
				project(m_mCurModeNoiseModStep, Line2, Row2)=m_mAlterStepVal;         
//#ifdef  DEBUG_USE_FLAG
//
//				ViewMatrix(m_mCurModeNoiseModStep);
//				m_mSigma_Viewer;
//#endif
//
//#ifdef  DEBUG_USE_FLAG
//
//				ViewMatrix(m_mNewModelNoise);
//				m_mSigma_Viewer;
//#endif
//
				// Scale the model noise element-wise by the step matrix, then clamp it.
				m_mNewModelNoise=element_prod(m_mCurModeNoiseModStep,m_mNewModelNoise);	
				LimitModifiedModelNoise(m_mNewModelNoise);
//#ifdef  DEBUG_USE_FLAG
//
//				ViewMatrix(m_mNewModelNoise);
//				m_mSigma_Viewer;
//#endif
			}		
			OptmiseInStableCheck(m_mAlterStepVal);// judge whether the seek has stabilised, based on this iteration's step change
		}

	}
	else
	{
		// Residual window still filling: nothing to do yet.
	}		
	return m_mNewModelNoise;
}

// Maintains a sliding window of residual outer products; once the window is
// full it forms the window-averaged ("theoretical") residual covariance, the
// element-wise absolute deviation of the supplied ResidualCov from it, and
// banks that deviation on a 3-deep stack (newest first).
// Residual    : latest innovation vector.
// ResidualCov : externally computed residual covariance to compare against.
// Returns true only after at least 3 deviations have been banked, i.e. once
// CalculateDifOfError has enough history.
bool CAdaptiveOptimaser::CalculateTheroticalCovOfResidual(RESIDUAL_STYLE& Residual,VARIANCE ResidualCov)
{
#ifdef  DEBUG_USE_FLAG

	ViewVector(Residual);
	m_mSigma_Viewer;
#endif
	if(m_iNumOfInputResidual>=m_iCovEstNum)// enough residuals have been fed in: the seek bookkeeping can start
	{
		m_calResidualCoviarance=outer_prod(Residual,Residual);// sample covariance Residual*Residual'; uBLAS vectors behave as row vectors here, hence outer_prod

		m_mResidualCovStack.insert(m_mResidualCovStack.begin(),m_calResidualCoviarance);// push at the front: newest sample first
		std::vector<VARIANCE>::iterator EndOfStack=m_mResidualCovStack.end();
//#ifdef  DEBUG_USE_FLAG
//
//		ViewMatrix(*(EndOfStack-1));
//		m_mSigma_Viewer;
//#endif
#ifdef  DEBUG_USE_FLAG

		ViewMatrix(m_SumOfAllResCov);
		m_mSigma_Viewer;
#endif
		m_SumOfAllResCov=m_SumOfAllResCov-(*(--EndOfStack));// sliding-window sum: subtract the element about to be evicted from the stack...
		m_SumOfAllResCov+=m_calResidualCoviarance;// ...and add the newly inserted one
		m_mTherioticalResCov=(m_SumOfAllResCov)/m_iCovEstNum;// window average = theoretical residual covariance

		m_mResidualCovStack.pop_back();// evict the oldest sample	
		///////////////////////// deviation from the supplied covariance, element-wise absolute value /////////////////////////
		m_mDif_ResCov2TherResCov=ResidualCov-m_mTherioticalResCov;// ResidualCov - theoretical residual covariance
		MatElemABS(m_mDif_ResCov2TherResCov);
#ifdef  DEBUG_USE_FLAG

		ViewMatrix(m_mDif_ResCov2TherResCov);
		m_mSigma_Viewer;
#endif
//////////////////////////// bank the deviation; only the 3 newest are kept (push front / pop back) //////////////////////////
		m_mCovDifStack.insert(m_mCovDifStack.begin(),m_mDif_ResCov2TherResCov);
		m_mCovDifStack.pop_back();// evict the oldest deviation
		if (m_iNumOfCovDif>2)
		{
			return true;
		}
		else
		{
			m_iNumOfCovDif++;// still warming up the 3-deep deviation history
			return false;
		}		
	}
	else
	{
		// Window still filling: accumulate the sum and push the sample, but
		// produce no theoretical covariance yet.
		m_calResidualCoviarance=outer_prod(Residual,Residual);// sample covariance Residual*Residual'
		m_iNumOfInputResidual++;
		m_SumOfAllResCov+=m_calResidualCoviarance;
#ifdef  DEBUG_USE_FLAG

		ViewMatrix(m_SumOfAllResCov);
		m_mSigma_Viewer;
#endif
		m_mResidualCovStack.insert(m_mResidualCovStack.begin(),m_calResidualCoviarance);// push at the front: newest sample first
		m_mResidualCovStack.pop_back();// evict a default-constructed placeholder	
		return false;
	}
}
#endif
#ifdef USE_TRADITIONAL_AKF
// Traditional-AKF variant: maintains the same sliding window of residual
// outer products and window-averaged ("theoretical") residual covariance, but
// banks no deviation history.
// Residual : latest innovation vector.
// Returns true as soon as the window is full and m_mTherioticalResCov is valid.
bool CAdaptiveOptimaser::CalculateTheroticalCovOfResidual(RESIDUAL_STYLE& Residual)
{
#ifdef  DEBUG_USE_FLAG

	ViewVector(Residual);
	m_mSigma_Viewer;
#endif
	if(m_iNumOfInputResidual>=m_iCovEstNum)// enough residuals have been fed in
	{
		m_calResidualCoviarance=outer_prod(Residual,Residual);// sample covariance Residual*Residual'; uBLAS vectors behave as row vectors here, hence outer_prod

		m_mResidualCovStack.insert(m_mResidualCovStack.begin(),m_calResidualCoviarance);// push at the front: newest sample first
		std::vector<VARIANCE>::iterator EndOfStack=m_mResidualCovStack.end();
		//#ifdef  DEBUG_USE_FLAG
		//
		//		ViewMatrix(*(EndOfStack-1));
		//		m_mSigma_Viewer;
		//#endif
#ifdef  DEBUG_USE_FLAG

		ViewMatrix(m_SumOfAllResCov);
		m_mSigma_Viewer;
#endif
		m_SumOfAllResCov=m_SumOfAllResCov-(*(--EndOfStack));// sliding-window sum: subtract the element about to be evicted...
		m_SumOfAllResCov+=m_calResidualCoviarance;// ...and add the newly inserted one
		m_mTherioticalResCov=(m_SumOfAllResCov)/m_iCovEstNum;// window average = theoretical residual covariance

		m_mResidualCovStack.pop_back();// evict the oldest sample	
		return true;
				
	}
	else
	{
		// Window still filling: accumulate the sum and push the sample.
		m_calResidualCoviarance=outer_prod(Residual,Residual);// sample covariance Residual*Residual'
		m_iNumOfInputResidual++;
		m_SumOfAllResCov+=m_calResidualCoviarance;
#ifdef  DEBUG_USE_FLAG

		ViewMatrix(m_SumOfAllResCov);
		m_mSigma_Viewer;
#endif
		m_mResidualCovStack.insert(m_mResidualCovStack.begin(),m_calResidualCoviarance);// push at the front: newest sample first
		m_mResidualCovStack.pop_back();// evict a default-constructed placeholder	
		return false;
	}
}
#endif 
#ifndef USE_TRADITIONAL_AKF

// Approximates the current and previous error-function derivatives from the
// three banked covariance deviations (stack order: newest first):
//   m_mCurrentErDif = dif[0] - dif[1]
//   m_mPreErDif     = dif[1] - dif[2]   (element-wise, only where non-zero)
// The downstream RPPOD step seek uses only the SIGN of these derivatives, so
// the time span between samples is deliberately ignored. Always returns true.
bool CAdaptiveOptimaser::CalculateDifOfError()
{
	std::vector<VARIANCE>::iterator HeadOfStack=m_mCovDifStack.begin();
//	std::vector<VARIANCE>::iterator p2Stack;
//#ifdef  DEBUG_USE_FLAG
//
//	ViewMatrix(VARIANCE(*HeadOfStack));
//	m_mSigma_Viewer;
//#endif
//#ifdef  DEBUG_USE_FLAG
//
//	ViewMatrix(VARIANCE(*(++HeadOfStack)));
//	m_mSigma_Viewer;
//#endif
	//p2Stack=HeadOfStack+1;
	size_t Size=(*HeadOfStack).size1();
	VARIANCE aTmp=(*HeadOfStack);// newest deviation
	VARIANCE bTmp=(*(++HeadOfStack));	// middle deviation
	m_mCurrentErDif=aTmp-bTmp;// current error derivative, approximated by the difference of the two newest deviations

	aTmp=*(++HeadOfStack);// aTmp reused: now the oldest deviation

	for (int i=0;i<Size;i++)
	{
		for (int j=0;j<Size;j++)
		{
			if (m_mPreErDif(i,j)!=0)// elements pinned at 0 are left untouched — presumably so an externally supplied 0 is never overwritten; TODO confirm
			{
				m_mPreErDif(i,j)=bTmp(i,j)-aTmp(i,j);// previous error derivative, same difference approximation
			}
		}
		
	}
	
	return true;
}
// Checks, per diagonal element, whether the step seek has settled.
// CovAlterStep : the current (1 + delta-step) matrix from the seek; a step
//                change close to zero means that element has converged.
// Side effects : on entering stability, latches a per-element exit threshold;
//                on leaving stability, resets that element's model noise to
//                the original value and restarts its seek state.
// Returns true only when every element is stable.
bool CAdaptiveOptimaser::OptmiseInStableCheck(VARIANCE& CovAlterStep)
{
	for (unsigned iii=0;iii<2;iii++)
	{
		// The seek-step change is the final step value minus its baseline of 1.
		double stepChange=fabs(CovAlterStep(iii,iii)-1);
		if ((!m_bStableTag[iii])&&stepChange<STABLE_THRES)// entering the stable state
		{
			// Record the current covariance deviation, amplified by
			// m_fStableDifAmplitudeParm, as the threshold for leaving stability.
			m_dStableDifThres[iii]=m_fStableDifAmplitudeParm*m_mDif_ResCov2TherResCov(iii,iii);
			m_bStableTag[iii]=true;
		}
		else if (
			m_bStableTag[iii] && 
			(m_mDif_ResCov2TherResCov(iii,iii)>m_dStableDifThres[iii])
			)// leaving the stable state: reset this element of the optimiser
		{
			m_bStableTag[iii]=false;
			// Restore the original model noise in both diagonal blocks and
			// restart a fresh round of step seeking for this element.
			m_mNewModelNoise(iii,iii)=m_mNodelNoiseOriginalVal(iii,iii);
			m_mNewModelNoise(iii+2,iii+2)=m_mNodelNoiseOriginalVal(iii+2,iii+2);
			// (The previous "*=0" stores here were dead: both values were
			// unconditionally overwritten with 0.9 immediately afterwards.)
			m_mPreErDif(iii,iii)=0.9;
			m_mPreDeltaStep(iii,iii)=0.9;
		}
	}
	// Stable only when every element is stable.
	return m_bStableTag[0]&&m_bStableTag[1];
}
#endif
// Seeds the adapter: remembers the caller-supplied baseline model noise
// (restored whenever an element leaves the stable state), stores the
// stability-threshold amplification factor, and starts the working model
// noise from that baseline. Always returns true.
bool CAdaptiveOptimaser::initAdapter(VARIANCE& NodelNoiseOriginalVal,float StableDifAmplitudeParm)
{
	m_fStableDifAmplitudeParm = StableDifAmplitudeParm;
	m_mNodelNoiseOriginalVal = NodelNoiseOriginalVal;
	m_mNewModelNoise = m_mNodelNoiseOriginalVal;
	return true;
}

// 限制模型噪声不太大也别太小,
//bool CAdaptiveOptimaser::LimitModNoiseInSafeVal(double&ModelNoise)
//{
//
//}
// 对寻优调整的结果(模型噪声)进行限幅,不能太大也不能太小,输入为寻优的结果矩阵的引用
//void CAdaptiveOptimaser::LimitModifiedModelNoise(VARIANCE& ModifyResult)
//{
//
//}

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -