// codec.cpp — turbo-SPC / convolutional CODEC implementation.
for( j=0; j<NodeNum; j++ )
{
if( ConvSpc.Bcode[j][0] == 0 ) ConvSpc.CodeInf[m][j][0] = Tail[2*i] + Tail[2*i+1];
else ConvSpc.CodeInf[m][j][0] = Tail[2*i];
if( ConvSpc.Bcode[j][1] == 0 ) ConvSpc.CodeInf[m][j][1] = Tail[2*i+1];
else ConvSpc.CodeInf[m][j][1] = 0;
}
m++;
}
}
/******************************************************************************************
function : sub-function of SpcCoDec::Apriori( float *Parity )
******************************************************************************************/
// Computes the leave-one-out parity-check LLRs for a single parity check of
// `blength` bits.  On return, bf[i] holds the Plr (box-plus) combination of all
// Zin[k] with k != i (the extrinsic value for bit i), and *Parity combines ALL
// of Zin[0..blength-1].  A backward suffix pass plus a forward prefix sweep
// keeps the cost at O(blength) Plr calls instead of O(blength^2).
void SpcCoDec::PrePrtDec( float *Zin, float *Parity, float *bf, int blength )
{
int i, m = blength - 1;
float a=0;
if( blength > 1 )
{
// Backward pass: bf[i] = Plr( Zin[i+1], ..., Zin[blength-1] ) (suffix combinations).
bf[blength-2] = Zin[m--];
for( i=blength-3; i>=0; i--) bf[i] = Plr( bf[i+1], Zin[m--] );
// Here m == 0; `a` accumulates the prefix combination Plr( Zin[0..i-1] ).
a = Zin[m++];
*Parity = Plr( bf[0], a ); // suffix(1..end) box-plus Zin[0] == all inputs combined
// Forward sweep: merge the running prefix into each stored suffix,
// turning bf[i] into the leave-one-out combination for bit i.
for( i=1; i<blength-1; i++)
{
bf[i] = Plr( bf[i], a );
a = Plr( Zin[m++], a );
}
bf[i]= a; // last bit: extrinsic = prefix of everything before it
}
// Degenerate checks: LR is presumably the "no information" LLR constant -- TODO confirm.
else if( blength == 1 ) { bf[0]=LR; *Parity=*Zin; }
else if( blength == 0 ) *Parity =LR;
}
/******************************************************************************************
function : parity likelihood ratio -- handle the probability likelihood value
******************************************************************************************/
/*float SpcCoDec::Plr( float x, float y )
{
return ( x * y + 1 ) / ( x + y );
}*/
// Exact box-plus ("parity likelihood ratio") of two LLRs in the log domain:
// sign(x)*sign(y)*min(|x|,|y|) plus the two Jacobian correction terms.
// (The commented-out version above is the same operation in the probability domain.)
float SpcCoDec::Plr( float x, float y )
{
float core = (float) MIN( fabs(x), fabs(y) );
if( x * y < 0 )
core = -core;
float corrSum = (float) log( 1 + exp( -fabs(x+y) ) );
float corrDiff = (float) log( 1 + exp( -fabs(x-y) ) );
return core + corrSum - corrDiff;
}
/******************************************************************************************
function :
******************************************************************************************/
/*float SpcCoDec::Clip( float x )
{
if( x > TOOLARGE ) return TOOLARGE;
else if( x < TOOSMALL ) return TOOSMALL;
return x;
}*/
/******************************************************************************************
function : pre-decoding of turbo-SPC
******************************************************************************************/
void SpcCoDec::PreDecode( )
{
int i, j, nuser;
for( nuser=0; nuser<NUser; nuser++ ) for( i=0; i<NestDim; i++ ) for( j=0; j<DataLen; j++ )
ExInf[nuser][i][j] = 0;
for( i=0; i<DataLen ;i++ )
Sum_Ex[i] = 0.0; // LLR
}
/******************************************************************************************
function : initialization of the convolutional CODEC
allocate the dynamic memory and setup **Flnk, **Bcode, etc
input : _NodeOrder, _TrellisLen, _Npoly and _Dpoly
******************************************************************************************/
// Initializes the convolutional CODEC: stores the trellis dimensions, allocates
// all per-state / per-section working arrays, then builds the transition table
// (SysCodeTable) and the termination table (TailGen).
void Conv::Init( int _NodeOrder, int _TrellisLen, int _Npoly, int _Dpoly )
{
int s, t;
NodeOrder = _NodeOrder;
NodeNum = ( 1 << NodeOrder );   // number of trellis states
TrellisLen = _TrellisLen;
// Per-state scratch buffer, termination table and branch tables.
tmp = new float [NodeNum];
TermTable = new int* [NodeNum];
Flnk = new int* [NodeNum];
Bcode = new int* [NodeNum];
for( s = 0; s < NodeNum; s++ )
{
TermTable[s] = new int [2*NodeOrder];
Flnk[s] = new int [2];
Bcode[s] = new int [2];
}
// State metrics need TrellisLen+1 stages (both endpoints of the recursions).
Rcd = new float* [TrellisLen+1];
for( t = 0; t <= TrellisLen; t++ ) Rcd[t] = new float [NodeNum];
// Per-section a posteriori (aPP) and a priori branch information (CodeInf).
aPP = new float* [TrellisLen];
CodeInf = new float** [TrellisLen];
for( t = 0; t < TrellisLen; t++ )
{
aPP[t] = new float [4];
CodeInf[t] = new float* [NodeNum];
for( s = 0; s < NodeNum; s++ ) CodeInf[t][s] = new float [2];
}
Conv::SysCodeTable( _Npoly, _Dpoly );
Conv::TailGen( );
}
/******************************************************************************************
function : convolutional encoder
input : _DataIn and _StartS
output : _ParityOut and _Ends
******************************************************************************************/
// Convolutional encoding: starting from the state stored in *_EndS, emit one
// parity bit per input bit and advance the trellis state in place; *_EndS holds
// the reached state on return.  Only TrellisLen-NodeOrder information bits are
// encoded here — the last NodeOrder sections are reserved for the termination
// tail (see TailGen).
void Conv::Encoder( int *_DataIn, int *_ParityOut, int *_EndS )
{
int k;
// for( k=0; k<TrellisLen; k++ )   // unterminated variant, kept for reference
for( k = 0; k < TrellisLen - NodeOrder; k++ )
{
_ParityOut[k] = Bcode[ *_EndS ][ _DataIn[k] ];
*_EndS = Flnk[ *_EndS ][ _DataIn[k] ];
}
}
/******************************************************************************************
function : convolutional decoder
input : _StartProb and _EndProb ( _CodeInf[][2][2] -- a priori branch probability)
output : _Rcd[][4] -- a posteriori branch probability
******************************************************************************************/
// Full forward-backward (BCJR) pass over the trellis.  The backward sweep first
// fills Rcd[][] with the beta metrics; AppDecode then runs the forward sweep,
// forming the per-section a posteriori branch metrics in aPP[][].  The order of
// the two calls is essential (AppDecode consumes the betas in place).
void Conv::BCJR( float *_StartProb, float *_EndProb )
{
Conv::BwSearch( _StartProb, _EndProb );
Conv::AppDecode( _StartProb, _EndProb );
}
/******************************************************************************************
function : a posteriori probability decoding of each trellis section
input : _StartProb and _EndProb ( _CodeInf[][2][2] )
output : _Rcd[][4]
******************************************************************************************/
// A posteriori decoding of each trellis section.  For section i it accumulates
// (via LlrAdd, a log-domain sum) alpha + branch-metric + beta over the branches,
// grouped into aPP[i][0..3] by (input bit, parity bit): entries 0/1 are the
// input-0 branches with parity 0/1, entries 2/3 the input-1 branches.
// Ordering is critical: when section i is processed, Rcd[i] already holds the
// forward metric (stage 0 was seeded with _StartProb by BwSearch; later stages
// are written by FwSearch(i-1)) while Rcd[i+1] still holds the backward metric
// from BwSearch.  FwSearch(i) then overwrites Rcd[i+1] with the forward metric
// for the next iteration.
void Conv::AppDecode( float *_StartProb, float *_EndProb )
{
int i, j;
for( i=0; i<TrellisLen; i++ )
{
for( j=0; j<4; j++ ) aPP[i][j] = LS;  // LS: log-domain "zero" sentinel, presumably -- TODO confirm
for( j=0; j<NodeNum; j++ )
{
if( Bcode[j][0] == 0 )
aPP[i][0] = LlrAdd( aPP[i][0], Rcd[i][j]+CodeInf[i][j][0]+Rcd[i+1][Flnk[j][0]] );
else//if( Bcode[j][0] == 1 )
aPP[i][1] = LlrAdd( aPP[i][1], Rcd[i][j]+CodeInf[i][j][0]+Rcd[i+1][Flnk[j][0]] );
if( Bcode[j][1] == 0 )
aPP[i][2] = LlrAdd( aPP[i][2], Rcd[i][j]+CodeInf[i][j][1]+Rcd[i+1][Flnk[j][1]] );
else//if( Bcode[j][1] == 1 )
aPP[i][3] = LlrAdd( aPP[i][3], Rcd[i][j]+CodeInf[i][j][1]+Rcd[i+1][Flnk[j][1]] );
}
Conv::FwSearch( i );
}
// Export the boundary metrics: tmp[] holds beta(0) saved by BwSearch, and
// Rcd[TrellisLen] now holds the final forward metrics.
for( i=0; i<NodeNum; i++ )
{
_StartProb[i] = tmp[i];
_EndProb[i] = Rcd[TrellisLen][i];
}
}
/******************************************************************************************
function : forward search of the BCJR decoding algorithm
******************************************************************************************/
// One step of the forward (alpha) recursion in the log domain: for every state
// of stage i, both outgoing branches contribute Rcd[i][state] + branch metric
// to the successor state at stage i+1, accumulated with LlrAdd.
void Conv::FwSearch( int i )
{
int s;
for( s = 0; s < NodeNum; s++ ) Rcd[i+1][s] = LS;  // log-domain "zero" init
for( s = 0; s < NodeNum; s++ )
{
int next0 = Flnk[s][0];
int next1 = Flnk[s][1];
Rcd[i+1][next0] = LlrAdd( Rcd[i+1][next0], Rcd[i][s] + CodeInf[i][s][0] );
Rcd[i+1][next1] = LlrAdd( Rcd[i+1][next1], Rcd[i][s] + CodeInf[i][s][1] );
}
// Conv::StateNorm( &Rcd[i+1][0] );   // normalization kept disabled, as in the original
}
/******************************************************************************************
function : backward search of the BCJR decoding algorithm
input : _StartProb and _EndProb
output : _Rcd[][4]
******************************************************************************************/
// Backward (beta) recursion of BCJR.  Seeds stage TrellisLen with _EndProb,
// sweeps backwards accumulating both outgoing branches of every state, then
// stashes beta(0) in tmp[] and replaces stage 0 with the a priori start
// metrics so the subsequent forward recursion starts from them.
void Conv::BwSearch( float *_StartProb, float *_EndProb )
{
int t, s;
for( s = 0; s < NodeNum; s++ )
Rcd[TrellisLen][s] = _EndProb[s]; // initialization of the backward search
for( t = TrellisLen - 1; t >= 0; t-- )
{
for( s = 0; s < NodeNum; s++ )
{
float branch0 = Rcd[t+1][Flnk[s][0]] + CodeInf[t][s][0];
float branch1 = Rcd[t+1][Flnk[s][1]] + CodeInf[t][s][1];
Rcd[t][s] = LlrAdd( branch0, branch1 );
}
// StateNorm( &Rcd[t][0] );   // normalization disabled in the original
}
for( s = 0; s < NodeNum; s++ )
{
tmp[s] = Rcd[0][s];          // keep beta(0) for AppDecode's output
Rcd[0][s] = _StartProb[s];   // seed alpha(0)
}
}
/******************************************************************************************
function : normalization the probability of the state
input : Rcd[i][]
******************************************************************************************/
/*void Conv::StateNorm( float *rcd )
{
int i;
float a=0;
for( i=0; i<NodeNum; i++ ) a += rcd[i];
for( i=0; i<NodeNum; i++ ) rcd[i] /= a;
}*/
// Log-domain normalization of one stage of state metrics: subtracts the
// log-sum (computed with LlrAdd) of all entries, so that the exponentials of
// the metrics sum to one.  (The commented-out version above is the same
// operation in the probability domain.)
void Conv::StateNorm( float *rcd )
{
int s;
float logSum = rcd[0];
for( s = 1; s < NodeNum; s++ ) logSum = LlrAdd( logSum, rcd[s] );
for( s = 0; s < NodeNum; s++ ) rcd[s] -= logSum;
}
/******************************************************************************************
function : free the allocated memory
******************************************************************************************/
void Conv::End( )
{
int i, j;
delete [] tmp;
for( i=0; i<NodeNum; i++ )
{
delete [] Flnk[i];
delete [] Bcode[i];
}
delete [] Flnk;
delete [] Bcode;
for( i=0; i<=TrellisLen; i++ ) delete [] Rcd[i];
delete [] Rcd;
for( i=0; i<TrellisLen; i++ )
{
delete [] aPP[i];
for( j=0; j<NodeNum; j++ ) delete [] CodeInf[i][j];
}
delete [] aPP;
for( i=0; i<TrellisLen; i++ ) delete [] CodeInf[i];
delete [] CodeInf;
}
/******************************************************************************************
function : setup Flnk[][]
input : _Npoly and _Dpoly
******************************************************************************************/
// Builds the state-transition table Flnk[state][input] (and, via BranchCode,
// the output table Bcode[][]) for a recursive systematic convolutional code
// with numerator polynomial Npoly and denominator (feedback) polynomial Dpoly,
// both given as bit masks over the shift register taps.
void Conv::SysCodeTable( int Npoly, int Dpoly )
{
int i, j, m, n;
for( i=0; i < NodeNum; i++ )
{
m = (i<<1) & Dpoly;            // shifted register content masked by the feedback taps
for( n=0, j=1; j<=NodeOrder; j++ )
n ^= (m>>j) % 2;               // n = parity (XOR) of the selected feedback taps
n &= (Dpoly%2); //n is the feedback  (forced to 0 when Dpoly has no constant term)
j = i & ( NodeNum / 2 - 1 ); //i is current state, j is next state (MSB dropped before shifting in the new bit)
Flnk[i][0] = n + ( j << 1 );   // successor state for input 0
Flnk[i][1] = ( 1 ^ n ) + ( j << 1);  // successor for input 1 (input XOR feedback enters the register)
Conv::BranchCode( i, n, Npoly );
}
}
/******************************************************************************************
function : setup Bcode[][]
input : _Npoly, current state (i) and feedback input (n)
******************************************************************************************/
// Computes the parity (output) bits Bcode[i][0] and Bcode[i][1] for both input
// branches leaving state i, given feedback bit n: each output is the XOR of the
// register taps selected by the numerator polynomial Npoly.
void Conv::BranchCode( int i, int n, int Npoly )
{
int b, reg0, reg1;
reg0 = ( (i<<1) + n ) & Npoly;        // register content on the n-feedback branch
reg1 = ( (i<<1) + (1^n) ) & Npoly;    // register content on the complementary branch
Bcode[i][0] = 0;
Bcode[i][1] = 0;
for( b = 0; b <= NodeOrder; b++ )
{
Bcode[i][0] ^= ( reg0 >> b ) & 1;
Bcode[i][1] ^= ( reg1 >> b ) & 1;
}
}
// Builds TermTable: for every start state, the NodeOrder (input, parity) pairs
// of the tail sequence that drives the trellis back to the all-zero state.
void Conv::TailGen( )
{
int s, pattern, k, cur;
int *TailState = new int [NodeNum];
// For each state, search all 2^NodeOrder input patterns for one whose
// NodeOrder steps end at state 0 (bit k of the pattern is step k's input).
for( s = 0; s < NodeNum; s++ )
{
for( pattern = 0; pattern < NodeNum; pattern++ )
{
cur = s;
for( k = 0; k < NodeOrder; k++ ) cur = Flnk[cur][ (pattern>>k) & 1 ];
if( cur == 0 )
{
TailState[s] = pattern;
break;   // same effect as the original "j = NodeNum" early-exit trick
}
}
}
// Walk the found tail path once more, recording each step's input bit and
// the corresponding parity bit.
for( s = 0; s < NodeNum; s++ )
{
cur = s;
for( k = 0; k < NodeOrder; k++ )
{
int bit = ( TailState[s] >> k ) & 1;
TermTable[s][2*k] = bit;
TermTable[s][2*k+1] = Bcode[cur][bit];
cur = Flnk[cur][bit];
}
}
delete [] TailState;
}
// Jacobian logarithm: returns log( exp(a) + exp(b) ) computed stably as
// max(a,b) + log( 1 + exp( -(max-min) ) ), so the exp() argument is never positive.
float Conv::LlrAdd( float a, float b )
{
float hi, lo;
if( a > b ) { hi = a; lo = b; }
else        { hi = b; lo = a; }
return hi + (float) log( 1 + exp( lo - hi ) );
}
// End of codec.cpp (paste-site UI residue removed).