sp_sfrm.c
swPnShift = 0;
}
else
{
if (sub(swPnEnergy, 0x1fff) <= 0)
{ /* E = (0x7ff.. 0x1fff] */
swPnShift = 1;
}
else
{
swPnShift = 2; /* E = (0x1fff..0x7fff] */
}
}
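/* Note (assumption, not part of the original comments): the block shift  */
/* appears to be chosen so that later correlation sums over the weighted  */
/* vectors stay within the 32-bit accumulator range; the visible energy   */
/* thresholds map (0x7ff..0x1fff] to a shift of 1 and (0x1fff..0x7fff]    */
/* to a shift of 2, with smaller energies presumably getting no shift.    */
/*-------------------------------------------------------------------------*/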
/* shift pswWSVec down by the shift factor */
/*------------------------------------------*/
for (i = 0; i < S_LEN; i++)
pswWSVec[i] = shr(pswWSVec[i], swPnShift);
if (swVoicingMode > 0)
{
/* Do restricted adaptive codebook (long-term-predictor) search: */
/* the search is restricted to the lags passed in from the */
/* open-loop lag search */
/*---------------------------------------------------------------*/
siCode = closedLoopLagSearch(pswLagList, siNumLags,
pswLtpStateBase, pswHCoefs, pswWSVec,
&swLag, &swLtpShift);
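/* closedLoopLagSearch() presumably returns the index ("siCode") of the   */
/* best candidate from the open-loop lag list, and writes the selected    */
/* (possibly fractional) lag and a scaling shift for the LTP vector into  */
/* swLag and swLtpShift.                                                   */
/*-------------------------------------------------------------------------*/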
/* Construct frame-lag code if this is the first subframe, */
/* or delta-lag code if it is not the first subframe */
/*---------------------------------------------------------*/
if (swVoicingMode > 0)
{
if (giSfrmCnt == 0)
{
siPrevLagCode = siCode;
*psiLagCode = siCode;
}
else
{
*psiLagCode = add(sub(siCode, siPrevLagCode), DELTA_LEVELS / 2);
siPrevLagCode = siCode;
}
}
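/* Informal example of the delta-lag coding above (assuming               */
/* DELTA_LEVELS = 16, i.e. a 4-bit delta code): if the previous           */
/* subframe's lag code was 117 and this subframe's is 120, the            */
/* transmitted value is (120 - 117) + 16/2 = 11, and the decoder          */
/* recovers the lag code as 117 + (11 - 8) = 120.                         */
/*-------------------------------------------------------------------------*/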
/* From the value of the fundamental pitch obtained in the open-loop */
/* lag search, get the correct phase of the interpolating filter, */
/* and scale the coefficients by the Harmonic-Noise-Weighting */
/* coefficient. The result is the interpolating coefficients scaled */
/* by the HNW coefficient. These will be used in all C(z) filtering */
/*-------------------------------------------------------------------*/
get_ipjj(swPitch, &siIntPitch, &siRemainder);
for (i = 0; i < CG_INT_MACS; i++)
{
pswHNWCoefs[i] = mult_r(negate(ppsrCGIntFilt[i][siRemainder]),
swHNWCoef);
}
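/* The loop above builds the taps of the harmonic-noise-weighting filter: */
/* ppsrCGIntFilt[i][siRemainder] is presumably the i-th tap of the        */
/* interpolating filter at fractional phase siRemainder; each tap is      */
/* negated and scaled by swHNWCoef, so the combined filter acts roughly   */
/* as C(z) = 1 - eps * P(z) * z^(-T), with eps the HNW coefficient,       */
/* P(z) the interpolator and T the integer pitch.                         */
/*-------------------------------------------------------------------------*/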
/* Calculate a few values which will speed up C(z) filtering: */
/* "HnwOffset" is the distance in samples from the input sample of */
/* the C(z) filter to the first sample tapped by the interpolating */
/* filter. "HnwNum" is the number of samples which need to be */
/* filtered by C(z) in the zero-state case. */
/*-----------------------------------------------------------------*/
siHnwOffset = sub(-CG_INT_MACS / 2, siIntPitch);
siHnwNum = sub(S_LEN + CG_INT_MACS / 2 - 1, siIntPitch);
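/* Rough example (assuming CG_INT_MACS = 6 and S_LEN = 40): for an        */
/* integer pitch of 35, siHnwOffset = -3 - 35 = -38 and                   */
/* siHnwNum = 40 + 3 - 1 - 35 = 7, i.e. only the trailing portion of the  */
/* vector, whose pitch-lagged taps still fall inside the subframe, needs  */
/* the zero-state C(z) pass; once the integer pitch reaches               */
/* S_LEN + CG_INT_MACS/2 - 1, siHnwNum drops to zero and the pass is      */
/* skipped.                                                                */
/*-------------------------------------------------------------------------*/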
/* Perform C(z) filter on W(z)-weighted speech, get zero-input */
/* response of H(z)C(z) combo, subtract zero-input response */
/* of H(z)C(z) from W(z)C(z)-weighted speech. The result is */
/* the vector to match for the rest of the synthetic */
/* excitation searches in the voiced modes */
/*-------------------------------------------------------------*/
hnwFilt(pswWSpeech, pswWSVec, &pswWSpeech[-1], pswHNWCoefs,
siHnwOffset, 1, S_LEN);
hnwFilt(pswTempVec, pswTempVec, &pswHNWState[HNW_BUFF_LEN - 1],
pswHNWCoefs, siHnwOffset, 1, S_LEN);
for (i = 0; i < S_LEN; i++)
{
pswWSVec[i] = shr(sub(pswWSVec[i], pswTempVec[i]), swPnShift);
}
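/* At this point pswWSVec presumably holds the weighted target for the    */
/* remaining excitation searches: the W(z)C(z)-weighted input speech      */
/* minus the zero-input response of the weighted filter combination       */
/* H(z)C(z), scaled down by the same block shift used above.              */
/*-------------------------------------------------------------------------*/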
/* Reconstruct adaptive codebook (long-term-predictor) vector, */
/* weight it through H(z) and C(z), each with zero state */
/*------------------------------------------------------------*/
fp_ex(swLag, pswLtpStateOut);
for (i = 0; i < S_LEN; i++)
pswPVec[i] = pswLtpStateOut[i];
lpcZsIir(pswPVec, pswHCoefs, pswWPVec);
if (siHnwNum > 0)
{
hnwFilt(pswWPVec, pswWPVec, NULL, pswHNWCoefs, siHnwOffset,
0, siHnwNum);
}
for (i = 0; i < S_LEN; i++)
{
pswPVec[i] = shr(pswPVec[i], swLtpShift);
pswWPVec[i] = shr(pswWPVec[i], swLtpShift);
}
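/* pswPVec now holds the reconstructed adaptive-codebook (LTP) vector and */
/* pswWPVec its H(z)C(z)-weighted version, both scaled down by swLtpShift */
/* to match the scaling chosen in the closed-loop lag search; fp_ex()     */
/* presumably rebuilds the vector from the LTP state at the selected      */
/* (possibly fractional) lag using the interpolating filter.              */
/*-------------------------------------------------------------------------*/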
}
else
{
/* Unvoiced mode: clear all voiced variables */
/*-------------------------------------------*/
swLag = 0;
*psiLagCode = 0;
siHnwNum = 0;
}
/* "NumBasisVecs" will be the number of basis vectors in */
/* the vector-sum codebook(s) */
/*-------------------------------------------------------*/
if (swVoicingMode > 0)
siNumBasisVecs = C_BITS_V;
else
siNumBasisVecs = C_BITS_UV;
/* Filter the basis vectors through H(z) with zero state, and if */
/* voiced, through C(z) with zero state */
/*----------------------------------------------------------------*/
for (i = 0; i < siNumBasisVecs; i++)
{
if (swVoicingMode > 0)
{
lpcZsIir((Shortword *) pppsrVcdCodeVec[0][i], pswHCoefs,
&pswWBasisVecs[i * S_LEN]);
}
else
{
lpcZsIir((Shortword *) pppsrUvCodeVec[0][i], pswHCoefs,
&pswWBasisVecs[i * S_LEN]);
}
if (siHnwNum > 0)
{
hnwFilt(&pswWBasisVecs[i * S_LEN], &pswWBasisVecs[i * S_LEN],
NULL, pswHNWCoefs, siHnwOffset, 0, siHnwNum);
}
}
/* If voiced, make the H(z)C(z)-weighted basis vectors orthogonal to */
/* the H(z)C(z)-weighted adaptive codebook vector */
/*-------------------------------------------------------------------*/
if (swVoicingMode > 0)
decorr(siNumBasisVecs, pswWPVec, pswWBasisVecs);
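/* decorr() presumably performs a Gram-Schmidt style orthogonalization:   */
/* each weighted basis vector b[i] is replaced by                         */
/*     b[i] - ( <b[i], p> / <p, p> ) * p                                  */
/* where p is the weighted adaptive-codebook vector, so the vector-sum    */
/* search cannot re-use energy already supplied by the LTP vector.        */
/*-------------------------------------------------------------------------*/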
/* Do the vector-sum codebook search on the H(z)C(z)-weighted, */
/* orthogonalized basis vectors */
/*-------------------------------------------------------------*/
*psiVSCode1 = v_srch(pswWSVec, pswWBasisVecs, siNumBasisVecs);
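/* In a vector-sum (VSELP) codebook each of the 2^M codevectors is a sum  */
/* of the M basis vectors with +/- signs given by the code bits, so       */
/* v_srch() can presumably evaluate every code from the target-to-basis   */
/* cross-correlations and the basis correlation matrix, updating one sign */
/* flip at a time, and return the code maximizing correlation^2 / energy. */
/*-------------------------------------------------------------------------*/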
/* Construct the chosen vector-sum codebook vector from basis vectors */
/*--------------------------------------------------------------------*/
b_con(*psiVSCode1, siNumBasisVecs, pswBitArray);
if (swVoicingMode > 0)
v_con((Shortword *) pppsrVcdCodeVec[0][0], ppswVselpEx[0], pswBitArray,
siNumBasisVecs);
else
v_con((Shortword *) pppsrUvCodeVec[0][0], ppswVselpEx[0], pswBitArray,
siNumBasisVecs);
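/* b_con() presumably expands the chosen code into an array of +/- signs, */
/* one per basis vector, and v_con() builds the excitation as the         */
/* correspondingly signed sum of the unweighted basis vectors.            */
/*-------------------------------------------------------------------------*/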
if (swVoicingMode == 0)
{
/* Construct the H(z)-weighted 1st-codebook vector */
/*-------------------------------------------------*/
v_con(pswWBasisVecs, ppswWVselpEx[0], pswBitArray, siNumBasisVecs);
/* Filter the 2nd basis vector set through H(z) with zero state */
/*--------------------------------------------------------------*/
for (i = 0; i < siNumBasisVecs; i++)
{
lpcZsIir((Shortword *) pppsrUvCodeVec[1][i], pswHCoefs,
&pswWBasisVecs[i * S_LEN]);
}
/* Make the 2nd set of H(z)-weighted basis vectors orthogonal to the */
/* H(z)-weighted 1st-codebook vector */
/*-------------------------------------------------------------------*/
decorr(siNumBasisVecs, ppswWVselpEx[0], pswWBasisVecs);
/* Do the vector-sum codebook search on the H(z)-weighted, */
/* orthogonalized, 2nd basis vector set */
/*---------------------------------------------------------*/
*psiVSCode2 = v_srch(pswWSVec, pswWBasisVecs, siNumBasisVecs);
/* Construct the chosen vector-sum codebook vector from the 2nd set */
/* of basis vectors */
/*------------------------------------------------------------------*/
b_con(*psiVSCode2, siNumBasisVecs, pswBitArray);
v_con((Shortword *) pppsrUvCodeVec[1][0], ppswVselpEx[1], pswBitArray,
siNumBasisVecs);
}
else
*psiVSCode2 = 0;
/* Filter the 1st-codebook vector through H(z) (also through C(z) */
/* if appropriate) */
/*----------------------------------------------------------------*/
lpcZsIir(ppswVselpEx[0], pswHCoefs, ppswWVselpEx[0]);
if (siHnwNum > 0)
{
hnwFilt(ppswWVselpEx[0], ppswWVselpEx[0], NULL, pswHNWCoefs,
siHnwOffset, 0, siHnwNum);
}
if (swVoicingMode == 0)
{
/* Filter the 2nd-codebook vector through H(z) */
/*---------------------------------------------*/
lpcZsIir(ppswVselpEx[1], pswHCoefs, ppswWVselpEx[1]);
}
/* Get the square-root of the ratio of residual energy to */
/* excitation vector energy for each of the excitation sources */
/*-------------------------------------------------------------*/
if (swVoicingMode > 0)
{
rs_rrNs(pswPVec, snsSqrtRs, &snsRs00);
}
rs_rrNs(ppswVselpEx[0], snsSqrtRs, &snsRs11);
if (swVoicingMode == 0)
{
rs_rrNs(ppswVselpEx[1], snsSqrtRs, &snsRs22);
}
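/* rs_rrNs() presumably returns, in (mantissa, exponent) form, the square */
/* root of RS / Rcc, where RS is the subframe residual energy and Rcc the */
/* energy of the given excitation vector; scaling each excitation by this */
/* factor lets the gain codebook work on energy-normalized vectors.       */
/*-------------------------------------------------------------------------*/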
/* Determine the vector-quantized gains for each of the excitations */
/*------------------------------------------------------------------*/
*psiGsp0Code = g_quant_vl(swVoicingMode, pswWSVec, swPnShift,
pswWPVec,
ppswWVselpEx[0], ppswWVselpEx[1], snsRs00,
snsRs11, snsRs22);
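/* g_quant_vl() presumably searches the joint gain codebook (GSP0) for    */
/* the entry that minimizes the weighted error between the target         */
/* pswWSVec and the gain-scaled sum of the weighted excitation vectors,   */
/* using the rs_rr ratios above for energy normalization.                 */
/*-------------------------------------------------------------------------*/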
} /* DTX mode */
else /* DTX mode */
{ /* DTX mode */
/* swSP == 0, currently in comfort noise insertion mode */ /* DTX mode */
/*------------------------------------------------------*/ /* DTX mode */
/* generate the random codevector */ /* DTX mode */
siNumBasisVecs = C_BITS_UV; /* DTX mode */
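/* Note: in comfort-noise mode the codebook searches are        */ /* DTX mode */
/* skipped; *psiVSCode1 and *psiVSCode2 are presumably           */ /* DTX mode */
/* pseudo-random codes set by the comfort-noise generator       */ /* DTX mode */
/* before this point.                                           */ /* DTX mode */
/*----------------------------------------------------------------*/ /* DTX mode */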
/* build codevector 1 */ /* DTX mode */
b_con(*psiVSCode1, siNumBasisVecs, pswBitArray); /* DTX mode */
v_con((Shortword *) pppsrUvCodeVec[0][0], ppswVselpEx[0], /* DTX mode */
pswBitArray, siNumBasisVecs); /* DTX mode */
/* build codevector 2 */ /* DTX mode */
b_con(*psiVSCode2, siNumBasisVecs, pswBitArray); /* DTX mode */
v_con((Shortword *) pppsrUvCodeVec[1][0], ppswVselpEx[1], /* DTX mode */
pswBitArray, siNumBasisVecs); /* DTX mode */
/* get rs_rr for the two vectors */ /* DTX mode */
rs_rrNs(ppswVselpEx[0], snsSqrtRs, &snsRs11); /* DTX mode */
rs_rrNs(ppswVselpEx[1], snsSqrtRs, &snsRs22); /* DTX mode */
} /* DTX mode */
/* Scale the excitations, each by its gain, and add them. Put the */
/* result at the end of the adaptive codebook (long-term-predictor */
/* state) */
/*-----------------------------------------------------------------*/
if (swVoicingMode == 0)
{
/* unvoiced */
/* -------- */
scaleExcite(ppswVselpEx[0],
pppsrGsp0[swVoicingMode][*psiGsp0Code][0],
snsRs11, ppswVselpEx[0]);
scaleExcite(ppswVselpEx[1],
pppsrGsp0[swVoicingMode][*psiGsp0Code][1],
snsRs22, ppswVselpEx[1]);
/* now combine the two scaled excitations */
/* -------------------------------------- */
for (i = 0; i < S_LEN; i++)
pswTempVec[i] = add(ppswVselpEx[0][i], ppswVselpEx[1][i]);
}
else
{
/* voiced */
/* ------ */
scaleExcite(pswPVec,
pppsrGsp0[swVoicingMode][*psiGsp0Code][0],
snsRs00, pswPVec);
scaleExcite(ppswVselpEx[0],
pppsrGsp0[swVoicingMode][*psiGsp0Code][1],
snsRs11, ppswVselpEx[0]);
/* now combine the two scaled excitations */
/* -------------------------------------- */
for (i = 0; i < S_LEN; i++)
pswTempVec[i] = add(pswPVec[i], ppswVselpEx[0][i]);
}
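/* Sketch of the combination above: each excitation is presumably scaled  */
/* as   ex_i(n) = gain_i * sqrt(RS / R_i) * x_i(n)                        */
/* where gain_i comes from the selected GSP0 gain-codebook entry and      */
/* sqrt(RS / R_i) is the energy normalization from rs_rrNs(); the two     */
/* scaled vectors are then added sample by sample to form the synthetic   */
/* excitation for the subframe.                                           */
/*-------------------------------------------------------------------------*/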
/* Update the long-term-predictor state using the synthetic excitation */
/*---------------------------------------------------------------------*/
for (i = -LTP_LEN; i < -S_LEN; i++)
pswLtpStateOut[i] = pswLtpStateOut[i + S_LEN];
for (i = -S_LEN, j = 0; j < S_LEN; i++, j++)
pswLtpStateOut[i] = pswTempVec[j];
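/* The LTP state is addressed with negative offsets from pswLtpStateOut:  */
/* the first loop shifts the history left by one subframe (S_LEN samples) */
/* and the second appends the new excitation, so indices [-LTP_LEN..-1]   */
/* always hold the most recent LTP_LEN excitation samples for the next    */
/* subframe's adaptive-codebook search.                                    */
/*-------------------------------------------------------------------------*/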
/* Filter the synthetic ...                                               */