
mcenc.cpp

Source code from the companion CD-ROM of "Visual C++ Wavelet Transform Technology and Engineering Practice", edited by Jin Jifang.
C++, page 1 of 4 (the listing breaks off mid-function at the end of this page)
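
// Note: the first lines below are the tail end of a routine whose opening is
// not on this page of the listing; it appears to be the non-shape B-VOP
// macroblock motion-compensation/differencing routine, judging by the
// _WithShape variant that follows.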
        return;
    }
	if (pmbmd->m_mbType == DIRECT || pmbmd->m_mbType == INTERPOLATE) { // Y is done when doing motion estimation
      if (pmbmd->m_mbType == DIRECT)
        motionCompInterpAndErrorDirect (pmvForward, pmvBackward, x, y,prctMVLimitForward,prctMVLimitBackward);
      else
		motionCompInterpAndError (pmvForward, pmvBackward, x, y,prctMVLimitForward,prctMVLimitBackward);
      CoordI xRefUVForward, yRefUVForward;
      mvLookupUVWithShape (pmbmd, pmvForward, xRefUVForward, yRefUVForward);
      motionCompUV (m_ppxlcPredMBU, m_ppxlcPredMBV, m_pvopcRefQ0, x, y, xRefUVForward, yRefUVForward, m_vopmd.iRoundingControl, prctMVLimitForward);
		
      CoordI xRefUVBackward, yRefUVBackward;
      mvLookupUVWithShape (pmbmd, pmvBackward, xRefUVBackward, yRefUVBackward);
      motionCompUV (m_ppxlcPredMBBackU, m_ppxlcPredMBBackV, m_pvopcRefQ1, x, y, xRefUVBackward, yRefUVBackward, m_vopmd.iRoundingControl, prctMVLimitBackward);
      averagePredAndComputeErrorUV ();
	}
	else { 
		const CMotionVector* pmv;
		const PixelC* ppxlcRef; // points to the top-left of the reference VOP
		const PixelC* ppxlcRefZoom; // points to the top-left of the reference VOP
		const CVOPU8YUVBA* pvopcRef;
		CRct *prctMVLimit;
		if (pmbmd->m_mbType == FORWARD) { // Y is done when doing motion estimation
			pmv = pmvForward;
			pvopcRef = m_pvopcRefQ0;
			ppxlcRef = m_pvopcRefQ0->pixelsY (); // points to the top-left of the reference VOP
			ppxlcRefZoom = m_puciRefQZoom0->pixels (); // points to the top-left of the reference VOP
			prctMVLimit = prctMVLimitForward;
		}
		else {
			pmv = pmvBackward;
			pvopcRef = m_pvopcRefQ1;
			ppxlcRef = m_pvopcRefQ1->pixelsY (); // points to the top-left of the reference VOP
			ppxlcRefZoom = m_puciRefQZoom1->pixels (); // points to the top-left of the reference VOP
			prctMVLimit = prctMVLimitBackward;
		}
        if (!m_volmd.bQuarterSample) {
          motionCompEncY (ppxlcRef, ppxlcRefZoom, m_ppxlcPredMBY, MB_SIZE, pmv, x, y,prctMVLimit);
        }
        else {
          motionCompQuarterSample (m_ppxlcPredMBY,
                                   ppxlcRef, MB_SIZE,
                                   4*x + pmv->trueMVHalfPel ().x,
                                   4*y + pmv->trueMVHalfPel ().y,
                                   m_vopmd.iRoundingControl, prctMVLimit
                                   );
        }
        CoordI xRefUV, yRefUV;
		mvLookupUVWithShape (pmbmd, pmv, xRefUV, yRefUV);
		motionCompUV (m_ppxlcPredMBU, m_ppxlcPredMBV, pvopcRef, x, y, xRefUV, yRefUV, m_vopmd.iRoundingControl, prctMVLimit);
		computeTextureError ();
	}
}

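// motionCompAndDiff_BVOP_MB_WithShape: motion-compensates one B-VOP macroblock
// of an arbitrarily shaped object and computes its texture prediction error.
// Field-predicted macroblocks of interlaced VOPs are dispatched first; in all
// other cases the macroblock type selects forward, backward, direct or
// interpolated prediction, and DIRECT/INTERPOLATE average the forward and
// backward predictions.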
Void CVideoObjectEncoder::motionCompAndDiff_BVOP_MB_WithShape (
	const CMotionVector* pmvForward, const CMotionVector* pmvBackward, 
	CMBMode* pmbmd, 
	CoordI x, CoordI y,
	CRct *prctMVLimitForward,
	CRct *prctMVLimitBackward
)
{
// INTERLACED
// new changes
    // Should work for both progressive and interlaced, but keep the original
    // code for now due to differences in direct mode.  (Bob Eifrig)
    if (m_vopmd.bInterlace && pmbmd->m_bFieldMV) {
	    switch (pmbmd->m_mbType) {

	    case FORWARD:
		    motionCompOneBVOPReference(m_pvopcPredMB, FORWARD,  x, y, pmbmd, pmvForward, prctMVLimitForward);
		    computeTextureErrorWithShape();
		    break;

	    case BACKWARD:
		    motionCompOneBVOPReference(m_pvopcPredMB, BACKWARD, x, y, pmbmd, pmvBackward, prctMVLimitBackward);
		    computeTextureErrorWithShape();
		    break;

	    case DIRECT:
		    motionCompDirectMode(x, y, pmbmd,
                &m_rgmvRef[(PVOP_MV_PER_REF_PER_MB*(x + m_iNumMBX * y)) / MB_SIZE],
                prctMVLimitForward, prctMVLimitBackward,0);
		    averagePredAndComputeErrorY_WithShape();
		    averagePredAndComputeErrorUV_WithShape();
		    break;

	    case INTERPOLATE:
		    motionCompOneBVOPReference(m_pvopcPredMB,     FORWARD,  x, y, pmbmd, pmvForward, prctMVLimitForward);
		    motionCompOneBVOPReference(m_pvopcPredMBBack, BACKWARD, x, y, pmbmd, pmvBackward, prctMVLimitBackward);
		    averagePredAndComputeErrorY_WithShape();
		    averagePredAndComputeErrorUV_WithShape();
		    break;
	    }
        return;
    }
// INTERLACED
// end of new changes

	if (pmbmd->m_mbType == DIRECT || pmbmd->m_mbType == INTERPOLATE) { // Y is done when doing motion estimation
      if (pmbmd->m_mbType == DIRECT)
        motionCompInterpAndError_WithShapeDirect (pmvForward, pmvBackward, x, y,prctMVLimitForward,prctMVLimitBackward);
      else
		motionCompInterpAndError_WithShape (pmvForward, pmvBackward, x, y,prctMVLimitForward,prctMVLimitBackward);
      CoordI xRefUVForward, yRefUVForward;
      mvLookupUVWithShape (pmbmd, pmvForward, xRefUVForward, yRefUVForward);
      motionCompUV (m_ppxlcPredMBU, m_ppxlcPredMBV, m_pvopcRefQ0, x, y, xRefUVForward, yRefUVForward, m_vopmd.iRoundingControl, prctMVLimitForward);
		
      CoordI xRefUVBackward, yRefUVBackward;
      mvLookupUVWithShape (pmbmd, pmvBackward, xRefUVBackward, yRefUVBackward);
      motionCompUV (m_ppxlcPredMBBackU, m_ppxlcPredMBBackV, m_pvopcRefQ1, x, y, xRefUVBackward, yRefUVBackward, m_vopmd.iRoundingControl, prctMVLimitBackward);
      averagePredAndComputeErrorUV_WithShape ();
	}
	else { 
		const CMotionVector* pmv;
		const PixelC* ppxlcRef; // points to the top-left of the reference VOP
		const PixelC* ppxlcRefZoom; // points to the top-left of the reference VOP
		const CVOPU8YUVBA* pvopcRef;
		CRct *prctMVLimit;
		if (pmbmd->m_mbType == FORWARD) { // Y is done when doing motion estimation
			pmv = pmvForward;
			pvopcRef = m_pvopcRefQ0;
			ppxlcRef = m_pvopcRefQ0->pixelsY (); // points to the top-left of the reference VOP
			ppxlcRefZoom = m_puciRefQZoom0->pixels (); // points to the top-left of the reference VOP
			prctMVLimit = prctMVLimitForward;
		}
		else {
			pmv = pmvBackward;
			pvopcRef = m_pvopcRefQ1;
			ppxlcRef = m_pvopcRefQ1->pixelsY (); // points to the top-left of the reference VOP
			ppxlcRefZoom = m_puciRefQZoom1->pixels (); // points to the top-left of the reference VOP
			prctMVLimit = prctMVLimitBackward;
		}
        if (!m_volmd.bQuarterSample) {
          motionCompEncY (ppxlcRef, ppxlcRefZoom, m_ppxlcPredMBY, MB_SIZE, pmv, x, y,prctMVLimit);
        }
        else {
          // printf("MC BVOP with Shape:\n\t(xRef,yRef) = \t(%4d,%4d)\n\t(xV,yV) = \t((%4d,%4d)\n",x,y,pmv->trueMVHalfPel ().x,pmv->trueMVHalfPel ().y);
          motionCompQuarterSample (m_ppxlcPredMBY,
                                   ppxlcRef, MB_SIZE,
                                   4*x + pmv->trueMVHalfPel ().x,
                                   4*y + pmv->trueMVHalfPel ().y,
                                   m_vopmd.iRoundingControl, prctMVLimit
                                   );
        }
		CoordI xRefUV, yRefUV;
		mvLookupUVWithShape (pmbmd, pmv, xRefUV, yRefUV);
		motionCompUV (m_ppxlcPredMBU, m_ppxlcPredMBV, pvopcRef, x, y, xRefUV, yRefUV, m_vopmd.iRoundingControl, prctMVLimit);
		computeTextureErrorWithShape ();
	}
}

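// motionCompAndDiffAlpha_BVOP_MB: motion compensation and prediction error for
// the grayscale-alpha (auxiliary) planes of a B-VOP macroblock, looping over
// every auxiliary component.  The texture motion vectors are reused; DIRECT
// and INTERPOLATE average the forward and backward alpha predictions with
// rounding, mirroring the luminance/chrominance path.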
Void CVideoObjectEncoder::motionCompAndDiffAlpha_BVOP_MB (
	const CMotionVector* pmvForward, const CMotionVector* pmvBackward, 
	const CMBMode* pmbmd, 
	CoordI x, CoordI y,
	CRct *prctMVLimitForward,CRct *prctMVLimitBackward
)
{
  for(Int iAuxComp=0;iAuxComp<m_volmd.iAuxCompCount;iAuxComp++) { // MAC (SB) 29-Nov-99
    if (pmbmd->m_mbType == DIRECT || pmbmd->m_mbType == INTERPOLATE) {
      if (pmbmd->m_mbType == DIRECT) { //changed by mwi
        motionCompMBAEncDirect (pmvForward, pmbmd, m_ppxlcPredMBA, m_pvopcRefQ0,
          x, y, 0, prctMVLimitForward, iAuxComp);
        motionCompMBAEncDirect (pmvBackward, pmbmd, m_ppxlcPredMBBackA, m_pvopcRefQ1,
          x, y, 0, prctMVLimitBackward, iAuxComp);
      }
      else {
        motionCompMBAEnc (pmvForward, pmbmd, m_ppxlcPredMBA, m_pvopcRefQ0,
          x, y, 0, prctMVLimitForward,0,iAuxComp);
        motionCompMBAEnc (pmvBackward, pmbmd, m_ppxlcPredMBBackA, m_pvopcRefQ1,
          x, y, 0, prctMVLimitBackward,1,iAuxComp);
      }
      
      // average predictions
      Int i;
      for(i = 0; i<MB_SQUARE_SIZE; i++)
        m_ppxlcPredMBA[iAuxComp][i] = (m_ppxlcPredMBA[iAuxComp][i] + m_ppxlcPredMBBackA[iAuxComp][i] + 1)>>1;
    }
    else { 
      const CMotionVector* pmv;
      const PixelC* ppxlcRef; // points to the top-left of the reference VOP
      CRct *prctMVLimit;
      if (pmbmd->m_mbType == FORWARD) {
        pmv = pmvForward;
        ppxlcRef = m_pvopcRefQ0->pixelsA (iAuxComp); // points to the top-left of the reference VOP
        prctMVLimit = prctMVLimitForward;
        // 12.22.98 begin of changes
        if(m_vopmd.bInterlace&&pmbmd->m_bFieldMV)
          motionCompMBAEnc (pmvForward, pmbmd, m_ppxlcPredMBA, m_pvopcRefQ0,
          x, y, 0, prctMVLimitForward,0,iAuxComp);
        // end of changes
      }
      else {
        pmv = pmvBackward;
        ppxlcRef = m_pvopcRefQ1->pixelsA (iAuxComp); // points to the top-left of the reference VOP
        prctMVLimit = prctMVLimitBackward;
        // 12.22.98 begin of changes
        if(m_vopmd.bInterlace&&pmbmd->m_bFieldMV)
          motionCompMBAEnc (pmvBackward, pmbmd, m_ppxlcPredMBA, m_pvopcRefQ1,
          x, y, 0, prctMVLimitBackward,1,iAuxComp);
        // end of changes
      }
      if (!m_vopmd.bInterlace || (m_vopmd.bInterlace && !pmbmd->m_bFieldMV)) { // 12.22.98 changes
        if (!m_volmd.bQuarterSample) // Quarterpel, added by mwi
          motionComp (
            m_ppxlcPredMBA[iAuxComp],
            ppxlcRef,
            MB_SIZE, // MB size
            x * 2 + pmv->trueMVHalfPel ().x,
            y * 2 + pmv->trueMVHalfPel ().y,
            0,
            prctMVLimit
            );
        else
          motionCompQuarterSample (m_ppxlcPredMBA[iAuxComp],
            ppxlcRef, MB_SIZE,
            4*x + pmv->trueMVHalfPel ().x,
            4*y + pmv->trueMVHalfPel ().y,
            m_vopmd.iRoundingControl, prctMVLimit
            );
        // ~Quarterpel, added by mwi
      }
    }
    computeAlphaError();
  }
}

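// averagePredAndComputeErrorUV: averages the forward and backward chrominance
// predictions with rounding,
//     pred = (predForward + predBackward + 1) >> 1,
// and stores the remaining error (current - pred) for the U and V blocks.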
Void CVideoObjectEncoder::averagePredAndComputeErrorUV ()
{
	Int i = 0;
	for (i = 0; i < BLOCK_SQUARE_SIZE; i++) {
		m_ppxlcPredMBU [i] = (m_ppxlcPredMBU [i] + m_ppxlcPredMBBackU [i] + 1) >> 1;
		m_ppxlcPredMBV [i] = (m_ppxlcPredMBV [i] + m_ppxlcPredMBBackV [i] + 1) >> 1;
		m_ppxliErrorMBU [i] = m_ppxlcCurrMBU [i] - m_ppxlcPredMBU [i];
		m_ppxliErrorMBV [i] = m_ppxlcCurrMBV [i] - m_ppxlcPredMBV [i];
	}
}

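// Shape-aware variant: pixels whose chrominance shape is transparent get a
// zero prediction error instead of being averaged and differenced.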
Void CVideoObjectEncoder::averagePredAndComputeErrorUV_WithShape ()
{
	Int i = 0;
	for (i = 0; i < BLOCK_SQUARE_SIZE; i++) {
		if (m_ppxlcCurrMBBUV [i] == transpValue)
			m_ppxliErrorMBU [i] = m_ppxliErrorMBV [i] = 0;
		else {
			m_ppxlcPredMBU [i] = (m_ppxlcPredMBU [i] + m_ppxlcPredMBBackU [i] + 1) >> 1;
			m_ppxlcPredMBV [i] = (m_ppxlcPredMBV [i] + m_ppxlcPredMBBackV [i] + 1) >> 1;
			m_ppxliErrorMBU [i] = m_ppxlcCurrMBU [i] - m_ppxlcPredMBU [i];
			m_ppxliErrorMBV [i] = m_ppxlcCurrMBV [i] - m_ppxlcPredMBV [i];
		}
	}
}

// B-VOP ME/MC stuff
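// interpolateAndDiffY: forms the interpolated (bidirectional) luminance
// prediction for one macroblock (half-pel compensation via motionCompEncY, or
// quarter-pel via motionCompQuarterSample when quarter sampling is enabled)
// and returns the SAD between the current macroblock and the rounded average
// of the two predictions, presumably used as the interpolation-mode cost
// during B-VOP motion estimation.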
Int CVideoObjectEncoder::interpolateAndDiffY (
                                              const CMotionVector* pmvForward, const CMotionVector* pmvBackward, 
                                              CoordI x, CoordI y,
                                              CRct *prctMVLimitForward,CRct *prctMVLimitBackward
                                              )
{
  if (!m_volmd.bQuarterSample) {
    motionCompEncY (
                    m_pvopcRefQ0->pixelsY (), 
                    m_puciRefQZoom0->pixels (),
                    m_ppxlcPredMBY, MB_SIZE, pmvForward, x, y,
                    prctMVLimitForward
                    );
    motionCompEncY (
                    m_pvopcRefQ1->pixelsY (), 
                    m_puciRefQZoom1->pixels (),
                    m_ppxlcPredMBBackY, MB_SIZE, pmvBackward, x, y,
                    prctMVLimitBackward
                    );
  }
  else {
    motionCompQuarterSample (m_ppxlcPredMBY,
                             m_pvopcRefQ0->pixelsY (), 
                             MB_SIZE,
                             4*x + 2 * pmvForward->iMVX + pmvForward->iHalfX,
                             4*y + 2 * pmvForward->iMVY + pmvForward->iHalfY,
                             m_vopmd.iRoundingControl,
                             prctMVLimitForward
                             );

    motionCompQuarterSample (m_ppxlcPredMBBackY,
                             m_pvopcRefQ1->pixelsY (), 
                             MB_SIZE,
                             4*x + 2 * pmvBackward->iMVX + pmvBackward->iHalfX,
                             4*y + 2 * pmvBackward->iMVY + pmvBackward->iHalfY,
                             m_vopmd.iRoundingControl,
                             prctMVLimitBackward
                             );
  }
  /*Int iLeft  = (x << 1) + pmvBackward->m_vctTrueHalfPel.x;
	Int iRight = (y << 1) + pmvBackward->m_vctTrueHalfPel.y;	

	//make sure don't end up with a mv that points out of a frame; object-based not done
	if (!m_puciRefQZoom1->where ().includes (CRct (iLeft, iRight, iLeft + (MB_SIZE << 1), iRight + (MB_SIZE << 1))))
    return 1000000000;
	else*/
  Int ic;
  Int iSAD = 0;
  for (ic = 0; ic < MB_SQUARE_SIZE; ic++)
    iSAD += abs (m_ppxlcCurrMBY [ic] - ((m_ppxlcPredMBY [ic] + m_ppxlcPredMBBackY [ic] + 1) >> 1));
  return iSAD;
}

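// Shape-aware variant of interpolateAndDiffY: identical prediction, but pixels
// outside the object (binary shape equal to transpValue) are excluded from the
// SAD.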
Int CVideoObjectEncoder::interpolateAndDiffY_WithShape (
                                                        const CMotionVector* pmvForward, const CMotionVector* pmvBackward, 
                                                        CoordI x, CoordI y,
                                                        CRct *prctMVLimitForward,CRct *prctMVLimitBackward
                                                        )
{
  if (!m_volmd.bQuarterSample) {
    motionCompEncY (
                    m_pvopcRefQ0->pixelsY (), 
                    m_puciRefQZoom0->pixels (),
                    m_ppxlcPredMBY, MB_SIZE, pmvForward, x, y,
                    prctMVLimitForward
                    );
    motionCompEncY (
                    m_pvopcRefQ1->pixelsY (), 
                    m_puciRefQZoom1->pixels (),
                    m_ppxlcPredMBBackY, MB_SIZE, pmvBackward, x, y,
                    prctMVLimitBackward
                    );
  }
  else {
    motionCompQuarterSample (m_ppxlcPredMBY,
                             m_pvopcRefQ0->pixelsY (), 
                             MB_SIZE,
                             4*x + 2 * pmvForward->iMVX + pmvForward->iHalfX,
                             4*y + 2 * pmvForward->iMVY + pmvForward->iHalfY,
                             m_vopmd.iRoundingControl,
                             prctMVLimitForward
                             );

    motionCompQuarterSample (m_ppxlcPredMBBackY,
                             m_pvopcRefQ1->pixelsY (), 
                             MB_SIZE,
                             4*x + 2 * pmvBackward->iMVX + pmvBackward->iHalfX,
                             4*y + 2 * pmvBackward->iMVY + pmvBackward->iHalfY,
                             m_vopmd.iRoundingControl,
                             prctMVLimitBackward
                             );
  }
  Int ic;
  Int iSAD = 0;
  for (ic = 0; ic < MB_SQUARE_SIZE; ic++) {
    if (m_ppxlcCurrMBBY [ic] != transpValue)
      iSAD += abs (m_ppxlcCurrMBY [ic] - ((m_ppxlcPredMBY [ic] + m_ppxlcPredMBBackY [ic] + 1) >> 1));
  }
  return iSAD;
}

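// interpolateAndDiffYField: interlaced counterpart of interpolateAndDiffY.
// The top and bottom fields of the luminance macroblock are compensated
// separately, each from the forward reference field selected by
// m_bForwardTop / m_bForwardBottom, with half-pel or quarter-pel accuracy;
// the backward-field and averaging steps apparently continue past the end of
// this listing page.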
Int CVideoObjectEncoder::interpolateAndDiffYField(
	const CMotionVector* pmvFwdTop,
	const CMotionVector* pmvFwdBot,
	const CMotionVector* pmvBakTop,
	const CMotionVector* pmvBakBot,
	CoordI x, CoordI y,
	CMBMode *pmbmd
)
{
  CoordI xQP = 4*x;
  CoordI yQP = 4*y;  
	x <<= 1;
	y <<= 1;
    if (m_volmd.bQuarterSample) // Quarter sample, added by mwi
          motionCompQuarterSample (
                                   m_ppxlcPredMBY, 
                                   m_pvopcRefQ0->pixelsY () + pmbmd->m_bForwardTop * m_iFrameWidthY,
                                   0,
                                   xQP + 2*pmvFwdTop->iMVX + pmvFwdTop->iHalfX,
                                   yQP + 2*pmvFwdTop->iMVY + pmvFwdTop->iHalfY,
                                   m_vopmd.iRoundingControl, &m_rctRefVOPY0
                                   );
    else      
      motionCompYField(m_ppxlcPredMBY,
                       m_pvopcRefQ0->pixelsY () + pmbmd->m_bForwardTop * m_iFrameWidthY,
                       x + pmvFwdTop->m_vctTrueHalfPel.x, y + pmvFwdTop->m_vctTrueHalfPel.y,
	                   &m_rctRefVOPY0); // added by Y.Suzuki for the extended bounding box support

    if (m_volmd.bQuarterSample) // Quarter sample, added by mwi
          motionCompQuarterSample (
                                   m_ppxlcPredMBY + MB_SIZE, 
                                   m_pvopcRefQ0->pixelsY () + pmbmd->m_bForwardBottom * m_iFrameWidthY,
                                   0,
                                   xQP + 2*pmvFwdBot->iMVX + pmvFwdBot->iHalfX,
                                   yQP + 2*pmvFwdBot->iMVY + pmvFwdBot->iHalfY,
                                   m_vopmd.iRoundingControl, &m_rctRefVOPY0
                                   );
    else      
      motionCompYField(m_ppxlcPredMBY + MB_SIZE,
                       m_pvopcRefQ0->pixelsY () + pmbmd->m_bForwardBottom * m_iFrameWidthY,
                       x + pmvFwdBot->m_vctTrueHalfPel.x, y + pmvFwdBot->m_vctTrueHalfPel.y,
