/* mp4decvopp.c */
                }
                if (mp4_DecodeCBPY_P(pInfo, &cbpy, mb_type) != MP4_STATUS_OK)
                    return MP4_STATUS_ERROR;
                if (mb_type == IPPVC_MBTYPE_INTER_Q || mb_type == IPPVC_MBTYPE_INTRA_Q)
                    mp4_UpdateQuant(pInfo, quant);
                // coded block pattern: bits 5..2 = four luma blocks (CBPY), bits 1..0 = Cb, Cr (CBPC)
                pat = (cbpy << 2) + cbpc;
                pMBinfoMT->pat = (Ipp8u)pat;
                if (mb_type >= IPPVC_MBTYPE_INTRA) {
                    mp4_ReconstructCoeffsIntraMB_SVH(pInfo, pMBinfoMT->dctCoeffs, pMBinfoMT->lnz, pat, quant);
                    pMBinfo->mv[0].dx = pMBinfo->mv[0].dy = 0;
                } else {
                    if (mp4_PredictDecodeMV(pInfo, pMBinfo, pInfo->VisualObject.VideoObject.VideoObjectPlaneH263.frGOB, curRow, j) != MP4_STATUS_OK) {
                        mp4_Error("Error when decoding motion vector");
                        return MP4_STATUS_ERROR;
                    }
                    mp4_ReconstructCoeffsInterMB_SVH(pInfo, pMBinfoMT->dctCoeffs, pMBinfoMT->lnz, pat, quant);
                }
            }
            mp4_StatisticInc_(&pInfo->VisualObject.Statistic.nMB);
            pMBinfo ++;
            pMBinfoMT ++;
            mp4_CheckDecodeGOB_SVH(pInfo, pInfo->VisualObject.VideoObject.VideoObjectPlaneH263.nmb, pInfo->VisualObject.VideoObject.VideoObjectPlaneH263.frGOB, curRow, quant);
        }
        pInfo->VisualObject.VideoObject.VideoObjectPlaneH263.vop_quant = quant;
    } else {
        quant = quantPred = pInfo->VisualObject.VideoObject.VideoObjectPlane.quant;
        scan = pInfo->VisualObject.VideoObject.VideoObjectPlane.alternate_vertical_scan_flag ? IPPVC_SCAN_VERTICAL : IPPVC_SCAN_ZIGZAG;
        interlaced = pInfo->VisualObject.VideoObject.interlaced;
        pMVField = interlaced ? pInfo->VisualObject.VideoObject.FieldMV + curRow * mbPerRow * 2 : 0;
        // init for non-interlaced
        field_prediction = dct_type = mb_ftfr = mb_fbfr = 0;
        // reset B-prediction blocks on new row
        mp4_ResetIntraPredBblock(pInfo);
        for (j = 0; j < mbPerRow;) {
            mb_not_coded = mp4_GetBit(pInfo);
            mb_type = IPPVC_MBTYPE_INTER;
            if (!mb_not_coded)
                if (mp4_DecodeMCBPC_P(pInfo, &mb_type, &cbpc, 1) != MP4_STATUS_OK)
                    return MP4_STATUS_ERROR;
            if (mb_type != IPPVC_MB_STUFFING) {
                pMBinfo->validPred = 1;
                if (!mb_not_coded) {
                    if (mb_type >= IPPVC_MBTYPE_INTRA)
                        ac_pred_flag = mp4_GetBit(pInfo);
                    if (mp4_DecodeCBPY_P(pInfo, &cbpy, mb_type) != MP4_STATUS_OK)
                        return MP4_STATUS_ERROR;
                    quantPred = quant;
                    if (mb_type == IPPVC_MBTYPE_INTER_Q || mb_type == IPPVC_MBTYPE_INTRA_Q)
                        mp4_UpdateQuant(pInfo, quant);
                    if (interlaced) {
                        dct_type = 0;
                        field_prediction = 0;
                        if (mb_type >= IPPVC_MBTYPE_INTRA || (cbpy + cbpc) != 0)
                            dct_type = mp4_GetBit(pInfo);    // field DCT flag
                        if (mb_type == IPPVC_MBTYPE_INTER || mb_type == IPPVC_MBTYPE_INTER_Q) {
                            field_prediction = mp4_GetBit(pInfo);
                            if (field_prediction) {
                                mb_ftfr = mp4_GetBit(pInfo); // forward top field reference
                                mb_fbfr = mp4_GetBit(pInfo); // forward bottom field reference
                            }
                        }
                    }
                    pat = (cbpy << 2) + cbpc;
                    pMBinfoMT->pat = (Ipp8u)pat;
                    if (mb_type >= IPPVC_MBTYPE_INTRA) {
                        if (curRow == 0 && j == 0)
                            quantPred = quant;
                        // use intra DC VLC while the predicted quantiser is below the intra_dc_vlc_thr threshold
                        dcVLC = (quantPred < mp4_DC_vlc_Threshold[pInfo->VisualObject.VideoObject.VideoObjectPlane.intra_dc_vlc_thr]) ? 1 : 0;
                        if (mp4_ReconstructCoeffsIntraMB(pInfo, j, pat, quant, dcVLC, ac_pred_flag, pMBinfoMT->dctCoeffs, pMBinfoMT->lnz) != MP4_STATUS_OK) {
                            mp4_Error("Error when decoding coefficients of Intra block");
                            return MP4_STATUS_ERROR;
                        }
                        mp4_Zero4MV(pMBinfo->mv);
                    } else {
                        mp4_UpdateIntraPredBuffInvalid(pInfo, j);
                        if (!field_prediction) {
                            if (mb_type != IPPVC_MBTYPE_INTER4V) {
                                if (mp4_PredictDecode1MV(pInfo, pMBinfo, curRow, j) != MP4_STATUS_OK) {
                                    mp4_Error("Error when decoding motion vector");
                                    return MP4_STATUS_ERROR;
                                }
                                pMBinfo->mv[1] = pMBinfo->mv[2] = pMBinfo->mv[3] = pMBinfo->mv[0];
                            } else {
                                if (mp4_PredictDecode4MV(pInfo, pMBinfo, curRow, j) != MP4_STATUS_OK) {
                                    mp4_Error("Error when decoding motion vector");
                                    return MP4_STATUS_ERROR;
                                }
                            }
                        } else {
                            if (mp4_PredictDecodeFMV(pInfo, pMBinfo, curRow, j, &pMVField[0], &pMVField[1]) != MP4_STATUS_OK) {
                                mp4_Error("Error when decoding motion vector");
                                return MP4_STATUS_ERROR;
                            }
                        }
                        mp4_ReconstructCoeffsInterMB(pInfo, pMBinfoMT->dctCoeffs, pMBinfoMT->lnz, pat, 0, scan, quant);
                    }
                } else {
                    // macroblock is not coded
                    mp4_UpdateIntraPredBuffInvalid(pInfo, j);
                    mp4_StatisticInc_(&pInfo->VisualObject.Statistic.nMB_NOTCODED);
                    mp4_Zero4MV(pMBinfo->mv);
                    field_prediction = 0;
                }
                mp4_StatisticInc_(&pInfo->VisualObject.Statistic.nMB);
                pMBinfo->not_coded = (Ipp8u)mb_not_coded;
                pMBinfo->type = (Ipp8u)mb_type;
                if (interlaced) {
                    pMBinfoMT->dct_type = (Ipp8u)dct_type;
                    pMBinfo->field_info = (Ipp8u)(field_prediction + (mb_ftfr << 1) + (mb_fbfr << 2));
                    pMVField += 2;
                }
                pMBinfo ++;
                pMBinfoMT ++;
                j ++;
            }
            if (!pInfo->VisualObject.VideoObject.resync_marker_disable) {
                int found;
                if (mp4_DecodeVideoPacket(pInfo, &quant, &found) == MP4_STATUS_OK) {
                    if (found) {
                        // reset Intra prediction buffer on new Video_packet
                        mp4_ResetIntraPredBuffer(pInfo);
                        // mark MBs of the previous videopacket as invalid for prediction
                        for (i = 1; i <= (curRow == 0 ? j : mbPerRow + 1); i ++) {
                            pMBinfo[-i].validPred = 0;
                        }
                    }
                } else
                    return MP4_STATUS_ERROR;
            }
        }
        pInfo->VisualObject.VideoObject.VideoObjectPlane.quant = quant;
    }
    return MP4_STATUS_OK;
}

static void mp4_DecodeVOP_P_ReconSlice(mp4_Info* pInfo, int curRow, mp4_MacroBlockMT* pMBinfoMT)
{
    int j, dx, dy, mbPerRow, pYoff23;
    int stepYr, stepYc, stepCbr, stepCbc, stepCrr, stepCrc, stepFc[6], stepY;
    Ipp8u *pYc, *pCbc, *pCrc, *pYr, *pCbr, *pCrr, *pFc[6];
    int mb_not_coded, mb_type, pat, scan, obmc_disable, rt, quarter_sample;
    int interlaced, field_prediction, dct_type, mb_ftfr, mb_fbfr;
    IppiRect limitRectL, limitRectC;
    mp4_MacroBlock *pMBinfo;
    IppMotionVector mvCur[4], mvCbCr, mvCbCrT, mvCbCrB, *pMVField, mvTmpT, mvTmpB;
    Ipp16s* coeffMB;

    mbPerRow = pInfo->VisualObject.VideoObject.MacroBlockPerRow;
    stepYc = pInfo->VisualObject.cFrame.stepY;
    stepYr = pInfo->VisualObject.rFrame.stepY;
    stepCbc = pInfo->VisualObject.cFrame.stepCb;
    stepCbr = pInfo->VisualObject.rFrame.stepCb;
    stepCrc = pInfo->VisualObject.cFrame.stepCr;
    stepCrr = pInfo->VisualObject.rFrame.stepCr;
    pYc = pInfo->VisualObject.cFrame.pY + curRow * 16 * stepYc;
    pCbc = pInfo->VisualObject.cFrame.pCb + curRow * 8 * stepCbc;
    pCrc = pInfo->VisualObject.cFrame.pCr + curRow * 8 * stepCrc;
    pYr = pInfo->VisualObject.rFrame.pY + curRow * 16 * stepYr;
    pCbr = pInfo->VisualObject.rFrame.pCb + curRow * 8 * stepCbr;
    pCrr = pInfo->VisualObject.rFrame.pCr + curRow * 8 * stepCrr;
    dy = curRow * 16;
    dx = 0;
    stepFc[0] = stepFc[1] = stepFc[2] = stepFc[3] = stepYc;
    stepFc[4] = stepCbc;
    stepFc[5] = stepCrc;
    // Bounding rectangle for MV limitation
    limitRectL.x = -16;
    limitRectL.y = -16;
    limitRectL.width = pInfo->VisualObject.VideoObject.width + 32;
    limitRectL.height = pInfo->VisualObject.VideoObject.height + 32;
    pMBinfo = pInfo->VisualObject.VideoObject.MBinfo + curRow * mbPerRow;
    if (pInfo->VisualObject.VideoObject.short_video_header) {
        for (j = 0; j < mbPerRow; j ++) {
            if (pMBinfo->not_coded) {
                // not coded: copy the co-located macroblock from the reference frame
                ippiCopy16x16_8u_C1R(pYr, stepYr, pYc, stepYc);
                ippiCopy8x8_8u_C1R(pCbr, stepCbr, pCbc, stepCbc);
                ippiCopy8x8_8u_C1R(pCrr, stepCrr, pCrc, stepCrc);
            } else {
                coeffMB = pMBinfoMT->dctCoeffs;
                if (pMBinfo->type >= IPPVC_MBTYPE_INTRA) {
                    pFc[0] = pYc;
                    pFc[1] = pYc + 8;
                    pFc[2] = pYc + 8 * stepYc;
                    pFc[3] = pYc + 8 * stepYc + 8;
                    pFc[4] = pCbc;
                    pFc[5] = pCrc;
                    mp4_DCTInvCoeffsIntraMB(coeffMB, pMBinfoMT->lnz, pFc, stepFc);
                } else {
                    mp4_LimitMV(&pMBinfo->mv[0], &mvCur[0], &limitRectL, dx, dy, 16);
                    mp4_ComputeChromaMV(&mvCur[0], &mvCbCr);
                    pat = pMBinfoMT->pat;
                    mp4_DCTInvCoeffsInterMB_SVH(coeffMB, pMBinfoMT->lnz, pat);
                    mp4_Copy16x16HP_8u(pYr, stepYr, pYc, stepYc, &mvCur[0], 0);
                    // pat bits 32/16/8/4 select luma blocks 0..3; bits 2/1 select Cb/Cr
                    mp4_AddResidual(pat & 32, pYc, stepYc, coeffMB);
                    mp4_AddResidual(pat & 16, pYc+8, stepYc, coeffMB+64);
                    mp4_AddResidual(pat & 8, pYc+stepYc*8, stepYc, coeffMB+128);
                    mp4_AddResidual(pat & 4, pYc+stepYc*8+8, stepYc, coeffMB+192);
                    mp4_MC_HP(pat & 2, pCbr, stepCbr, pCbc, stepCbc, coeffMB+256, &mvCbCr, 0);
                    mp4_MC_HP(pat & 1, pCrr, stepCrr, pCrc, stepCrc, coeffMB+320, &mvCbCr, 0);
                }
            }
            pMBinfo ++;
            pMBinfoMT ++;
            pYc += 16;
            pCrc += 8;
            pCbc += 8;
            pYr += 16;
            pCrr += 8;
            pCbr += 8;
            dx += 16;
        }
    } else {
        rt = pInfo->VisualObject.VideoObject.VideoObjectPlane.rounding_type;
        quarter_sample = pInfo->VisualObject.VideoObject.quarter_sample;
        obmc_disable = pInfo->VisualObject.VideoObject.obmc_disable;
        scan = pInfo->VisualObject.VideoObject.VideoObjectPlane.alternate_vertical_scan_flag ? IPPVC_SCAN_VERTICAL : IPPVC_SCAN_ZIGZAG;
        // Bounding rectangles for MV limitation
        limitRectC.x = -8;
        limitRectC.y = -8;
        limitRectC.width = (pInfo->VisualObject.VideoObject.width >> 1) + 16;
        limitRectC.height = (pInfo->VisualObject.VideoObject.height >> 1) + 16;
        interlaced = pInfo->VisualObject.VideoObject.interlaced;
        pMVField = interlaced ? pInfo->VisualObject.VideoObject.FieldMV + curRow * mbPerRow * 2 : 0;
        // init for non-interlaced
        stepY = stepYc;
        pYoff23 = 8 * stepYc;
        field_prediction = dct_type = mb_ftfr = mb_fbfr = 0;
        // silence warning "variable may be used without having been initialized"
        mvCbCr.dx = mvCbCr.dy = mvCbCrT.dx = mvCbCrT.dy = mvCbCrB.dx = mvCbCrB.dy = 0;
        for (j = 0; j < mbPerRow; j ++) {
            mb_not_coded = pMBinfo->not_coded;
            mb_type = pMBinfo->type;
            coeffMB = pMBinfoMT->dctCoeffs;
            if (mb_type >= IPPVC_MBTYPE_INTRA) {