⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 h263decframe.c

📁 audio-video-codecs.rar语音编解码器
💻 C
📖 第 1 页 / 共 3 页
字号:
  /* real time = pInfo->VideoSequence.pic_time/1800000 */
  if (VPic->prev_clock_divisor) {
    time_factor = VPic->prev_clock_divisor * (1000 + VPic->prev_clock_conversion_code);
  } else {
    time_factor = H263_DEFAULT_FRAME_INTERVAL;
  }

  if (VPic->picture_coding_type == H263_PIC_TYPE_PB || VPic->picture_coding_type == H263_PIC_TYPE_iPB)
    pInfo->VideoSequence.pic_time_pb = pInfo->VideoSequence.prevP_pic_time + VPic->temporal_reference_B * time_factor;

  if (VPic->picture_coding_type != H263_PIC_TYPE_B) {
    pic_time = pInfo->VideoSequence.pic_time = pInfo->VideoSequence.prevP_pic_time + temp_ref_delta * time_factor;

    pInfo->VideoSequence.ref_temporal_reference = pInfo->VideoSequence.prevP_temporal_reference;
    pInfo->VideoSequence.prevP_temporal_reference = VPic->temporal_reference;

    pInfo->VideoSequence.ref_pic_time = pInfo->VideoSequence.prevP_pic_time;
    pInfo->VideoSequence.prevP_pic_time = pInfo->VideoSequence.pic_time;
  } else
    pic_time = pInfo->VideoSequence.ref_pic_time + temp_ref_delta * time_factor;

  switch (VPic->picture_coding_type) {
  case H263_PIC_TYPE_I :
    /* set new video frame */
    if (pInfo->VideoSequence.PicIndex == 0) {
      pInfo->VideoSequence.vFrame = NULL;
    } else {
      h263_SWAP(h263_Frame, pInfo->VideoSequence.rFrame, pInfo->VideoSequence.cFrame);
      if (pInfo->VideoSequence.Bpics_to_show > 0) {
        pInfo->VideoSequence.vFrame = &pInfo->VideoSequence.nFrame;
        pInfo->VideoSequence.Bpics_to_show--;
        pInfo->VideoSequence.Ppics_to_show++;
      } else
        pInfo->VideoSequence.vFrame = &pInfo->VideoSequence.rFrame;
    }
#ifdef _OMP_KARABAS
    if (/*!pInfo->VideoSequence.data_partitioned && */ pInfo->number_threads > 1)
      status = h263_DecodeFrame_I_MT(pInfo);
    else
#endif
    status = h263_DecodeFrame_I(pInfo);
    h263_PadFrame(&pInfo->VideoSequence.cFrame);

    pInfo->VideoSequence.cFrame.type = pInfo->VideoSequence.VideoPicture.picture_coding_type;
    /* save current frame time */
    pInfo->VideoSequence.cFrame.time = pic_time;

    /* set past and future time for B-Plane */
    pInfo->VideoSequence.rTime = pInfo->VideoSequence.nTime;
    pInfo->VideoSequence.nTime = pic_time;
    h263_StatisticInc(&pInfo->VideoSequence.Statistic.nPic_I);
    break;
  case H263_PIC_TYPE_P :
    /* set new video frame */
    h263_SWAP(h263_Frame, pInfo->VideoSequence.rFrame, pInfo->VideoSequence.cFrame);
    if (pInfo->VideoSequence.Bpics_to_show > 0) {
      pInfo->VideoSequence.vFrame = &pInfo->VideoSequence.nFrame;
      pInfo->VideoSequence.Bpics_to_show--;
      pInfo->VideoSequence.Ppics_to_show++;
    } else
      pInfo->VideoSequence.vFrame = &pInfo->VideoSequence.rFrame;

    if (VPic->modes.resample || VPic->implicit_resample) {
      h263_Resample(pInfo, &pInfo->VideoSequence.rFrame, &pInfo->VideoSequence.bFrame, VPic->implicit_resample);
      refFrame = &pInfo->VideoSequence.bFrame;
    } else
      refFrame = &pInfo->VideoSequence.rFrame;

    pInfo->VideoSequence.refFrame = refFrame;

#ifdef _OMP_KARABAS
    if (/*!pInfo->VideoSequence.data_partitioned && */ pInfo->number_threads > 1)
      status = h263_DecodeFrame_P_MT(pInfo, refFrame);
    else
#endif
    status = h263_DecodeFrame_P(pInfo, refFrame);
    h263_PadFrame(&pInfo->VideoSequence.cFrame);

    pInfo->VideoSequence.cFrame.type = pInfo->VideoSequence.VideoPicture.picture_coding_type;
    /* save current frame time */
    pInfo->VideoSequence.cFrame.time = pic_time;

    /* set past and future time for B-Frame */
    pInfo->VideoSequence.rTime = pInfo->VideoSequence.nTime;
    pInfo->VideoSequence.nTime = pic_time;
    h263_StatisticInc(&pInfo->VideoSequence.Statistic.nPic_P);
    break;
  case H263_PIC_TYPE_PB :
  case H263_PIC_TYPE_iPB :

    /* set new video frame */
    h263_SWAP(h263_Frame, pInfo->VideoSequence.rFrame, pInfo->VideoSequence.cFrame);

    if (pInfo->VideoSequence.Bpics_to_show > 0) {
      h263_SWAP(h263_Frame, pInfo->VideoSequence.nFrame, pInfo->VideoSequence.bFrame);
      pInfo->VideoSequence.vFrame = &pInfo->VideoSequence.bFrame;
      pInfo->VideoSequence.Ppics_to_show++;
    } else {
      pInfo->VideoSequence.vFrame = &pInfo->VideoSequence.rFrame;
      pInfo->VideoSequence.Bpics_to_show++;
    }

    if (VPic->modes.resample || VPic->implicit_resample) {
      h263_Resample(pInfo, &pInfo->VideoSequence.rFrame, &pInfo->VideoSequence.aFrame, VPic->implicit_resample);
      refFrame = &pInfo->VideoSequence.aFrame;
    } else
      refFrame = &pInfo->VideoSequence.rFrame;

    pInfo->VideoSequence.refFrame = refFrame;

#ifdef _OMP_KARABAS
    if (/*!pInfo->VideoSequence.data_partitioned && */ pInfo->number_threads > 1)
      status = h263_DecodeFrame_P_MT(pInfo, refFrame);
    else
#endif
    status = h263_DecodeFrame_P(pInfo, refFrame);
    h263_PadFrame(&pInfo->VideoSequence.cFrame);

    pInfo->VideoSequence.cFrame.type = pInfo->VideoSequence.VideoPicture.picture_coding_type;
    pInfo->VideoSequence.nFrame.type = pInfo->VideoSequence.VideoPicture.picture_coding_type; /* ??? */
    /* save current frame time */
    pInfo->VideoSequence.cFrame.time = pic_time;
    pInfo->VideoSequence.nFrame.time = pInfo->VideoSequence.pic_time_pb;

    h263_StatisticInc(&pInfo->VideoSequence.Statistic.nPic_PB);

    /* set past and future time for B-Frames */
    pInfo->VideoSequence.rTime = pInfo->VideoSequence.nTime;
    pInfo->VideoSequence.nTime = pic_time;
    break;
  case H263_PIC_TYPE_B :

    pInfo->VideoSequence.TRB = (Ipp32s)(pic_time - pInfo->VideoSequence.rTime);
    pInfo->VideoSequence.TRD = (Ipp32s)(pInfo->VideoSequence.nTime - pInfo->VideoSequence.rTime);

    if (VPic->enh_layer_num == 1) {
      if (pInfo->VideoSequence.Bpics_to_show > 0)
        h263_SWAP(h263_Frame, pInfo->VideoSequence.bFrame, pInfo->VideoSequence.nFrame);
      refFrame = &pInfo->VideoSequence.rFrame;
      curFrame = &pInfo->VideoSequence.nFrame;
      auxFrame = &pInfo->VideoSequence.aFrame;
    } else {
      enh_layer = pInfo->VideoSequence.enhLayers[VPic->enh_layer_num - 2];
      curFrame = &enh_layer->n_Frame;
      auxFrame = &enh_layer->a_Frame;
      if (VPic->ref_layer_num == 1)
        refFrame = &pInfo->VideoSequence.rFrame;
      else {
       ref_layer = pInfo->VideoSequence.enhLayers[VPic->ref_layer_num - 2];
       refFrame = &ref_layer->r_Frame;
      }
    }

    if (VPic->modes.resample || VPic->implicit_resample) {
      Ipp32s modiFlag;
      H263_COMPARE_RESAMPLE_PARAMS(modiFlag, &pInfo->VideoSequence, VPic);
      if (modiFlag) {
        h263_Resample(pInfo, refFrame, auxFrame, VPic->implicit_resample);
        refFrame = auxFrame;
      } else
        refFrame = (VPic->enh_layer_num == 1 ? pInfo->VideoSequence.refFrame : &enh_layer->a_Frame);
    }

    status = h263_DecodeFrame_B(pInfo, enh_layer, refFrame);

    if (VPic->enh_layer_num == 1) {
      pInfo->VideoSequence.nFrame.type = pInfo->VideoSequence.VideoPicture.picture_coding_type;
      pInfo->VideoSequence.nFrame.time = pic_time;
      if (pInfo->VideoSequence.Bpics_to_show > 0) {
        /* check what goes first: B (nFrame) or B-part (bFrame) */
        if (pic_time < pInfo->VideoSequence.bFrame.time) /* B frame is first */
          h263_SWAP(h263_Frame, pInfo->VideoSequence.bFrame, pInfo->VideoSequence.nFrame);
        pInfo->VideoSequence.vFrame = &pInfo->VideoSequence.bFrame;
      } else
        pInfo->VideoSequence.vFrame = &pInfo->VideoSequence.nFrame;
    } else {
      enh_layer->n_Frame.type = VPic->picture_coding_type;
      enh_layer->n_Frame.time = pic_time;
      enh_layer->v_Frame = &enh_layer->n_Frame;
    }

    h263_StatisticInc(&pInfo->VideoSequence.Statistic.nPic_B);
    break;

  case H263_PIC_TYPE_EI :
    enh_layer = pInfo->VideoSequence.enhLayers[VPic->enh_layer_num - 2];
    h263_SWAP(h263_Frame, enh_layer->r_Frame, enh_layer->c_Frame);
    if (enh_layer->picIndex)
      enh_layer->v_Frame = &enh_layer->r_Frame;
    else
      enh_layer->v_Frame = NULL;

    lowFrame = (VPic->ref_layer_num == 1 ? &pInfo->VideoSequence.cFrame :
                                           &pInfo->VideoSequence.enhLayers[VPic->ref_layer_num - 2]->c_Frame);

    if (VPic->format.scalability_type == H263_SCALABILITY_SNR) {
      enh_layer->l_Frame = lowFrame;
    } else {
      enh_layer->l_Frame = &enh_layer->n_Frame;
      if ((status = h263_SpatialInterpolateFrame(lowFrame, enh_layer->l_Frame, VPic->format.scalability_type)) != H263_STATUS_OK) {
        h263_Error("Error when doing spatially interpolation");
        return status;
      }
    }

    status = h263_DecodeFrame_EI(pInfo, enh_layer);

    h263_PadFrame(&enh_layer->c_Frame);

    enh_layer->c_Frame.type = VPic->picture_coding_type;
    enh_layer->c_Frame.time = pic_time;

    h263_StatisticInc(&pInfo->VideoSequence.Statistic.nPic_EI);
    break;

  case H263_PIC_TYPE_EP :
    enh_layer = pInfo->VideoSequence.enhLayers[VPic->enh_layer_num - 2];
    h263_SWAP(h263_Frame, enh_layer->r_Frame, enh_layer->c_Frame);
    enh_layer->v_Frame = &enh_layer->r_Frame;

    if (VPic->modes.resample || VPic->implicit_resample) {
      h263_Resample(pInfo, &enh_layer->r_Frame, &enh_layer->a_Frame, VPic->implicit_resample);
      refFrame = &enh_layer->a_Frame;
    } else
      refFrame = &enh_layer->r_Frame;

    lowFrame = (VPic->ref_layer_num == 1 ? &pInfo->VideoSequence.cFrame :
                                           &pInfo->VideoSequence.enhLayers[VPic->ref_layer_num - 2]->c_Frame);

    if (VPic->format.scalability_type == H263_SCALABILITY_SNR) {
      enh_layer->l_Frame = lowFrame;
    } else {
      enh_layer->l_Frame = &enh_layer->n_Frame;
      if ((status = h263_SpatialInterpolateFrame(lowFrame, enh_layer->l_Frame, VPic->format.scalability_type)) != H263_STATUS_OK) {
        h263_Error("Error when doing spatial interpolation");
        return status;
      }
    }

    status = h263_DecodeFrame_EP(pInfo, enh_layer, refFrame);

    h263_PadFrame(&enh_layer->c_Frame);

    enh_layer->c_Frame.type = VPic->picture_coding_type;
    enh_layer->c_Frame.time = pic_time;

    h263_StatisticInc(&pInfo->VideoSequence.Statistic.nPic_EP);
    break;
  }

  pInfo->VideoSequence.prev_pic_type[VPic->enh_layer_num - 1] = VPic->picture_coding_type;
  h263_StatisticInc(&pInfo->VideoSequence.Statistic.nPic);

  return status;
}

/*
 * Copy n consecutive macroblocks (one 16x16 luma block plus the two
 * corresponding 8x8 chroma blocks each) from the reference frame into
 * the current frame, starting at macroblock (rowNum, colNum) and
 * wrapping to the start of the next macroblock row whenever the right
 * edge of the picture is reached.
 *
 * rFrame   - source (reference) frame
 * cFrame   - destination (current) frame
 * mbPerRow - number of macroblocks per picture row
 * rowNum   - macroblock row of the first block to copy
 * colNum   - macroblock column of the first block to copy
 * n        - number of macroblocks to copy; no-op when n <= 0
 */
void h263_CopyMacroBlocks(h263_Frame *rFrame, h263_Frame *cFrame, Ipp32s mbPerRow, Ipp32s rowNum, Ipp32s colNum, Ipp32s n)
{
  Ipp32s  k, col;
  Ipp32s  srcStepY, srcStepCb, srcStepCr, dstStepY, dstStepCb, dstStepCr;
  Ipp8u   *srcY, *srcCb, *srcCr, *dstY, *dstCb, *dstCr;

  if (n <= 0)
    return;

  dstStepY  = cFrame->stepY;
  dstStepCb = cFrame->stepCb;
  dstStepCr = cFrame->stepCr;
  srcStepY  = rFrame->stepY;
  srcStepCb = rFrame->stepCb;
  srcStepCr = rFrame->stepCr;

  /* seek both frames to the first macroblock: 16-pel luma and 8-pel
     chroma granularity per macroblock column/row */
  dstY  = cFrame->pY  + (rowNum * dstStepY  + colNum) * 16;
  dstCb = cFrame->pCb + (rowNum * dstStepCb + colNum) * 8;
  dstCr = cFrame->pCr + (rowNum * dstStepCr + colNum) * 8;
  srcY  = rFrame->pY  + (rowNum * srcStepY  + colNum) * 16;
  srcCb = rFrame->pCb + (rowNum * srcStepCb + colNum) * 8;
  srcCr = rFrame->pCr + (rowNum * srcStepCr + colNum) * 8;

  col = colNum;
  for (k = 0; k < n; k++) {
    ippiCopy16x16_8u_C1R(srcY, srcStepY, dstY, dstStepY);
    ippiCopy8x8_8u_C1R(srcCb, srcStepCb, dstCb, dstStepCb);
    ippiCopy8x8_8u_C1R(srcCr, srcStepCr, dstCr, dstStepCr);
    if (++col % mbPerRow == 0) {
      /* end of macroblock row: step past the last block, cross what is
         presumably the frame's extension border (H263_NUM_EXT_MB on
         each side — confirm against frame allocation), and move down
         one macroblock row (15 more luma / 7 more chroma lines) */
      dstY  += (2 * H263_NUM_EXT_MB + 1) * 16 + 15 * dstStepY;
      dstCb += (2 * H263_NUM_EXT_MB + 1) * 8 + 7 * dstStepCb;
      dstCr += (2 * H263_NUM_EXT_MB + 1) * 8 + 7 * dstStepCr;
      srcY  += (2 * H263_NUM_EXT_MB + 1) * 16 + 15 * srcStepY;
      srcCb += (2 * H263_NUM_EXT_MB + 1) * 8 + 7 * srcStepCb;
      srcCr += (2 * H263_NUM_EXT_MB + 1) * 8 + 7 * srcStepCr;
    } else {
      /* same row: advance one macroblock horizontally */
      dstY += 16; dstCr += 8; dstCb += 8;
      srcY += 16; srcCr += 8; srcCb += 8;
    }
  }
}

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -