📄 h263decframe.c

📁 This is IPP-library-based sample code for the PCA platform. After downloading the IPP library from the web and setting the relevant build parameters, the code can be compiled.
💻 C
📖 Page 1 of 3
    pic_time = pInfo->VideoSequence.ref_pic_time + temp_ref_delta * time_factor;

  switch (VPic->picture_coding_type) {
  case H263_PIC_TYPE_I :
    /* set new video frame */
    if (pInfo->VideoSequence.PicIndex == 0) {
      pInfo->VideoSequence.vFrame = NULL;
    } else {
      h263_SWAP(h263_Frame, pInfo->VideoSequence.rFrame, pInfo->VideoSequence.cFrame);
      H263_FRAME_CHECK_RESIZE(VPic, pInfo->VideoSequence.cFrame);
      H263_MBINFO_CHECK_RESIZE(VPic, &pInfo->VideoSequence); /* in case frame size is changed starting from this I-frame */
      if (pInfo->VideoSequence.Bpics_to_show > 0) {
        pInfo->VideoSequence.vFrame = &pInfo->VideoSequence.nFrame;
        pInfo->VideoSequence.Bpics_to_show--;
        pInfo->VideoSequence.Ppics_to_show++;
      } else
        pInfo->VideoSequence.vFrame = &pInfo->VideoSequence.rFrame;
    }
#ifdef _OMP_KARABAS
    if (/*!pInfo->VideoSequence.data_partitioned && */ pInfo->number_threads > 1)
      status = h263_DecodeFrame_I_MT(pInfo);
    else
#endif
    status = h263_DecodeFrame_I(pInfo);
    h263_PadFrame(&pInfo->VideoSequence.cFrame);
    pInfo->VideoSequence.cFrame.type = pInfo->VideoSequence.VideoPicture.picture_coding_type;
    /* save current frame time */
    pInfo->VideoSequence.cFrame.time = pic_time;
    /* set past and future time for B-Plane */
    pInfo->VideoSequence.rTime = pInfo->VideoSequence.nTime;
    pInfo->VideoSequence.nTime = pic_time;
    h263_StatisticInc(&pInfo->VideoSequence.Statistic.nPic_I);
    break;

  case H263_PIC_TYPE_P :
    /* set new video frame */
    h263_SWAP(h263_Frame, pInfo->VideoSequence.rFrame, pInfo->VideoSequence.cFrame);
    H263_FRAME_CHECK_RESIZE(VPic, pInfo->VideoSequence.cFrame); /* in case there was resampling */
    if (pInfo->VideoSequence.Bpics_to_show > 0) {
      pInfo->VideoSequence.vFrame = &pInfo->VideoSequence.nFrame;
      pInfo->VideoSequence.Bpics_to_show--;
      pInfo->VideoSequence.Ppics_to_show++;
    } else
      pInfo->VideoSequence.vFrame = &pInfo->VideoSequence.rFrame;
    if (!VPic->modes.resample && // VPic->plusptype &&
      (VPic->width != pInfo->VideoSequence.rFrame.width || VPic->height != pInfo->VideoSequence.rFrame.height)) {
      implicit_resample = 1;
      VPic->wda = 3;
      VPic->fillMode = 3;
      h263_Zero4MV(VPic->warpParams);
    }
    if (VPic->modes.resample || implicit_resample) {
      H263_MBINFO_CHECK_RESIZE(VPic, &pInfo->VideoSequence);
      H263_FRAME_CHECK_RESIZE(VPic, pInfo->VideoSequence.cFrame);
      H263_FRAME_CHECK_RESIZE(VPic, pInfo->VideoSequence.bFrame);
      h263_Resample(pInfo, &pInfo->VideoSequence.rFrame, &pInfo->VideoSequence.bFrame, implicit_resample);
      refFrame = &pInfo->VideoSequence.bFrame;
    } else
      refFrame = &pInfo->VideoSequence.rFrame;
    pInfo->VideoSequence.refFrame = refFrame;
#ifdef _OMP_KARABAS
    if (/*!pInfo->VideoSequence.data_partitioned && */ pInfo->number_threads > 1)
      status = h263_DecodeFrame_P_MT(pInfo, refFrame);
    else
#endif
    status = h263_DecodeFrame_P(pInfo, refFrame);
    h263_PadFrame(&pInfo->VideoSequence.cFrame);
    pInfo->VideoSequence.cFrame.type = pInfo->VideoSequence.VideoPicture.picture_coding_type;
    /* save current frame time */
    pInfo->VideoSequence.cFrame.time = pic_time;
    /* set past and future time for B-Frame */
    pInfo->VideoSequence.rTime = pInfo->VideoSequence.nTime;
    pInfo->VideoSequence.nTime = pic_time;
    h263_StatisticInc(&pInfo->VideoSequence.Statistic.nPic_P);
    break;

  case H263_PIC_TYPE_PB :
  case H263_PIC_TYPE_iPB :
    /* set new video frame */
    h263_SWAP(h263_Frame, pInfo->VideoSequence.rFrame, pInfo->VideoSequence.cFrame);
    H263_FRAME_CHECK_RESIZE(VPic, pInfo->VideoSequence.cFrame); /* in case there was resampling */
    if (!VPic->modes.resample && // VPic->plusptype &&
      (VPic->width != pInfo->VideoSequence.rFrame.width || VPic->height != pInfo->VideoSequence.rFrame.height)) {
      implicit_resample = 1;
      VPic->wda = 3;
      VPic->fillMode = 3;
      h263_Zero4MV(VPic->warpParams);
    }
    if (pInfo->VideoSequence.Bpics_to_show > 0) {
      h263_SWAP(h263_Frame, pInfo->VideoSequence.nFrame, pInfo->VideoSequence.bFrame);
      H263_FRAME_CHECK_RESIZE(VPic, pInfo->VideoSequence.nFrame); /* in case there was resampling */
      pInfo->VideoSequence.vFrame = &pInfo->VideoSequence.bFrame;
      pInfo->VideoSequence.Ppics_to_show++;
    } else {
      pInfo->VideoSequence.vFrame = &pInfo->VideoSequence.rFrame;
      pInfo->VideoSequence.Bpics_to_show++;
    }
    if (VPic->modes.resample || implicit_resample) {
      H263_MBINFO_CHECK_RESIZE(VPic, &pInfo->VideoSequence);
      H263_FRAME_CHECK_RESIZE(VPic, pInfo->VideoSequence.cFrame);
      H263_FRAME_CHECK_RESIZE(VPic, pInfo->VideoSequence.nFrame);
      H263_FRAME_CHECK_RESIZE(VPic, pInfo->VideoSequence.aFrame);
      h263_Resample(pInfo, &pInfo->VideoSequence.rFrame, &pInfo->VideoSequence.aFrame, implicit_resample);
      refFrame = &pInfo->VideoSequence.aFrame;
    } else
      refFrame = &pInfo->VideoSequence.rFrame;
    pInfo->VideoSequence.refFrame = refFrame;
#ifdef _OMP_KARABAS
    if (/*!pInfo->VideoSequence.data_partitioned && */ pInfo->number_threads > 1)
      status = h263_DecodeFrame_P_MT(pInfo, refFrame);
    else
#endif
    status = h263_DecodeFrame_P(pInfo, refFrame);
    h263_PadFrame(&pInfo->VideoSequence.cFrame);
    pInfo->VideoSequence.cFrame.type = pInfo->VideoSequence.VideoPicture.picture_coding_type;
    pInfo->VideoSequence.nFrame.type = pInfo->VideoSequence.VideoPicture.picture_coding_type; /* ??? */
    /* save current frame time */
    pInfo->VideoSequence.cFrame.time = pic_time;
    pInfo->VideoSequence.nFrame.time = pInfo->VideoSequence.pic_time_pb;
    h263_StatisticInc(&pInfo->VideoSequence.Statistic.nPic_PB);
    /* set past and future time for B-Frames */
    pInfo->VideoSequence.rTime = pInfo->VideoSequence.nTime;
    pInfo->VideoSequence.nTime = pic_time;
    break;

  case H263_PIC_TYPE_B :
    pInfo->VideoSequence.TRB = (int)(pic_time - pInfo->VideoSequence.rTime);
    pInfo->VideoSequence.TRD = (int)(pInfo->VideoSequence.nTime - pInfo->VideoSequence.rTime);
    if (VPic->enh_layer_num == 1) {
      if (pInfo->VideoSequence.Bpics_to_show > 0) {
        h263_SWAP(h263_Frame, pInfo->VideoSequence.bFrame, pInfo->VideoSequence.nFrame);
        H263_FRAME_CHECK_RESIZE(VPic, pInfo->VideoSequence.nFrame); /* in case there was resampling */
      }
      refFrame = &pInfo->VideoSequence.rFrame;
      curFrame = &pInfo->VideoSequence.nFrame;
      auxFrame = &pInfo->VideoSequence.aFrame;
    } else {
      enh_layer = pInfo->VideoSequence.enhLayers[VPic->enh_layer_num - 2];
      if (!enh_layer) {
        if (!(enh_layer = h263_InitEnhancedLayer(pInfo)))
          return H263_STATUS_NO_MEM;
        pInfo->VideoSequence.enhLayers[VPic->enh_layer_num - 2] = enh_layer;
      }
      curFrame = &enh_layer->n_Frame;
      auxFrame = &enh_layer->a_Frame;
      if (VPic->ref_layer_num == 1)
        refFrame = &pInfo->VideoSequence.rFrame;
      else {
        ref_layer = pInfo->VideoSequence.enhLayers[VPic->ref_layer_num - 2];
        refFrame = &ref_layer->r_Frame;
      }
    }
    if (!VPic->modes.resample && // VPic->plusptype &&
      (VPic->width != refFrame->width || VPic->height != refFrame->height)) {
      implicit_resample = 1;
      VPic->wda = 3;
      VPic->fillMode = 3;
      h263_Zero4MV(VPic->warpParams);
    }
    if (VPic->modes.resample || implicit_resample) {
      int modiFlag;
      H263_FRAME_CHECK_RESIZE(VPic, (*curFrame));
      H263_COMPARE_RESAMPLE_PARAMS(modiFlag, &pInfo->VideoSequence, VPic);
      if (modiFlag) {
        H263_FRAME_CHECK_RESIZE(VPic, (*auxFrame));
        h263_Resample(pInfo, refFrame, auxFrame, implicit_resample);
        refFrame = auxFrame;
      } else
        refFrame = (VPic->enh_layer_num == 1 ? pInfo->VideoSequence.refFrame : &enh_layer->a_Frame);
    }
    status = h263_DecodeFrame_B(pInfo, enh_layer, refFrame);
    if (VPic->enh_layer_num == 1) {
      pInfo->VideoSequence.nFrame.type = pInfo->VideoSequence.VideoPicture.picture_coding_type;
      pInfo->VideoSequence.nFrame.time = pic_time;
      if (pInfo->VideoSequence.Bpics_to_show > 0) {
        /* check what goes first: B (nFrame) or B-part (bFrame) */
        if (pic_time < pInfo->VideoSequence.bFrame.time) /* B frame is first */
          h263_SWAP(h263_Frame, pInfo->VideoSequence.bFrame, pInfo->VideoSequence.nFrame);
        pInfo->VideoSequence.vFrame = &pInfo->VideoSequence.bFrame;
      } else
        pInfo->VideoSequence.vFrame = &pInfo->VideoSequence.nFrame;
    } else {
      enh_layer->n_Frame.type = VPic->picture_coding_type;
      enh_layer->n_Frame.time = pic_time;
      enh_layer->v_Frame = &enh_layer->n_Frame;
    }
    h263_StatisticInc(&pInfo->VideoSequence.Statistic.nPic_B);
    break;

  case H263_PIC_TYPE_EI :
    enh_layer = pInfo->VideoSequence.enhLayers[VPic->enh_layer_num - 2];
    if (!enh_layer) {
      if (!(enh_layer = h263_InitEnhancedLayer(pInfo)))
        return H263_STATUS_NO_MEM;
      H263_MBINFO_CHECK_RESIZE(VPic, &pInfo->VideoSequence);
      /* TODO: skip all enhanced layer frames if not enough memory ??? */
      pInfo->VideoSequence.enhLayers[VPic->enh_layer_num - 2] = enh_layer;
      enh_layer->v_Frame = NULL;
    } else {
      h263_SWAP(h263_Frame, enh_layer->r_Frame, enh_layer->c_Frame);
      H263_FRAME_CHECK_RESIZE(VPic, enh_layer->c_Frame);
      enh_layer->v_Frame = &enh_layer->r_Frame;
      H263_MBINFO_CHECK_RESIZE(VPic, &pInfo->VideoSequence); /* in case frame size is changed starting from this EI-frame */
    }
    lowFrame = (VPic->ref_layer_num == 1 ? &pInfo->VideoSequence.cFrame : &pInfo->VideoSequence.enhLayers[VPic->ref_layer_num - 1]->c_Frame);
    if (VPic->format.scalability_type == H263_SCALABILITY_SNR) {
      enh_layer->l_Frame = lowFrame;
    } else {
      enh_layer->l_Frame = &enh_layer->n_Frame;
      if ((status = h263_SpatialInterpolateFrame(lowFrame, enh_layer->l_Frame, VPic->format.scalability_type)) != H263_STATUS_OK) {
        h263_Error("Error when doing spatial interpolation");
        return status;
      }
    }
    status = h263_DecodeFrame_EI(pInfo, enh_layer);
    h263_PadFrame(&enh_layer->c_Frame);
    enh_layer->c_Frame.type = VPic->picture_coding_type;
    enh_layer->c_Frame.time = pic_time;
    h263_StatisticInc(&pInfo->VideoSequence.Statistic.nPic_EI);
    break;

  case H263_PIC_TYPE_EP :
    enh_layer = pInfo->VideoSequence.enhLayers[VPic->enh_layer_num - 2];
    /* do not need to check if (!enh_layer) - layers can not start with EP frame */
    h263_SWAP(h263_Frame, enh_layer->r_Frame, enh_layer->c_Frame);
    H263_FRAME_CHECK_RESIZE(VPic, enh_layer->c_Frame); /* in case there was resampling */
    enh_layer->v_Frame = &enh_layer->r_Frame;
    if (!VPic->modes.resample && // VPic->plusptype &&
      (VPic->width != enh_layer->r_Frame.width || VPic->height != enh_layer->r_Frame.height)) {
      implicit_resample = 1;
      VPic->wda = 3;
      VPic->fillMode = 3;
      h263_Zero4MV(VPic->warpParams);
    }
    if (VPic->modes.resample || implicit_resample) {
      H263_MBINFO_CHECK_RESIZE(VPic, &pInfo->VideoSequence);
      H263_FRAME_CHECK_RESIZE(VPic, enh_layer->c_Frame);
      H263_FRAME_CHECK_RESIZE(VPic, enh_layer->n_Frame);
      H263_FRAME_CHECK_RESIZE(VPic, enh_layer->a_Frame);
      h263_Resample(pInfo, &enh_layer->r_Frame, &enh_layer->a_Frame, implicit_resample);
      refFrame = &enh_layer->a_Frame;
    } else
      refFrame = &enh_layer->r_Frame;
    lowFrame = (VPic->ref_layer_num == 1 ? &pInfo->VideoSequence.cFrame : &pInfo->VideoSequence.enhLayers[VPic->ref_layer_num - 1]->c_Frame);
    if (VPic->format.scalability_type == H263_SCALABILITY_SNR) {
      enh_layer->l_Frame = lowFrame;
    } else {
      enh_layer->l_Frame = &enh_layer->n_Frame;
      if ((status = h263_SpatialInterpolateFrame(lowFrame, enh_layer->l_Frame, VPic->format.scalability_type)) != H263_STATUS_OK) {
        h263_Error("Error when doing spatial interpolation");
        return status;
      }
    }
    status = h263_DecodeFrame_EP(pInfo, enh_layer, refFrame);
    h263_PadFrame(&enh_layer->c_Frame);
    enh_layer->c_Frame.type = VPic->picture_coding_type;
    enh_layer->c_Frame.time = pic_time;
    h263_StatisticInc(&pInfo->VideoSequence.Statistic.nPic_EP);
    break;
  }

  pInfo->VideoSequence.prev_pic_type[VPic->enh_layer_num - 1] = VPic->picture_coding_type;
  h263_StatisticInc(&pInfo->VideoSequence.Statistic.nPic);
  return status;
}
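
The least obvious part of the dispatch above is the display-order bookkeeping for PB pictures: the B part decoded together with a PB picture is not handed out right away; it is queued via the Bpics_to_show counter and only exposed through pInfo->VideoSequence.vFrame on a later call, while vFrame meanwhile points at an older reference frame (and stays NULL for the very first I-picture). The short program below is only a toy sketch of that counter logic and is not part of the IPP sample: the picture sequence and printed messages are invented for illustration, and the Ppics_to_show counter and the nFrame/bFrame swapping are deliberately left out.

/* Toy model of the Bpics_to_show / vFrame bookkeeping seen in the listing.
 * Everything here is illustrative only; it is not the sample's data model. */
#include <stdio.h>

int main(void)
{
    /* hypothetical coded-order picture sequence, for illustration only */
    const char *coded_order[] = { "I", "P", "PB", "P", "PB", "P" };
    int Bpics_to_show = 0;   /* B parts decoded but not yet handed out via vFrame */
    size_t i;

    for (i = 0; i < sizeof(coded_order) / sizeof(coded_order[0]); i++) {
        const char *pic = coded_order[i];

        if (i == 0) {
            printf("decode %-2s -> nothing to display yet (vFrame == NULL)\n", pic);
        } else if (Bpics_to_show > 0) {
            printf("decode %-2s -> display the queued B part (nFrame)\n", pic);
            Bpics_to_show--;
        } else {
            printf("decode %-2s -> display the previous reference frame (rFrame)\n", pic);
        }

        if (pic[1] == 'B')   /* a PB picture leaves its B part queued for later */
            Bpics_to_show++;
    }
    return 0;
}

Running it prints one line per coded picture, showing which frame such a decoder would expose for display after that call; the one-picture delay after each PB picture mirrors how the real code defers the B part to the next I/P decode.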
