// umc_mpeg2_enc.cpp (UMC MPEG-2 video encoder, listing page 1 of 4)
                                           MediaData *out,
                                           Ipp64f *PTS,
                                           Ipp64f *DTS)
{
  VideoData *p_buff;
  Ipp32s i, num;
  FrameType pict_type;
  Status ret;

  mEncodedSize = 0;
  if (DTS != NULL) {
    *DTS = -1;
  }

  // checking only the last of these would be enough
  if (frames_buff == NULL || VideoData_buff == NULL) {
    UnlockBuffers();
    ret = CreateInternalBuffers(buff_size);
    LockBuffers();
    if(ret != UMC_OK)
      return ret;
  }
  if (m_FirstFrame) {
    buff_ind = 0;
    num_btype = 0;
    num_new = -1;
    curr_gop = 0;
  }
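  // Ring-buffer bookkeeping used by the reordering logic below:
  //   buff_ind  - slot index of the last scheduled anchor (I/P) frame,
  //   num_new   - offset from buff_ind of the newest stored input frame,
  //   num_btype - B frames already scheduled but not yet emitted,
  //   curr_gop  - pictures left before the current GOP is closed.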

  if (frame) { // store incoming frame
    num_new++;
    i = buff_ind + num_new;
    if (i >= buff_size) i -= buff_size;
    p_buff = VideoData_buff + i;
    if (frame->GetPlanePointer(0) != p_buff->GetPlanePointer(0)) {
      if(frame_loader == NULL) {
        frame_loader = new VideoProcessing;
      }
      ret = frame_loader->GetFrame(frame, p_buff);
      if(ret != UMC_OK)
        return ret;
    }
    if (PTS != NULL) {
      p_buff->SetTime(*PTS);
    }
  }
  if (m_FirstFrame) {
    p_buff = VideoData_buff + num_new;
    if (DTS != NULL && PTS != NULL) {
      *DTS = *PTS - 1.0/encodeInfo.info.framerate;
    }
    p_buff->SetFrameType(I_PICTURE);
    return EncodeFrameReordered(p_buff, out);
  }
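  // Reordering example with IPDistance = 3: display order
  //   I0 B1 B2 P3 B4 B5 P6 ...
  // is emitted in coding order
  //   I0 P3 B1 B2 P6 B4 B5 ...
  // Each B frame waits in the ring buffer until its forward anchor has been
  // coded, then is drained one per call here.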
  if (num_btype > 0) { // B-frames in queue
    i = buff_ind - num_btype;
    if (i < 0) i += buff_size;
    num_btype--;
    curr_gop--;
    p_buff = VideoData_buff + i;
    if (PTS != NULL) {
      *PTS = p_buff->GetTime();
    }
    p_buff->SetFrameType(B_PICTURE);
    return EncodeFrameReordered(p_buff, out);
  }
  // no more B-frames in queue
  if (DTS != NULL) {
    i = (buff_ind + num_new) - encodeInfo.IPDistance;
    if (i >= buff_size) i -= buff_size;
    if (i < 0) i += buff_size;
    *DTS = VideoData_buff[i].GetTime();
  }
  if (curr_gop <= 0) {
    curr_gop = encodeInfo.gopSize;
  }
  num = curr_gop;
  if (num > encodeInfo.IPDistance) num = encodeInfo.IPDistance;
  if (num_new < num) {
    if (frame != NULL) {
      return UMC_OK; // wait for necessary number of frames
    }
    if (num_new <= 0) { // no incoming & no in buffer
      return UMC_ERR_NOT_ENOUGH_DATA;
    }
    num = num_new;
  }
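  // The anchor is coded as an I picture only when this group spans the whole
  // remaining GOP (num == curr_gop); otherwise it is a P picture.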
  if (num == curr_gop) {
    pict_type = I_PICTURE;
  } else {
    pict_type = P_PICTURE;
  }
  num_btype = num - 1;
  buff_ind += num;
  num_new -= num;
  if (buff_ind >= buff_size) buff_ind -= buff_size;
  curr_gop--;
  p_buff = VideoData_buff + buff_ind;
  if (PTS != NULL) {
    *PTS = p_buff->GetTime();
  }
  p_buff->SetFrameType(pict_type);
#ifdef ME_REF_ORIGINAL
  // need to copy src to rotating buffer as well
  if(frame_loader == NULL) {
    frame_loader = new VideoProcessing;
  }
  ret = frame_loader->GetFrame(p_buff, pRotFrames[0][0]);
  if(ret != UMC_OK)
    return ret;
#endif /* ME_REF_ORIGINAL */
  ret = EncodeFrameReordered(p_buff, out);
  return ret;
}
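// A minimal driving-loop sketch (hypothetical caller, not part of this file),
// assuming the member above follows the UMC pattern visible in its body: a
// non-NULL frame may only be buffered (UMC_OK with no new output), and
// frame == NULL flushes the reorder queue until UMC_ERR_NOT_ENOUGH_DATA.
// ReadYUV, WriteStream and the EncodeFrame method name are assumptions.
//
//   VideoData in;                  // filled by the application
//   MediaData out;                 // bitstream buffer owned by the application
//   Ipp64f pts, dts;
//   while (ReadYUV(&in, &pts) == UMC_OK) {
//     encoder.EncodeFrame(&in, &out, &pts, &dts);   // may just buffer the frame
//     WriteStream(&out);
//   }
//   while (encoder.EncodeFrame(NULL, &out, &pts, &dts) == UMC_OK) {  // flush
//     WriteStream(&out);
//   }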

// Encode reordered frames (B frames following the corresponding I/P frames).
// pict_type must be supplied (I_PICTURE, P_PICTURE, B_PICTURE).
// No buffering because the frames are already reordered.
Status MPEG2VideoEncoderBase::EncodeFrameReordered(VideoData* frame,
                                                   MediaData* out)
{
  FrameType pict_type;

  Ipp32s i;

  if (frame == NULL) {
    return UMC_ERR_NULL_PTR;
  }

  pict_type = frame->GetFrameType();

  // output is appended after any data already stored in the output MediaData
  out_pointer = (Ipp8u*)out->GetDataPointer() + out->GetDataSize();
  output_buffer_size = (Ipp32s)((Ipp8u*)out->GetBufferPointer() + out->GetBufferSize() - out_pointer);

  VideoData *tmp_in = frame;
  // check size as well
  if(!(encodeInfo.lFlags & FLAG_VENC_REORDER)) { // otherwise is buffered
    // check if in internal buffer (from getNextYUVpointer)
    i = buff_ind + num_new + 1; // index of last frame
    if (i >= buff_size) i -= buff_size;
    if(VideoData_buff[i].GetPlanePointer(0) != frame->GetPlanePointer(0) &&
      ((frame->GetColorFormat() != encodeInfo.info.color_format) ||
        ((Ipp32s)frame->GetPlanePitch(0) != YFrameHSize) ||
        ((Ipp32s)frame->GetPlanePitch(1) != UVFrameHSize) ||
        ((Ipp32s)frame->GetPlanePitch(2) != UVFrameHSize)
        || (frame->GetHeight() < YFrameVSize)
#ifdef ME_REF_ORIGINAL
        || pict_type != B_PICTURE  // copy to original reference buffer
#endif
      ))
    {
      // CC, resize, expand borders
#ifdef ME_REF_ORIGINAL
      if(pict_type != B_PICTURE) { // copy to original reference buffer
        tmp_in = pRotFrames[0][0];
      } else // B frames to separate buffer
#endif
      {
        if (tmpFrame_buf == NULL) {
          if (UMC_OK != m_pMemoryAllocator->Alloc(&mid_tmpFrame_buf,
            YUVFrameSize+ALIGN_VALUE, UMC_ALLOC_PERSISTENT, ALIGN_VALUE)) {
            vm_debug_trace(VM_DEBUG_ERROR, VM_STRING("External allocation failed\n"));
            return UMC_ERR_ALLOC;
          }
          tmpFrame_buf = (Ipp8u*)m_pMemoryAllocator->Lock(mid_tmpFrame_buf);
          if(!tmpFrame_buf) {
            vm_debug_trace(VM_DEBUG_ERROR, VM_STRING("External Lock failed\n"));
            return UMC_ERR_ALLOC;
          }
        }
        if (tmpFrame == NULL) {
          tmpFrame = new VideoData;
          InitInternalFrame(tmpFrame);
          SetInternalFramePointers(tmpFrame, tmpFrame_buf);
        }
        tmp_in = tmpFrame;
      }
      if(frame_loader == NULL) {
        frame_loader = new VideoProcessing;
      }
      Status sts = frame_loader->GetFrame(frame, tmp_in);
      if (sts != UMC_OK)
        return sts;
    }
  }
  Ipp32s w_shift = (encodeInfo.FieldPicture) ? 1 : 0;
  Ipp32s nYPitch = YFrameHSize << w_shift;
  Ipp32s nUPitch = UVFrameHSize << w_shift;
  Ipp32s nVPitch = UVFrameHSize << w_shift;

  Y_src = (Ipp8u*)tmp_in->GetPlanePointer(0);
  U_src = (Ipp8u*)tmp_in->GetPlanePointer(1);
  V_src = (Ipp8u*)tmp_in->GetPlanePointer(2);
  if (Y_src == NULL || U_src == NULL || V_src == NULL) {
    return UMC_ERR_NULL_PTR;
  }

  // can set picture coding params here according to VideoData
  PictureStructure ps = tmp_in->GetPictureStructure();
  top_field_first = (ps == PS_BOTTOM_FIELD_FIRST || encodeInfo.info.interlace_type == PROGRESSIVE) ? 0 : 1;

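  // Offsets of the lower 8x8 blocks within a macroblock, used for frame and
  // field DCT addressing. Indices 2-3 are the bottom luma blocks; 6-7 and
  // 10-11 are presumably the extra chroma blocks for 4:2:2 / 4:4:4; the
  // remaining entries are assumed to be initialized elsewhere.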
  block_offset_frm[2] = 8*nYPitch;
  block_offset_frm[3] = 8*nYPitch + 8;
  block_offset_frm[6] = 8*nUPitch;
  block_offset_frm[7] = 8*nVPitch;
  block_offset_frm[10] = 8*nUPitch + 8;
  block_offset_frm[11] = 8*nVPitch + 8;

  block_offset_fld[2] = nYPitch;
  block_offset_fld[3] = nYPitch + 8;
  block_offset_fld[6] = nUPitch;
  block_offset_fld[7] = nVPitch;
  block_offset_fld[10] = nUPitch + 8;
  block_offset_fld[11] = nVPitch + 8;

  // prepare out buffers
  mEncodedSize = 0;
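  // refuse to encode if the caller's buffer cannot hold roughly twice the
  // average frame size predicted by the rate control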
  if(output_buffer_size*8 < rc_ave_frame_bits*2) {
    return UMC_ERR_NOT_ENOUGH_BUFFER;
  }

  picture_coding_type = pict_type;

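  // Per-picture-type setup: reset or advance the B-frame counter and select
  // the motion-vector f_code set used for this picture.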
  switch (pict_type)
  {
  case I_PICTURE:
    B_count = 0;
    mp_f_code = 0;
    m_GOP_Start_tmp = m_GOP_Start;
    m_GOP_Start = encodeInfo.numEncodedFrames;
    break;

  case P_PICTURE:
    B_count = 0;
    mp_f_code = pMotionData[0].f_code;
    break;

  case B_PICTURE:
    B_count++;
    if (B_count >= encodeInfo.IPDistance) {
      B_count = 1;
    }

    mp_f_code = pMotionData[B_count].f_code;
    break;

  default:
    return UMC_ERR_INVALID_PARAMS;
  }

  if (picture_coding_type != B_PICTURE) {
    // scroll frames RefB <-> RefF
    VideoData **paux;  // refs to RotatingFrames, subject of rotation
    paux = pRotFrames[0];
    pRotFrames[0] = pRotFrames[1];
    pRotFrames[1] = paux;

  }
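  // Reference/reconstructed plane pointers: the first index selects the field
  // (0 = top/frame base, 1 = bottom field, one line below), the second selects
  // the forward (0) or backward (1) reference.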
  for(i=0; i<2; i++) { // forward/backward
    YRefFrame[0][i] = (Ipp8u*)(pRotFrames[i][0]->GetPlanePointer(0));
    YRecFrame[0][i] = (Ipp8u*)(pRotFrames[i][1]->GetPlanePointer(0));
    URefFrame[0][i] = (Ipp8u*)(pRotFrames[i][0]->GetPlanePointer(1));
    URecFrame[0][i] = (Ipp8u*)(pRotFrames[i][1]->GetPlanePointer(1));
    VRefFrame[0][i] = (Ipp8u*)(pRotFrames[i][0]->GetPlanePointer(2));
    VRecFrame[0][i] = (Ipp8u*)(pRotFrames[i][1]->GetPlanePointer(2));
    YRefFrame[1][i] = YRefFrame[0][i] + YFrameHSize;
    YRecFrame[1][i] = YRecFrame[0][i] + YFrameHSize;
    URefFrame[1][i] = URefFrame[0][i] + UVFrameHSize;
    URecFrame[1][i] = URecFrame[0][i] + UVFrameHSize;
    VRefFrame[1][i] = VRefFrame[0][i] + UVFrameHSize;
    VRecFrame[1][i] = VRecFrame[0][i] + UVFrameHSize;
  }

  if (!encodeInfo.FieldPicture) {
    curr_frame_pred = curr_frame_dct = encodeInfo.frame_pred_frame_dct[picture_coding_type - 1];
  } else {
    curr_frame_pred = !encodeInfo.allow_prediction16x8;
    curr_frame_dct = 1;
  }
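  // When the DCT type is fixed for the whole picture, alternate scan is tied
  // to field-picture coding (off for frame pictures, on for field pictures)
  // and the per-type table is updated to match; otherwise the configured
  // per-type setting is used as is.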
  if(curr_frame_dct == 1)
    curr_scan = encodeInfo.altscan_tab[picture_coding_type - 1] = encodeInfo.FieldPicture;
  else
    curr_scan = encodeInfo.altscan_tab[picture_coding_type - 1];

  curr_intra_vlc_format = encodeInfo.intraVLCFormat[picture_coding_type - 1];
  ipflag = 0;

  if (!encodeInfo.FieldPicture) {
    picture_structure = FRAME_PICTURE;
    curr_field = 0;
    second_field = 0;
    PutPicture();
  } else {
    Ipp8u *pSrc[3] = {Y_src, U_src, V_src};
    MBInfo *pMBInfo0 = pMBInfo;
    Ipp64s field_endpos = 0;
    Ipp32s ifield;

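    // Code each field as a half-height picture: vertical sizes and frame sizes
    // are halved, while the horizontal pitches are doubled so that stepping one
    // "line" skips the interleaved line of the opposite field. All values are
    // restored after the field loop.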
    YFrameVSize >>= 1;
    UVFrameVSize >>= 1;
    YFrameSize >>= 1; // ???
    UVFrameSize >>= 1;

    YFrameHSize<<=1;
    UVFrameHSize<<=1;

    for (ifield = 0; ifield < 2; ifield++) {
      picture_structure = (ifield != top_field_first) ? TOP_FIELD : BOTTOM_FIELD;
      curr_field   = (picture_structure == BOTTOM_FIELD) ? 1 :0;
      second_field = ifield;
      if (second_field && picture_coding_type == I_PICTURE) {
        ipflag = 1;
        picture_coding_type = P_PICTURE;
        mp_f_code = pMotionData[0].f_code;
      }

      if (picture_structure == TOP_FIELD) {
        Y_src = pSrc[0];
        U_src = pSrc[1];
        V_src = pSrc[2];
        if (picture_coding_type == P_PICTURE && ifield) {
          YRefFrame[1][0] = YRefFrame[1][1];
          URefFrame[1][0] = URefFrame[1][1];
          VRefFrame[1][0] = VRefFrame[1][1];
          YRecFrame[1][0] = YRecFrame[1][1];
          URecFrame[1][0] = URecFrame[1][1];
          VRecFrame[1][0] = VRecFrame[1][1];
        }
        pMBInfo = pMBInfo0;
      } else {
        Y_src = pSrc[0] + (YFrameHSize >> 1);
        U_src = pSrc[1] + (UVFrameHSize >> 1);
        V_src = pSrc[2] + (UVFrameHSize >> 1);
        if (picture_coding_type == P_PICTURE && ifield) {
          YRefFrame[0][0] = YRefFrame[0][1];
          URefFrame[0][0] = URefFrame[0][1];
          VRefFrame[0][0] = VRefFrame[0][1];
          YRecFrame[0][0] = YRecFrame[0][1];
          URecFrame[0][0] = URecFrame[0][1];
          VRecFrame[0][0] = VRecFrame[0][1];
        }
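        // macroblock info for the bottom field follows the top field's
        // (YFrameSize here is already the per-field size)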
        pMBInfo = pMBInfo0 + (YFrameSize/(16*16));
      }

      PutPicture();
      field_endpos = 8*mEncodedSize;
      ipflag = 0;
    }

    Y_src = pSrc[0];
    U_src = pSrc[1];
    V_src = pSrc[2];

    // restore params
    pMBInfo = pMBInfo0;

    YFrameVSize <<= 1;
    UVFrameVSize <<= 1;
    YFrameSize <<= 1;
    UVFrameSize <<= 1;
    YFrameHSize>>=1;
    UVFrameHSize>>=1;
  }

  if (encodeInfo.me_auto_range)
  { // adjust ME search range
    if (picture_coding_type == P_PICTURE) {
      AdjustSearchRange(0, 0);
    } else if (picture_coding_type == B_PICTURE) {
      AdjustSearchRange(B_count, 0);
      AdjustSearchRange(B_count, 1);
    }
  }

  if(encodeInfo.IPDistance>1) closed_gop = 0;
  encodeInfo.numEncodedFrames++;
  out->SetDataSize(out->GetDataSize() + mEncodedSize);
  out->SetFrameType(picture_coding_type);

  return (UMC_OK);
}

void MPEG2VideoEncoderBase::setUserData(vm_char* data, Ipp32s len)
{
  encodeInfo.UserData = data;
  encodeInfo.UserDataLen = len;
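  // Note: only the pointer and length are stored; the caller must keep `data`
  // alive until it has been written into the stream (presumably as MPEG-2
  // user_data during header/picture emission).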
}

Status MPEG2VideoEncoderBase::PutPicture()
{
  Ipp32s i;
  Ipp32s isfield;
  Ipp32s bitsize;
  Ipp64f target_size;
  Ipp32s wanted_size;
  Ipp32s size;
  Ipp8u *p;
  Status status;

  Ipp32s prevq=0, oldq;
  Ipp32s ntry;

  bQuantiserChanged = false;
  bSceneChanged = false;
