// umc_h264_enc_cpb.cpp
//
// INTEL CORPORATION PROPRIETARY INFORMATION
// This software is supplied under the terms of a license agreement or
// nondisclosure agreement with Intel Corporation and may not be copied
// or disclosed except in accordance with the terms of that agreement.
// Copyright (c) 2004 - 2007 Intel Corporation. All Rights Reserved.
#include "umc_h264_video_encoder.h"
#include "umc_h264_enc_cpb.h"
#include "umc_h264_tables.h"
#include "umc_video_processing.h"
namespace UMC_H264_ENCODER
{
template <class PixType>
H264EncoderFrame<PixType>::H264EncoderFrame( VideoData* in, MemoryAllocator *pMemAlloc
#if defined ALPHA_BLENDING_H264
, Ipp32s alpha
#endif
, Ipp32s downScale
) :
VideoData()
, m_RefPic(false)
, m_wasEncoded(false)
, m_bIsIDRPic(false)
, frameData(0)
, m_pRefPicList(0)
, m_FrameNum(0)
, m_NumSlices(1)
, m_pPreviousFrame(NULL)
, m_pFutureFrame(NULL)
, pts_start(-1)
, pts_end(-1)
#if defined ALPHA_BLENDING_H264
, m_isAuxiliary(0)
, alpha_plane(-1)
#endif
, memAlloc(pMemAlloc)
, frameDataID(0)
, mbsDataID(0)
, refListDataID(0)
{
m_paddedParsedFrameDataSize.height = m_paddedParsedFrameDataSize.width = 0;
this->m_pitchBytes = 0; //TODO call base class constructor
this->m_pitchPixels = 0; //TODO call base class constructor
mbsData = NULL;
m_isShortTermRef[0] = m_isShortTermRef[1] = false;
m_isLongTermRef[0] = m_isLongTermRef[1] = false;
m_FrameNumWrap = m_FrameNum = -1;
m_LongTermFrameIdx = -1;
m_RefPicListResetCount[0] = m_RefPicListResetCount[1] = 0;
m_PicNum[0] = m_PicNum[1] = -1;
m_LongTermPicNum[0] = m_LongTermPicNum[1] = -1;
m_PicOrderCounterAccumulated = m_PicOrderCnt[0] = m_PicOrderCnt[1] = 0;
m_macroBlockSize.height = m_macroBlockSize.width = 0;
m_pYPlane = m_pUPlane = m_pVPlane = NULL;
//Init VideoData parameters
ColorFormat cf = in->GetColorFormat();
ColorFormat out_cf = YUV420;
//Use color conversion for other formats
switch( cf ){
case GRAY:
case YUV420:
case YUV422:
out_cf = cf;
break;
#if defined ALPHA_BLENDING_H264
case GRAYA:
if(!alpha) out_cf = GRAY;
else out_cf = cf;
alpha_plane = 1;
break;
case YUV420A:
if(!alpha) out_cf = YUV420;
else out_cf = cf;
alpha_plane = 3;
break;
case YUV422A:
if(!alpha) out_cf = YUV422;
else out_cf = cf;
alpha_plane = 3;
break;
#endif
}
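// Encode at half the input resolution when downscaling is requested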
if( downScale ){
uWidth = in->GetWidth()>>1;
uHeight = in->GetHeight()>>1;
}else{
uWidth = in->GetWidth();
uHeight = in->GetHeight();
}
Init( uWidth, uHeight, out_cf);
//Set bitdepth to maximum bitdepth
Ipp32s max_bit_depth = in->GetPlaneBitDepth( 0 );
Ipp32s i;
for( i = 1; i < GetNumPlanes(); i++ )
if( max_bit_depth < in->GetPlaneBitDepth( i ) ) max_bit_depth = in->GetPlaneBitDepth(i);
for( i = 0; i < GetNumPlanes(); i++ ) SetPlaneBitDepth( max_bit_depth, i );
}
template <class PixType> void H264EncoderFrame<PixType>::deallocateParsedFrameData()
{
if (refListDataID){
memAlloc->Unlock( refListDataID );
memAlloc->Free( refListDataID );
refListDataID = 0;
refListData = 0;
m_pRefPicList = 0;
}
if (mbsDataID){
memAlloc->Unlock( mbsDataID );
memAlloc->Free( mbsDataID );
mbsDataID = 0;
mbsData = NULL;
}
m_paddedParsedFrameDataSize.width = m_paddedParsedFrameDataSize.height = 0;
}
template <class PixType>
Status H264EncoderFrame<PixType>::allocateParsedFrameData(const IppiSize &size, Ipp32s num_slices)
{
Status ps = UMC_OK;
IppiSize desiredPaddedSize;
m_NumSlices = num_slices;
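// Round the frame dimensions up to whole 16x16 macroblocks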
desiredPaddedSize.width = (size.width + 15) & ~15;
desiredPaddedSize.height = (size.height + 15) & ~15;
// If our buffer and internal pointers are already set up for this
// image size, then there's nothing more to do.
if (m_paddedParsedFrameDataSize.height != desiredPaddedSize.height || m_paddedParsedFrameDataSize.width != desiredPaddedSize.width)
{
// Determine how much space we need
Ipp32u MB_Frame_Width = desiredPaddedSize.width >> 4;
Ipp32u MB_Frame_Height = desiredPaddedSize.height >> 4;
Ipp32u uMaxNumSlices = (MAX_SLICE_NUM < 0) ?
MB_Frame_Width * MB_Frame_Height :
MIN(MAX_SLICE_NUM,MB_Frame_Width * MB_Frame_Height);
Ipp32u uRefPicListSize = uMaxNumSlices * sizeof(EncoderRefPicList<PixType>);
Ipp32u totalSize = Ipp32u(uRefPicListSize + 7 + YUV_ALIGNMENT);
deallocateParsedFrameData();
Ipp32s len = IPP_MAX(1, totalSize);
if (UMC_OK != memAlloc->Alloc(&refListDataID, len, UMC_ALLOC_PERSISTENT))
return UMC_ERR_ALLOC;
refListData = (Ipp8u *) memAlloc->Lock(refListDataID);
ippsZero_8u(refListData, len);
// Reassign our internal pointers
m_paddedParsedFrameDataSize = desiredPaddedSize;
Ipp8u *alignedData = (Ipp8u*) align_pointer<Ipp8u *> (refListData, YUV_ALIGNMENT);
Ipp32u offset = (Ipp32u)(alignedData - refListData) + uRefPicListSize;
VM_ASSERT(offset <= totalSize);
m_pRefPicList = (EncoderRefPicList<PixType>*)alignedData;
// allocate new MB structure(s)
Ipp32s nMBCount = (desiredPaddedSize.width>>4) * (desiredPaddedSize.height>>4);
// allocate buffer
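// One block holds, per macroblock: L0/L1 motion vectors, L0/L1 reference
// indices and the global MB info, plus 16 bytes of alignment slack per array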
len = (sizeof(H264MacroblockMVs) +
sizeof(H264MacroblockMVs) +
sizeof(H264MacroblockRefIdxs) +
sizeof(H264MacroblockRefIdxs) +
sizeof(H264MacroblockGlobalInfo)) * nMBCount + 16 * 5;
#if defined ALPHA_BLENDING_H264
if( alpha_plane >= 0 ) len *= 2;
#endif
// allocate buffer
if (UMC_OK != memAlloc->Alloc(&mbsDataID, len, UMC_ALLOC_PERSISTENT))
return UMC_ERR_ALLOC;
mbsData = (Ipp8u *) memAlloc->Lock(mbsDataID);
ippsZero_8u(mbsData, len);
// set pointer(s)
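// Carve the aligned per-MB arrays out of the single mbsData allocation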
m_mbinfo.MV[0] = align_pointer<H264MacroblockMVs *> (mbsData, ALIGN_VALUE);
m_mbinfo.MV[1] = align_pointer<H264MacroblockMVs *> (m_mbinfo.MV[0]+ nMBCount, ALIGN_VALUE);
m_mbinfo.RefIdxs[0] = align_pointer<H264MacroblockRefIdxs *> (m_mbinfo.MV[1] + nMBCount, ALIGN_VALUE);
m_mbinfo.RefIdxs[1] = align_pointer<H264MacroblockRefIdxs *> (m_mbinfo.RefIdxs[0] + nMBCount, ALIGN_VALUE);
m_mbinfo.mbs = align_pointer<H264MacroblockGlobalInfo *> (m_mbinfo.RefIdxs[1] + nMBCount, ALIGN_VALUE);
#if defined ALPHA_BLENDING_H264
if( alpha_plane >= 0 ){
m_mbinfo_prim = m_mbinfo;
m_mbinfo_alpha.MV[0] = align_pointer<H264MacroblockMVs *> (m_mbinfo_prim.mbs + nMBCount, ALIGN_VALUE);
m_mbinfo_alpha.MV[1] = align_pointer<H264MacroblockMVs *> (m_mbinfo_alpha.MV[0]+ nMBCount, ALIGN_VALUE);
m_mbinfo_alpha.RefIdxs[0] = align_pointer<H264MacroblockRefIdxs *> (m_mbinfo_alpha.MV[1] + nMBCount, ALIGN_VALUE);
m_mbinfo_alpha.RefIdxs[1] = align_pointer<H264MacroblockRefIdxs *> (m_mbinfo_alpha.RefIdxs[0] + nMBCount, ALIGN_VALUE);
m_mbinfo_alpha.mbs = align_pointer<H264MacroblockGlobalInfo *> (m_mbinfo_alpha.RefIdxs[1] + nMBCount, ALIGN_VALUE);
}
#endif
num_slices*=2;//for fields
}
return ps;
} // H264EncoderFrame::allocateParsedFrameData
template <class PixType> H264EncoderFrameList<PixType>::H264EncoderFrameList(MemoryAllocator *pMemAlloc)
: m_pHead(0)
, m_pTail(0)
, m_pCurrent(0)
, memAlloc( pMemAlloc )
{
}
template <class PixType> H264EncoderFrame<PixType>::~H264EncoderFrame()
{
deallocateParsedFrameData();
if (frameDataID) {
memAlloc->Unlock( frameDataID );
memAlloc->Free( frameDataID );
frameData = NULL;
frameDataID = 0;
}
}
template <class PixType>
Status H264EncoderFrame<PixType>::allocate(const IppiSize &picSize, Ipp32s num_slices)
{
Status ps = UMC_OK;
// Clear our state, since allocate is called when we are about
// to encode into this frame buffer.
m_wasEncoded = false;
// Don't reset m_activeReference or m_lockedForDisplay as these are handled
// depending on frame type or by the calling application, respectively
ps = allocateParsedFrameData(picSize, num_slices);
if (ps == UMC_OK){
Ipp32s newSize;
Ipp32s pitchInBytes;
// Y plane dimensions better be even, so width/2 correctly gives U,V size
// YUV_ALIGNMENT must be a power of 2. Since this is unlikely to change,
// and since checking for a power of 2 is painful, let's just put a simple
// VM_ASSERT here, that can be updated as needed for other powers of 2.
pitchInBytes = CalcPitchFromWidth(picSize.width, sizeof(PixType));
ColorFormat cf = GetColorFormat();
switch( cf ){
case GRAY:
newSize = pitchInBytes * (picSize.height + LUMA_PADDING * 2) + pitchInBytes*2;
break;
case YUV420:
newSize = pitchInBytes * (picSize.height + LUMA_PADDING * 2) +
pitchInBytes * (picSize.height / 2 + CHROMA_PADDING * 2) + pitchInBytes*2;
break;
case YUV422:
newSize = pitchInBytes * (picSize.height + LUMA_PADDING * 2) +
pitchInBytes * (picSize.height + CHROMA_PADDING * 2) + pitchInBytes*2;
break;
#if defined ALPHA_BLENDING_H264
case GRAYA:
newSize = pitchInBytes * (picSize.height + LUMA_PADDING * 2)*2 + pitchInBytes*3;
break;
case YUV420A:
newSize = pitchInBytes * (picSize.height + LUMA_PADDING * 2)*2 +
pitchInBytes * (picSize.height / 2 + CHROMA_PADDING * 2) + pitchInBytes*3;
break;
case YUV422A:
newSize = pitchInBytes * (picSize.height + LUMA_PADDING * 2)*2 +
pitchInBytes * (picSize.height + CHROMA_PADDING * 2) + pitchInBytes*3;
break;
#endif
}
if (frameDataID) {
memAlloc->Unlock( frameDataID );
memAlloc->Free( frameDataID );
frameDataID=0;
}
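// Allocate with extra DATA_ALIGN slack so each plane pointer can be aligned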
if (UMC_OK != memAlloc->Alloc(&frameDataID,newSize + (DATA_ALIGN<<2) + DATA_ALIGN,UMC_ALLOC_PERSISTENT))
return UMC_ERR_ALLOC;
frameData = (Ipp8u *) memAlloc->Lock(frameDataID);
m_lumaSize = picSize;
m_pitchBytes = pitchInBytes;
m_pitchPixels = pitchInBytes/sizeof(PixType);
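// Place the luma origin past the LUMA_PADDING border (rows and columns), aligned to DATA_ALIGN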
m_pYPlane = align_pointer<PixType*> (frameData + LUMA_PADDING *(1 + pitchInBytes), DATA_ALIGN);
SetPlanePointer(m_pYPlane,0);
SetPlanePitch( m_pitchBytes, 0 );
Ipp32s lumaSZ = pitchInBytes * (picSize.height + LUMA_PADDING * 2 + 1);
#if defined ALPHA_BLENDING_H264
if( cf == YUV422A || cf == YUV420A || cf == GRAYA){
PixType* ptr = align_pointer<PixType*> (frameData +
pitchInBytes * (picSize.height + LUMA_PADDING * 2 + 1) +
LUMA_PADDING * (1+pitchInBytes), DATA_ALIGN);
SetPlanePointer(ptr,alpha_plane);
SetPlanePitch( m_pitchBytes, alpha_plane );
lumaSZ *= 2;
}
#endif
if( cf != GRAY
#ifdef ALPHA_BLENDING_H264
&& cf != GRAYA
#endif
){
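// U and V share rows: V starts half a pitch after U, so one chroma block of
// height/2 rows (4:2:0) or height rows (4:2:2) holds both planes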
m_pUPlane = align_pointer<PixType*> (frameData + lumaSZ +
CHROMA_PADDING * (1+pitchInBytes), DATA_ALIGN);
m_pVPlane = m_pUPlane + ((pitchInBytes / sizeof(PixType))/ 2);
SetPlanePointer(m_pUPlane,1);
SetPlanePitch( m_pitchBytes, 1 );
SetPlanePointer(m_pVPlane,2);
SetPlanePitch( m_pitchBytes, 2 );
}
m_macroBlockSize.width = picSize.width >> 4;
m_macroBlockSize.height = picSize.height >> 4;
totalMBs = m_macroBlockSize.width * m_macroBlockSize.height;
}
return ps;
}
template <class PixType>
void H264EncoderFrame<PixType>::UpdateFrameNumWrap(Ipp32s CurrFrameNum, Ipp32s MaxFrameNum, Ipp32s CurrPicStruct)
{
if (isShortTermRef())
{
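// FrameNumWrap accounts for frame_num wrap-around: a reference with a larger
// frame_num than the current picture is treated as preceding it by MaxFrameNum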
m_FrameNumWrap = m_FrameNum;
if (m_FrameNum > CurrFrameNum)
m_FrameNumWrap -= MaxFrameNum;
if (CurrPicStruct>=FRM_STRUCTURE)
{
setPicNum(m_FrameNumWrap,0);
m_PictureStructureForRef = FRM_STRUCTURE;
}
else
{
m_PictureStructureForRef = FLD_STRUCTURE;
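// Field references: the field with the same parity as the current picture
// gets PicNum = 2*FrameNumWrap + 1, the opposite-parity field 2*FrameNumWrap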
if (m_bottom_field_flag[0])
{
//1st - bottom, 2nd - top
if (isShortTermRef(0)) m_PicNum[0] = (2*m_FrameNumWrap)+(CurrPicStruct==BOTTOM_FLD_STRUCTURE);
if (isShortTermRef(1)) m_PicNum[1] = (2*m_FrameNumWrap)+(CurrPicStruct==TOP_FLD_STRUCTURE);
}
else
{
//1st - top , 2nd - bottom
if (isShortTermRef(0)) setPicNum((2*m_FrameNumWrap)+(CurrPicStruct==TOP_FLD_STRUCTURE),0);
if (isShortTermRef(1)) setPicNum((2*m_FrameNumWrap)+(CurrPicStruct==BOTTOM_FLD_STRUCTURE),1);
}
}
}
} // UpdateFrameNumWrap
//////////////////////////////////////////////////////////////////////////////
// UpdateLongTermPicNum
// Updates m_LongTermPicNum when this frame is a long-term reference,
// based upon m_LongTermFrameIdx.
//////////////////////////////////////////////////////////////////////////////
template <class PixType> void H264EncoderFrame<PixType>::UpdateLongTermPicNum(Ipp32s CurrPicStruct)
{
if (isLongTermRef())
{
if (CurrPicStruct>=FRM_STRUCTURE)
{
m_LongTermPicNum[0] = m_LongTermFrameIdx;
m_LongTermPicNum[1] = m_LongTermFrameIdx;
}
else
{
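// Same parity as the current field -> 2*LongTermFrameIdx + 1, opposite parity -> 2*LongTermFrameIdx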
if (m_bottom_field_flag[0])
{
//1st - bottom, 2nd - top
m_LongTermPicNum[0] = 2*m_LongTermFrameIdx+(CurrPicStruct==BOTTOM_FLD_STRUCTURE);
m_LongTermPicNum[1] = 2*m_LongTermFrameIdx+(CurrPicStruct==TOP_FLD_STRUCTURE);
}
else
{
//1st - top , 2nd - bottom
m_LongTermPicNum[0] = 2*m_LongTermFrameIdx+(CurrPicStruct==TOP_FLD_STRUCTURE);
m_LongTermPicNum[1] = 2*m_LongTermFrameIdx+(CurrPicStruct==BOTTOM_FLD_STRUCTURE);
}
}
}
} // UpdateLongTermPicNum
#if defined ALPHA_BLENDING_H264
template <class PixType>
inline void H264EncoderFrame<PixType>::useAux()
{
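// Assumed minimal body (a sketch, not the original implementation): switch the
// frame to the auxiliary (alpha) macroblock info prepared in
// allocateParsedFrameData(); a full implementation would likely also re-point
// the working plane pointers at the alpha plane.
m_mbinfo = m_mbinfo_alpha;
m_isAuxiliary = 1;
}
#endif // ALPHA_BLENDING_H264
} // namespace UMC_H264_ENCODER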