📄 umc_h264_gen_enc.cpp
字号:
}
*/
re_encode_slice:
// Compress one slice
if (m_is_cur_pic_afrm){
m_Slices[slice].status = Compress_Slice_MBAFF(m_Slices + slice);
}else{
/* if( m_Slices[slice].m_slice_type == BPREDSLICE ){
Ipp32s costP,costB,mbICount;
m_Slices[slice].m_slice_type = PREDSLICE;
costP = ME_EstimateSliceTypeCost(m_Slices + slice, slice == m_info.num_slices*m_field_index);
mbICount = m_Slices[slice].m_Intra_MB_Counter;
m_Slices[slice].m_slice_type = BPREDSLICE;
costB = ME_EstimateSliceTypeCost(m_Slices + slice, slice == m_info.num_slices*m_field_index);
fprintf(stderr," slice cost P = %d B = %d %d %d %d\n",costP,costB, mbICount, m_Slices[slice].m_Intra_MB_Counter, m_Slices[slice].m_MB_Counter);
if( costP < costB || m_Slices[slice].m_Intra_MB_Counter > m_Slices[slice].m_MB_Counter/3 ){
m_Slices[slice].m_slice_type = PREDSLICE;
if( ePic_Class == DISPOSABLE_PIC) m_uFrameCounter++;
m_PicClass = ePic_Class = REFERENCE_PIC;
//m_pCurrentFrame->setFrameNum(m_pCurrentFrame->getFrameNum()+1);
UpdateRefPicList(m_Slices + slice, m_pCurrentFrame->GetRefPicLists(slice), m_SliceHeader, &m_ReorderInfoL0, &m_ReorderInfoL1);
}
}
*/ m_Slices[slice].status = Compress_Slice(m_Slices + slice, slice == m_info.num_slices*m_field_index);
// fprintf(stderr," real numbers %d\n", m_Slices[slice].m_Intra_MB_Counter );
}
EnumSliceType &slice_type = m_Slices[slice].m_slice_type;
if ((slice_type == PREDSLICE) || (slice_type == BPREDSLICE)){
if (Intra_Slice_Test()){ // Force this slice to be INTRA
slice_type = INTRASLICE;
m_pCurrentFrame->SetPicCodType(INTRAPIC);
m_Slices[slice].m_pbitstream->ResetRBSP(); // Reset the RBSP
goto re_encode_slice; //FIXME: Do we need to restore original frame?
}
}
if (!m_PicParamSet.entropy_coding_mode)
m_Slices[slice].m_pbitstream->WriteTrailingBits(); // Write RBSP Trailing Bits
}
//Write slice to the stream in order
// Copy Slice RBSP to the end of the output buffer after
// Adding start codes and SC emulation prevention.
for (slice = (Ipp32s)m_info.num_slices*m_field_index; slice < m_info.num_slices*(m_field_index+1); slice++){
if(dst->GetDataSize() + (Ipp32s)m_Slices[slice].m_pbitstream->GetBsSize() + 5 /* possible extra bytes */ > dst->GetBufferSize()) {
bufferOverflowFlag = true;
}else {
//Write to output bitstream
dst->SetDataSize( dst->GetDataSize() +
m_Slices[slice].m_pbitstream->EndOfNAL(
(Ipp8u*)dst->GetDataPointer()+dst->GetDataSize(),
(ePic_Class != DISPOSABLE_PIC),
#if defined ALPHA_BLENDING_H264
(alpha) ? NAL_UT_LAYERNOPART :
#endif // ALPHA_BLENDING_H264
((ePic_Class == IDR_PIC) ? NAL_UT_IDR_SLICE : NAL_UT_SLICE),
startPicture));
}
buffersNotFull = buffersNotFull && m_Slices[slice].m_pbitstream->CheckBsLimit();
}
if(bufferOverflowFlag ){
if(m_Analyse & ANALYSE_RECODE_FRAME) { //Output buffer overflow
goto recode_check;
}else{
status = UMC::UMC_ERR_NOT_ENOUGH_BUFFER;
goto done;
}
}
// check for buffer overrun on some of slices
if (!buffersNotFull ){
if( m_Analyse & ANALYSE_RECODE_FRAME ){
goto recode_check;
}else{
status = EncodeDummyFrame(dst);
m_bMakeNextFrameKey = true;
if (status == UMC_OK) goto end_of_frame;
else goto done;
}
}
//Deblocking all frame/field
for (slice = (Ipp32s)m_info.num_slices*m_field_index; slice < m_info.num_slices*(m_field_index+1); slice++){
if (m_Slices[slice].status != UMC_OK){
// It is unreachable in the current implementation, so there is no problem!!!
m_bMakeNextFrameKey = true;
VM_ASSERT(0);// goto done;
}else if(ePic_Class != DISPOSABLE_PIC){
DeblockSlice(m_Slices + slice, m_Slices[slice].m_first_mb_in_slice + m_WidthInMBs*m_HeightInMBs*m_field_index,
m_Slices[slice].m_MB_Counter, slice == m_info.num_slices - 1);
}
}
end_of_frame:
End_Picture();
m_HeightInMBs <<= (Ipp8u)(m_pCurrentFrame->m_PictureStructureForDec < FRM_STRUCTURE); //Do we need it here?
#if defined ALPHA_BLENDING_H264
} while(!alpha && m_SeqParamSet.aux_format_idc );
if(alpha){
// m_pCurrentFrame->usePrimary();
m_cpb.switchToPrimary();
m_dpb.switchToPrimary();
}
#endif // ALPHA_BLENDING_H264
for(slice = 0; slice < m_info.num_slices*((m_info.coding_type == 1) + 1); slice++) //TODO fix for PicAFF/AFRM
m_total_bits_encoded += m_pbitstreams[slice]->GetBsOffset();
switch (ePic_Class){
case IDR_PIC:
case REFERENCE_PIC:
UpdateRefPicMarking();
break;
case DISPOSABLE_PIC:
default:
// Unsupported Picture Class
break;
}
}
#if 0
char t;
switch( ePictureType ){
case INTRAPIC:
t='I';
break;
case PREDPIC:
t='P';
break;
case BPREDPIC:
t='B';
break;
}
fprintf(stderr, "%c\t%d\t%d\n",t, (Ipp32s)m_total_bits_encoded, avbr.GetQP(m_PicType));
#endif
if (m_info.rate_controls.method == H264_RCM_VBR || m_info.rate_controls.method == H264_RCM_CBR) {
avbr.PostFrame(m_PicType, (Ipp32s)m_total_bits_encoded);
}
recode_check:
if( (m_Analyse & ANALYSE_RECODE_FRAME) && ( bufferOverflowFlag || !buffersNotFull ) ){ //check frame size to be less than output buffer otherwise change qp and recode frame
Ipp32s qp = avbr.GetQP(m_PicType);
if( qp == 51 ){
status = UMC::UMC_ERR_NOT_ENOUGH_BUFFER;
goto done;
}
avbr.SetQP(m_PicType, ++qp);
frame_recode = 1;
bufferOverflowFlag = false;
buffersNotFull = true;
}else frame_recode = 0;
}while( frame_recode );
if( m_Analyse & ANALYSE_RECODE_FRAME ){
exchangeFrameYUVPointers(m_pReconstructFrame, m_pCurrentFrame);
delete m_pReconstructFrame;
}
// ------------------------------------------------------------------------
// Copy the compressed image to the output area.
// ------------------------------------------------------------------------
if (dst->GetDataSize() == 0)
{
m_bMakeNextFrameKey = true;
status = UMC_ERR_FAILED;
goto done;
}
// ------------------------------------------------------------------------
// update states for next frame, etc.
// ------------------------------------------------------------------------
// Increment the Frame Number (decode order), not incremented for any frame
// directly preceded (in decode order) by a disposable frame.
#ifdef H264_STAT
hstats.addFrame( ePictureType, dst->GetDataSize());
#endif
m_uFrames_Num++;
done:
return status;
}
//
// Hand the frame currently being encoded over from the coded picture
// buffer (CPB) to the decoded picture buffer (DPB).
//
template <class PixType, class CoeffsType> Status H264CoreEncoder<PixType,CoeffsType>::MoveFromCPBToDPB()
{
    // Order matters: the frame must be unlinked from the CPB list
    // before it is re-linked into the DPB at the current position.
    m_cpb.RemoveFrame(m_pCurrentFrame);
    m_dpb.insertAtCurrent(m_pCurrentFrame);
    return UMC_OK;
}
template <class PixType, class CoeffsType> Status H264CoreEncoder<PixType,CoeffsType>::CleanDPB()
{
    // Evict every frame the DPB reports as disposable, returning each
    // one to the CPB. findNextDisposable() is re-queried after every
    // removal, so the loop drains the DPB of all disposable frames.
    for (H264EncoderFrame<PixType> *frame = m_dpb.findNextDisposable();
         frame != NULL;
         frame = m_dpb.findNextDisposable())
    {
        m_dpb.RemoveFrame(frame);
        m_cpb.insertAtCurrent(frame);
    }
    return UMC_OK;
}
/*************************************************************
* Name: SetSequenceParameters
* Description: Fill in the Sequence Parameter Set for this
* sequence. Can only change at an IDR picture.
*************************************************************/
template <class PixType, class CoeffsType> void H264CoreEncoder<PixType,CoeffsType>::SetSequenceParameters()
{
m_SeqParamSet.profile_idc = m_info.profile_idc;
// We don't meet any of these contraints yet
m_SeqParamSet.constraint_set0_flag = 0;
m_SeqParamSet.constraint_set1_flag = 1;
m_SeqParamSet.constraint_set2_flag = 0;
m_SeqParamSet.constraint_set3_flag = 0;
m_SeqParamSet.seq_parameter_set_id = 0;
// Frame numbers are mod 256.
m_SeqParamSet.log2_max_frame_num = 8;
// Setup pic_order_cnt_type based on use of B frames.
// Note! pic_order_cnt_type == 1 is not implemented
// The following are not transmitted in either case below, and are
// just initialized here to be nice.
m_SeqParamSet.delta_pic_order_always_zero_flag = 0;
m_SeqParamSet.offset_for_non_ref_pic = 0;
m_SeqParamSet.poffset_for_ref_frame = NULL;
m_SeqParamSet.num_ref_frames_in_pic_order_cnt_cycle = 0;
if (m_info.B_frame_rate == 0 && m_info.coding_type == 0)
{
m_SeqParamSet.pic_order_cnt_type = 2;
m_SeqParamSet.log2_max_pic_order_cnt_lsb = 0;
// Right now this only supports simple P frame patterns (e.g. H264PPPP...)
} else {
//Ipp32s log2_max_poc = (Ipp32u)log(((Ipp64f)m_info.B_frame_rate +
// m_info.num_ref_to_start_code_B_slice)/log((Ipp64f)2) + 1) << 1;
Ipp32s log2_max_poc = (Ipp32s) (log((Ipp64f)((m_info.B_frame_rate<<((m_info.treat_B_as_reference==2)?1:0))+ m_info.num_ref_frames))
/ log(2.0)) + 3; // 3=1+1+1=round+multiply by 2 in counting+devide by 2 in comparison
m_SeqParamSet.log2_max_pic_order_cnt_lsb = IPP_MAX(log2_max_poc, 4);
if (m_SeqParamSet.log2_max_pic_order_cnt_lsb > 16)
{
VM_ASSERT(false);
m_SeqParamSet.log2_max_pic_order_cnt_lsb = 16;
}
m_SeqParamSet.pic_order_cnt_type = 0;
// Right now this only supports simple B frame patterns (e.g. IBBPBBP...)
}
m_SeqParamSet.num_ref_frames = m_info.num_ref_frames;
// Note! NO code after this point supports pic_order_cnt_type == 1
// Always zero because we don't support field encoding
m_SeqParamSet.offset_for_top_to_bottom_field = 0;
m_SeqParamSet.frame_mbs_only_flag = (m_info.coding_type)? 0: 1;
m_SeqParamSet.gaps_in_frame_num_value_allowed_flag = 0;
m_SeqParamSet.mb_adaptive_frame_field_flag = m_info.coding_type>1;
// If set to 1, 8x8 blocks in Direct Mode always use 1 MV,
// obtained from the "outer corner" 4x4 block, regardless
// of how the CoLocated 8x8 is split into subblocks. If this
// is 0, then the 8x8 in Direct Mode is subdivided exactly as
// the Colocated 8x8, with the appropriate number of derived MVs.
m_SeqParamSet.direct_8x8_inference_flag =
m_info.use_direct_inference || !m_SeqParamSet.frame_mbs_only_flag ? 1 : 0;
// Picture Dimensions in MBs
m_SeqParamSet.frame_width_in_mbs = ((m_info.info.clip_info.width+15)>>4);
m_SeqParamSet.frame_height_in_mbs = ((m_info.info.clip_info.height+(16<<(1 - m_SeqParamSet.frame_mbs_only_flag)) - 1)>>4) >> (1 - m_SeqParamSet.frame_mbs_only_flag);
Ipp32s frame_height_in_mbs = m_SeqParamSet.frame_height_in_mbs << (1 - m_SeqParamSet.frame_mbs_only_flag);
// If the width & height in MBs doesn't match the image dimensions then do
// some cropping in the decoder
if (((m_SeqParamSet.frame_width_in_mbs<<4) != m_info.info.clip_info.width) ||
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -