umc_h264_task_supplier.cpp
m_parsedDataLength = totalSize;
}
// Reassign our internal pointers if need be
if (m_paddedParsedDataSize.width != desiredPaddedSize.width ||
m_paddedParsedDataSize.height != desiredPaddedSize.height)
{
m_paddedParsedDataSize = desiredPaddedSize;
size_t offset = 0;
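// The single m_pParsedData allocation is carved into the macroblock map
// followed by two next-MB tables; each sub-buffer is aligned with
// align_pointer, and the running offset is rounded up to an 8-byte boundary
// via (offset + 7) & ~7 (e.g. an offset of 13 becomes 16).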
m_pMBMap = align_pointer<Ipp8u *> (m_pParsedData);
offset += uMBMapSize;
if (offset & 0x7)
offset = (offset + 7) & ~7;
next_mb_tables[0] = align_pointer<H264DecoderMBAddr *> (m_pParsedData + offset);
offset += next_mb_size;
if (offset & 0x7)
offset = (offset + 7) & ~7;
next_mb_tables[1] = align_pointer<H264DecoderMBAddr *> (m_pParsedData + offset);
offset += next_mb_size;
//initialize first table
Ipp32s i;
for (i = 0; i < uMBMapSize; i++)
next_mb_tables[0][i] = i + 1; // simple linear scan
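// With a single slice group the next macroblock in decoding order is simply
// the next raster address, hence i + 1; next_mb_tables[1] is presumably
// rebuilt per picture when FMO slice groups reorder the macroblock scan.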
// Note that most of the slice data buffer will usually be unused,
// since it is allocated for the worst case. Only the first few entries
// (and frequently only the first) are typically used.
}
return umcRes;
}
void TaskSupplier::DeallocateBuffers()
{
if (m_pParsedData)
{
// Free the old buffer.
m_pMemoryAllocator->Unlock(m_midParsedData);
m_pMemoryAllocator->Free(m_midParsedData);
m_pParsedData = 0;
m_midParsedData = 0;
}
m_parsedDataLength = 0;
m_paddedParsedDataSize.width = 0;
m_paddedParsedDataSize.height = 0;
}
H264DecoderFrame *TaskSupplier::GetFreeFrame(void)
{
H264DecoderFrame *pFrame = 0;
// Traverse list for next disposable frame
pFrame = m_pDecodedFramesList->GetOldestDisposable();
VM_ASSERT(!pFrame || pFrame->GetBusyState() == 0);
// Did we find one?
if (NULL == pFrame)
{
if (m_pDecodedFramesList->countAllFrames() > m_dpbSize + m_DPBSizeEx)
{
return 0;
}
// Didn't find one. Let's try to insert a new one
pFrame = new H264DecoderFrameExtension(m_pMemoryAllocator);
if (NULL == pFrame)
return 0;
m_pDecodedFramesList->append(pFrame);
}
pFrame->GetAU(0)->SetHeaps(&m_Heap, &m_SlicesHeap);
pFrame->GetAU(1)->SetHeaps(&m_Heap, &m_SlicesHeap);
pFrame->Reset();
// Mark the frame as not yet displayable and not yet output. It will be
// marked displayable after a successful decode.
pFrame->unsetWasOutputted();
pFrame->unSetisDisplayable();
pFrame->SetBusyState(2);
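// A busy count of 2 presumably corresponds to the frame's two access units
// (fields) set up above, each releasing the frame as its decoding completes.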
pFrame->SetSkipped(false);
pFrame->SetFrameExistFlag(true);
if (m_pCurrentFrame == pFrame)
m_pCurrentFrame = 0;
m_UIDFrameCounter++;
pFrame->m_UID = m_UIDFrameCounter;
return pFrame;
}
Status TaskSupplier::DecodeSEI(MediaDataEx::_MediaDataEx * pSource, H264MemoryPiece * pMem)
{
if (m_CurrentSeqParamSet == -1)
return UMC_OK;
H264Bitstream bitStream;
try
{
bitStream.Reset((Ipp8u*)pMem->GetPointer() + pSource->offsets[0],
pSource->offsets[1] - pSource->offsets[0]);
Status umcRes = UMC_OK;
while ((UMC_OK == umcRes) &&
(0 == bitStream.GetSCP()))
{
umcRes = bitStream.AdvanceToNextSCP();
}
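// The loop above skips ahead to the next start-code prefix, leaving the
// bitstream positioned at the NAL unit header before parsing.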
NAL_Unit_Type uNALUnitType;
Ipp8u uNALStorageIDC;
bitStream.GetNALUnitType(uNALUnitType, uNALStorageIDC);
do
{
H264SEIPayLoad m_SEIPayLoads;
/*Ipp32s target_sps =*/ bitStream.ParseSEI(&(m_Headers.m_SeqParamSet[0]),
m_CurrentSeqParamSet, &m_SEIPayLoads);
if (m_SEIPayLoads.payLoadType == SEI_USER_DATA_REGISTERED_TYPE)
{
m_UserData = m_SEIPayLoads;
m_SEIPayLoads.user_data = 0;
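// Clearing the pointer after the copy presumably hands ownership of the
// user-data buffer to m_UserData, so the local payload's destructor does
// not free it.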
}
else
{
H264SEIPayLoad * spl = m_Headers.GetSEISet(m_SEIPayLoads.payLoadType);
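// AddSEISet presumably returns nonzero when the payload cannot be applied
// immediately (the current frame is still in use), in which case a notifier
// is attached to the frame to signal the header change once the frame is
// released. The same deferral pattern is used for SPS/PPS below in
// DecodeHeaders().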
if (m_Headers.AddSEISet(&m_SEIPayLoads, (!m_pCurrentFrame || m_pCurrentFrame->GetBusyState() < 2) ))
{
NotifiersChain *notif = m_pCurrentFrame->GetNotifiersChain();
notif->AddNotifier(new notifier2<Headers, Ipp32s, void*>(&m_Headers, &Headers::Signal, 3, spl));
}
}
} while (bitStream.More_RBSP_Data());
} catch(...)
{
// nothing to do, just swallow the exception
}
/*if (target_sps>=0 || m_bHasSEI)
{
if (target_sps<0) target_sps = m_SEITargetSPS; //inferred from previous tail
//process SEI message
H264SEIPayLoad *spl = &m_SEIPayLoads[target_sps];
H264SeqParamSet *sps = &m_SeqParamSet[target_sps];
if (spl->payLoadType==1) //pic_timing
{
if (sps->pic_struct_present_flag)
{
const Ipp8u PicStructToRepCount[]=
{
2,1,1,2,2,3,3,4,6
};
pFrame->m_PictureStructureFromSEI=spl->SEI_messages.pic_timing.pic_struct;//no info
if (m_field_index)
pFrame->m_RepeatCount+=PicStructToRepCount[pFrame->m_PictureStructureFromSEI];
else
pFrame->m_RepeatCount=PicStructToRepCount[pFrame->m_PictureStructureFromSEI];
}
}
}
else
{
pFrame->m_PictureStructureFromSEI=-1;//no info
pFrame->m_RepeatCount=2; // multiplied by 2
}*/
return UMC_OK;
}
Status TaskSupplier::DecodeHeaders(MediaDataEx::_MediaDataEx *pSource, H264MemoryPiece * pMem)
{
Status umcRes = UMC_OK;
H264Bitstream bitStream;
try
{
Ipp32s nNALUnitCount = 0;
for (nNALUnitCount = 0; nNALUnitCount < (Ipp32s) pSource->count; nNALUnitCount++)
{
bitStream.Reset((Ipp8u*)pMem->GetPointer() + pSource->offsets[nNALUnitCount],
pSource->offsets[nNALUnitCount + 1] - pSource->offsets[nNALUnitCount]);
while ((UMC_OK == umcRes) &&
(0 == bitStream.GetSCP()))
{
umcRes = bitStream.AdvanceToNextSCP();
}
if (UMC_OK != umcRes)
return UMC_ERR_INVALID_STREAM;
NAL_Unit_Type uNALUnitType;
Ipp8u uNALStorageIDC;
bitStream.GetNALUnitType(uNALUnitType, uNALStorageIDC);
switch(uNALUnitType)
{
// sequence parameter set
case NAL_UT_SPS:
{
H264SeqParamSet sps;
umcRes = bitStream.GetSequenceParamSet(&sps);
if (umcRes == UMC_OK)
{
DEBUG_PRINT((VM_STRING("debug headers SPS - %d \n"), sps.seq_parameter_set_id));
H264SeqParamSet * temp = m_Headers.GetSeqParamSet(sps.seq_parameter_set_id);
if (m_Headers.AddSeqParamSet(&sps, (!m_pCurrentFrame || m_pCurrentFrame->GetBusyState() < 2)))
{
NotifiersChain *notif = m_pCurrentFrame->GetNotifiersChain();
notif->AddNotifier(new notifier2<Headers, Ipp32s, void*>(&m_Headers, &Headers::Signal, 0, temp));
}
sps.poffset_for_ref_frame = 0; // prevent double deletion
// DEBUG : (todo - implement copy constructor and assignment operator)
m_bSeqParamSetRead = true;
// Validate the incoming bitstream's image dimensions.
m_CurrentSeqParamSet = sps.seq_parameter_set_id;
umcRes = SetDPBSize();
m_maxDecFrameBuffering = sps.max_dec_frame_buffering ?
sps.max_dec_frame_buffering : m_dpbSize;
if (sps.pic_order_cnt_type == 2 || m_TrickModeSpeed != 1)
{
m_maxDecFrameBuffering = 0;
}
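// pic_order_cnt_type == 2 implies output order equals decoding order, so no
// frames need to be held back for reordering; the same shortcut is taken
// during trick-mode playback (m_TrickModeSpeed != 1).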
AllocateBuffers(false);
if (umcRes != UMC_OK)
return umcRes;
}
else
return UMC_ERR_INVALID_STREAM;
}
break;
case NAL_UT_SPS_EX:
{
H264SeqParamSetExtension sps_ex;
umcRes = bitStream.GetSequenceParamSetExtension(&sps_ex);
if (umcRes == UMC_OK)
{
H264SeqParamSetExtension * temp = m_Headers.GetSeqParamSetEx(sps_ex.seq_parameter_set_id);
if (m_Headers.AddSeqParamSetEx(&sps_ex, (!m_pCurrentFrame || m_pCurrentFrame->GetBusyState() < 2)))
{
NotifiersChain *notif = m_pCurrentFrame->GetNotifiersChain();
notif->AddNotifier(new notifier2<Headers, Ipp32s, void*>(&m_Headers, &Headers::Signal, 1, temp));
}
}
else
return UMC_ERR_INVALID_STREAM;
}
break;
// picture parameter set
case NAL_UT_PPS:
{
H264PicParamSet pps;
// set illegal id
pps.pic_parameter_set_id = MAX_NUM_PIC_PARAM_SETS;
// Get id
umcRes = bitStream.GetPictureParamSetPart1(&pps);
if (UMC_OK == umcRes)
{
H264SeqParamSet *pRefsps = m_Headers.GetSeqParamSet(pps.seq_parameter_set_id);
if (!pRefsps || pRefsps->seq_parameter_set_id >= MAX_NUM_SEQ_PARAM_SETS)
return UMC_ERR_INVALID_STREAM;
// Get rest of pic param set
umcRes = bitStream.GetPictureParamSetPart2(&pps, pRefsps);
if (UMC_OK == umcRes)
{
DEBUG_PRINT((VM_STRING("debug headers PPS - %d - SPS - %d\n"), pps.pic_parameter_set_id, pps.seq_parameter_set_id));
H264PicParamSet * temp = m_Headers.GetPicParamSet(pps.pic_parameter_set_id);
if (m_Headers.AddPicParamSet(&pps, (!m_pCurrentFrame || m_pCurrentFrame->GetBusyState() < 2)))
{
NotifiersChain *notif = m_pCurrentFrame->GetNotifiersChain();
notif->AddNotifier(new notifier2<Headers, Ipp32s, void*>(&m_Headers, &Headers::Signal, 2, temp));
}
pps.SliceGroupInfo.t3.pSliceGroupIDMap = 0; // prevent double deletion
// DEBUG : (todo - implement copy constructor and assignment operator)
m_bPicParamSetRead = true;
m_CurrentPicParamSet = pps.pic_parameter_set_id;
// reset current picture parameter set number
if (0 > m_CurrentPicParamSet)
{
m_CurrentPicParamSet = pps.pic_parameter_set_id;
m_CurrentSeqParamSet = pps.seq_parameter_set_id;
}
}
}
}
break;
default:
break;
}
}
}
catch(const h264_exception & ex)
{
return ex.GetStatus();
}
catch(...)
{
return UMC_ERR_INVALID_STREAM;
}
return UMC_OK;
} // Status TaskSupplier::DecodeHeaders(MediaDataEx::_MediaDataEx *pSource, H264MemoryPiece * pMem)
//////////////////////////////////////////////////////////////////////////////
// ProcessFrameNumGap
//
// A non-sequential frame_num has been detected. If the sequence parameter
// set field gaps_in_frame_num_value_allowed_flag is non-zero, the gap is
// legal and "non-existing" frames are created to fill it correctly.
// Otherwise the gap indicates lost frames that must be handled in a
// reasonable way.
//////////////////////////////////////////////////////////////////////////////
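// Worked example of the gap computation below (an illustration, not part of
// the original source): with log2_max_frame_num = 4 (MaxFrameNum = 16), a
// previous reference frame_num of 14 and a current frame_num of 2, the
// wrap-around branch gives
//     (16 - (14 + 1 - 2)) % 16 = 3
// i.e. frame_num values 15, 0 and 1 are missing, so three "non-existing"
// frames would be inserted (subject to the DPB-size clamp in the code).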
Status TaskSupplier::ProcessFrameNumGap(H264Slice *slice, Ipp32s field)
{
Status umcRes = UMC_OK;
H264SliceHeader *sliceHeader = slice->GetSliceHeader();
H264SeqParamSet* sps = slice->GetSeqParam();
Ipp32s uMaxFrameNum = (1<<sps->log2_max_frame_num);
Ipp32s frameNumGap;
if (sliceHeader->idr_flag)
return UMC_OK;
// Capture any frame_num gap
if (sliceHeader->frame_num != m_PrevFrameRefNum &&
sliceHeader->frame_num != (m_PrevFrameRefNum + 1) % uMaxFrameNum)
{
// note this could be negative if frame num wrapped
if (sliceHeader->frame_num > m_PrevFrameRefNum - 1)
{
frameNumGap = (sliceHeader->frame_num - m_PrevFrameRefNum - 1) % uMaxFrameNum;
}
else
{
frameNumGap = (uMaxFrameNum - (m_PrevFrameRefNum + 1 - sliceHeader->frame_num)) % uMaxFrameNum;
}
if (frameNumGap > m_dpbSize)
{
frameNumGap = m_dpbSize;
}
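// At most m_dpbSize "non-existing" reference frames can be held in the DPB,
// so larger gaps are clamped to that size.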
}
else
{
frameNumGap = 0;
return UMC_OK;
}
if (sps->gaps_in_frame_num_value_allowed_flag != 1)
return UMC_OK;
if (m_pCurrentFrame)
{
m_pCurrentFrame = 0;
return UMC_ERR_ALLOC;
}