📄 umc_vc1_video_decoder.cpp
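// Create one VC1ThreadDecoder per worker thread and bind each to the shared
// decoder context, the task store and the (optional) video accelerator.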
for (i = 0; i < m_iThreadDecoderNum; i += 1)
{
m_pdecoder[i] = new VC1ThreadDecoder();
if (NULL == m_pdecoder[i])
return UMC_ERR_ALLOC;
}
for (i = 0; i < m_iThreadDecoderNum; i += 1)
{
if (UMC_OK != m_pdecoder[i]->Init(m_pContext,i,m_pStore,m_va))
return UMC_ERR_INIT;
}
}
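// Optional per-thread profiling: each thread gets a VC1ThreadEntry array sized
// from the picture width in macroblocks and the tracked event/call combinations.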
#ifdef VC1_THREAD_STATISTIC
{
Ipp32u NumEntries = m_pContext->m_seqLayerHeader->widthMB*9*4*2; // 9 - types of work,
// 4 - number of call points (getNext, start proc, end proc, addPerformed),
// 2 - possible sleep and wake states
m_eEntryArray = (VC1ThreadEntry**)ippsMalloc_8u(sizeof(VC1ThreadEntry*)*m_iThreadDecoderNum);
for (i = 0; i < m_iThreadDecoderNum; i += 1)
{
m_eEntryArray[i] = (VC1ThreadEntry*)ippsMalloc_8u(sizeof(VC1ThreadEntry)*NumEntries);
if (!m_eEntryArray[i])
return UMC_ERR_INIT;
m_pdecoder[i]->m_pJobSlice->m_Statistic = new VC1ThreadStatistic(i,m_eEntryArray[i],NumEntries);
}
}
#endif
#ifdef _PROJECT_STATISTICS_
TimeStatisticsStructureInitialization();
VC1VideoDecoderParams *VC1Init = DynamicCast<VC1VideoDecoderParams, BaseCodecParams>(pInit);
if(VC1Init)
m_timeStatistics->streamName = VC1Init->streamName;
#endif
//internal exception initialization
vc1_except_profiler::GetEnvDescript(smart_recon,FrameLevel);
return umcRes;
}
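// InitVAEnvironment: in the pure software path (no m_va accelerator) the frame pool
// (frames in flight plus VC1NUMREFFRAMES references) is allocated through the external
// memory allocator and the locked block is attached to the first pool entry.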
Status VC1VideoDecoder::InitVAEnvironment(Ipp32u frameSize)
{
if (!m_va)
{
m_iMaxFramesInProcessing = m_iThreadDecoderNum + m_iThreadDecoderNum * VC1FRAMEPARALLELPAIR; // m_iMaxFramesInProcessing should be > 1
m_pContext->m_frmBuff.m_pFrames = new Frame[m_iMaxFramesInProcessing + VC1NUMREFFRAMES];
if (!m_pContext->m_frmBuff.m_pFrames)
return UMC_ERR_ALLOC;
if (!m_pContext->m_frmBuff.m_pFrames[0].m_pAllocatedMemory )
{
if(m_pMemoryAllocator->Alloc(&m_pNewMemID, 4*(m_iMaxFramesInProcessing + VC1NUMREFFRAMES)*frameSize, // plus VC1NUMREFFRAMES reference frames
UMC_ALLOC_PERSISTENT,16) != UMC_OK )
{
Close();
return UMC_ERR_ALLOC;
}
m_pContext->m_frmBuff.m_pFrames[0].m_pAllocatedMemory = (Ipp8u*)m_pMemoryAllocator->Lock(m_pNewMemID);
if(m_pContext->m_frmBuff.m_pFrames[0].m_pAllocatedMemory == NULL)
return UMC_ERR_ALLOC;
}
}
return UMC_OK;
}
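// GetFrame: top-level decode call. A NULL input flushes the last decoded frame;
// otherwise the payload is repackaged (and byte-swapped for 32-bit bitstream reads)
// when it arrives as MediaDataEx or through the WMP splitter, and the Advanced
// profile path then dispatches on the BDU start code.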
Status VC1VideoDecoder::GetFrame(MediaData* in, MediaData* out)
{
Status umcRes = UMC_OK;
static Ipp32s frameCount = 0;
VideoDecoderParams params;
Ipp32u readSize = 0;
static bool bLastFrameNeedDisplay = true;
if(out == NULL)
return UMC_ERR_NULL_PTR;
VideoData *out_data = DynamicCast<VideoData, MediaData>(out);
if(in!=NULL && (Ipp32u)in->GetDataSize() == 0)
return UMC_ERR_NOT_ENOUGH_DATA;
if(!m_pContext)
return UMC_ERR_NOT_INITIALIZED;
STATISTICS_START_TIME(m_timeStatistics->startTime);
if (in == NULL)
{
if (out_data!=NULL && (umcRes == UMC_OK))
{
umcRes = VC1DecodeLastFrame(out_data);
out_data->SetTime(m_pts);
}
else
{
umcRes = UMC_ERR_NOT_ENOUGH_DATA;
if(out_data == NULL)
umcRes = UMC_ERR_NULL_PTR;
}
}
else
{
MediaDataEx *in_ex = DynamicCast<MediaDataEx,MediaData>(in);
if (!m_PostProcessing)
{
m_PostProcessing = m_allocatedPostProcessing = UMC::createVideoProcessing();
//m_PostProcessing = new ColorSpaceConversion();
}
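// MediaDataEx input, or data fed by the WMP splitter, is copied into the internal
// m_dataBuffer; the splitter path may re-insert the frame start code, and the whole
// buffer is byte-swapped so the parser can read it as 32-bit words.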
if((in_ex!=NULL)||m_bIsWMPSplitter)
{
m_pContext->m_FrameSize = (Ipp32u)in->GetDataSize();
if(!((m_decoderFlags & FLAG_VDEC_4BYTE_ACCESS) == FLAG_VDEC_4BYTE_ACCESS))
{
#ifdef CREATE_ES
Ipp32u tdataSize = (in->GetDataSize())&0x00FFFFFF;
fwrite(&tdataSize, 1, 4, m_fPureVideo);
tdataSize = 0xFFFFFFFF; // timestamp
fwrite(&tdataSize, 1, 4, m_fPureVideo);
fwrite((Ipp8u*)in->GetBufferPointer(), 1, in->GetDataSize(), m_fPureVideo);
#endif
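// The WMP splitter appears to deliver Advanced profile frames without the leading
// frame start code: prepend 0x0000010D (stored byte-swapped as 0x0D010000) and
// rebuild the start-code offset/value list before parsing.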
if (m_bIsWMPSplitter)
{
if ((*(Ipp32u*)(in->GetBufferPointer()) != 0x0E010000)&&
(m_pContext->m_seqLayerHeader->PROFILE == VC1_PROFILE_ADVANCED))
{
m_frameData->GetExData()->count = 0;
memset(m_frameData->GetExData()->offsets, 0,START_CODE_NUMBER*sizeof(Ipp32s));
memset(m_frameData->GetExData()->values, 0,START_CODE_NUMBER*sizeof(Ipp32s));
*(Ipp32u*)(m_dataBuffer) = 0x0D010000;
m_frameData->SetBufferPointer(m_dataBuffer+4,in->GetDataSize()+4);
*m_frameData->GetExData()->offsets = 0;
*m_frameData->GetExData()->values = 0x0D010000;
m_frameData->GetExData()->count = 1;
umcRes = GetStartCodes(in, m_frameData, &readSize);
m_frameData->SetBufferPointer(m_dataBuffer,in->GetDataSize()+4);
}
else if (m_pContext->m_seqLayerHeader->PROFILE != VC1_PROFILE_ADVANCED)
{
// Simple/Main profile
m_frameData->SetBufferPointer(m_dataBuffer+4,in->GetDataSize()+4);
ippsCopy_8u((Ipp8u*)in->GetBufferPointer(), m_dataBuffer+4,
(Ipp32u)in->GetDataSize());
m_frameData->SetBufferPointer(m_dataBuffer,in->GetDataSize()+4);
}
else
{
// Entry point header for Advanced profile
ippsCopy_8u((Ipp8u*)in->GetBufferPointer(), m_dataBuffer,
(Ipp32u)in->GetDataSize());
}
}
else
// FLAG_VDEC_4BYTE_ACCESS is set (external, non-filter source): copy the buffer as-is
ippsCopy_8u((Ipp8u*)in->GetBufferPointer(), m_dataBuffer,
(Ipp32u)in->GetDataSize());
if (!m_bIsWMPSplitter)
m_frameData->SetDataSize((Ipp32u)in_ex->GetDataSize());
else
m_frameData->SetDataSize((Ipp32u)in->GetDataSize()+4);
//need to swap data
SwapData((Ipp8u*)m_frameData->GetBufferPointer(), (Ipp32u)m_frameData->GetDataSize());
m_pContext->m_bitstream.pBitstream = (Ipp32u*)m_frameData->GetDataPointer();
m_pContext->m_pBufferStart = (Ipp8u*)m_frameData->GetBufferPointer();
}
else
{
m_pContext->m_bitstream.pBitstream = (Ipp32u*)in->GetDataPointer();
m_pContext->m_pBufferStart = (Ipp8u*)in->GetBufferPointer();
}
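// Bitstream reading starts at the most significant bit of the first 32-bit word;
// the start-code offset/value arrays come either from the splitter's MediaDataEx
// or from the locally rebuilt list in m_frameData.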
m_pContext->m_bitstream.bitOffset = 31;
if (!m_bIsWMPSplitter)
{
m_pContext->m_Offsets = in_ex->GetExData()->offsets;
m_pContext->m_values = in_ex->GetExData()->values;
}
else
{
m_pContext->m_Offsets = m_frameData->GetExData()->offsets;
m_pContext->m_values = m_frameData->GetExData()->values;
}
GetFrameSize(in);
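// Deferred initialization: elementary VC-1 streams must start with a sequence header;
// once one is available the decoder closes and re-initializes itself from it and asks
// for the next portion of data.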
if(!m_decoderInitFlag)
{
Ipp32u start_code = *((Ipp32u*)(m_pContext->m_pBufferStart));
if((start_code != (VC1_SequenceHeader|0x00000100))&& (m_ClipInfo.stream_subtype == VC1_VIDEO_VC1))
return UMC_ERR_NOT_ENOUGH_DATA;
params.m_pData = in;
params.lFlags = m_decoderFlags;
params.info.stream_type = m_ClipInfo.stream_type;
params.info.stream_subtype = m_ClipInfo.stream_subtype;
params.numThreads = m_iThreadDecoderNum;
if(m_allocatedPostProcessing == NULL)
params.pPostProcessing = m_PostProcessing;
else
m_PostProcessing = NULL;
Close();
Init(&params);
out_data->SetTime(m_pts);
STATISTICS_END_TIME (m_timeStatistics->startTime,
m_timeStatistics->endTime,
m_timeStatistics->totalTime);
return UMC_ERR_NOT_ENOUGH_DATA;
}
if(m_pContext->m_seqLayerHeader->PROFILE == VC1_PROFILE_ADVANCED)
{
Ipp32u start_code = *((Ipp32u*)(m_pContext->m_pBufferStart));
//skip start code
m_pContext->m_bitstream.pBitstream+=1;
//advanced profile
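// Dispatch on the BDU start code suffix (SMPTE 421M Annex E): 0x0F sequence header,
// 0x0E entry point, 0x0D frame, 0x0C field, 0x0A end of sequence, 0x1B..0x1F user data.
// After the byte swap the suffix sits in the low byte of the 32-bit word read above.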
switch (start_code)
{
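// New sequence header: emit any pending display frame, reset the reference picture
// indices and the task store, then re-initialize the decoder with the new sequence
// parameters via Close()/Init().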
case VC1_SequenceHeader|0x00000100:
{
//need to change display index
m_pContext->m_frmBuff.m_iDisplayIndex = m_pStore->GetNextIndex();
m_pStore->FreeIndexQueue();
m_pStore->ResetDSQueue();
m_lFrameCount = 0;
m_pContext->m_frmBuff.m_iPrevIndex = -1;
m_pContext->m_frmBuff.m_iNextIndex= -1;
m_pContext->m_frmBuff.m_iCurrIndex = -1;
m_pContext->m_bIntensityCompensation = 0;
if (/*out_data!=NULL && */
m_pContext->m_frmBuff.m_iDisplayIndex>-1 && (umcRes == UMC_OK))
{
umcRes = WriteFrame(in,m_pContext,out_data);
umcRes = UMC_OK;
}
else
umcRes = UMC_ERR_NOT_ENOUGH_DATA;
params.m_pData = in;
params.lFlags = m_decoderFlags;
params.info.stream_type = m_ClipInfo.stream_type;
params.info.stream_subtype = m_ClipInfo.stream_subtype;
params.numThreads = m_iThreadDecoderNum;
if(m_allocatedPostProcessing == NULL)
params.pPostProcessing = m_PostProcessing;
else
m_PostProcessing = NULL;
Close();
Init(&params);
out_data->SetTime(m_pts);
return umcRes;
}
break;
case VC1_EndOfSequence|0x00000100:
{
//need to change display index
//m_pContext->m_frmBuff.m_iDisplayIndex = VC1TaskStore::GetInstance(VC1Routine)->GetNextIndex();
//VC1TaskStore::GetInstance(VC1Routine)->FreeIndexQueue();
//VC1TaskStore::GetInstance(VC1Routine)->ResetDSQueue();
//m_lFrameCount = 0;
//m_pContext->m_frmBuff.m_iPrevIndex = -1;
//m_pContext->m_frmBuff.m_iNextIndex= -1;
//m_pContext->m_frmBuff.m_iCurrIndex = -1;
//m_pContext->m_bIntensityCompensation = 0;
if (/*out_data!=NULL && */
m_pContext->m_frmBuff.m_iDisplayIndex>-1 && (umcRes == UMC_OK))
{
umcRes = WriteFrame(in,m_pContext,out_data);
umcRes = UMC_OK;
}
else
umcRes = UMC_ERR_NOT_INITIALIZED;
//m_decoderInitFlag = 0;
m_pContext->m_Offsets++;
m_pContext->m_values++;
umcRes = UMC_OK;
}
break;
case VC1_EntryPointHeader|0x00000100:
{
EntryPointLayer(m_pContext);
m_pContext->m_Offsets++;
m_pContext->m_values++;
if (!m_bIsWMPSplitter)
umcRes = UMC_ERR_NOT_ENOUGH_DATA;
else
umcRes = UMC_OK;
}
break;
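// Frame BDU: the actual picture decode. VC1DecodeFrame runs the threaded frame
// descriptor pipeline; internal pipeline exceptions are mapped to UMC_ERR_FAILED.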
case (VC1_FrameHeader|0x00000100):
{
#ifdef VC1_DEBUG_ON
VM_Debug::GetInstance(VC1DebugRoutine).vm_debug_frame(frameCount,0,NULL,1);
#endif
#ifdef SLICE_INFO
printf("frameCount =%d\n", frameCount);
#endif
++m_lFrameCount;
#ifdef VC1_DEBUG_ON
VM_Debug::GetInstance(VC1DebugRoutine).vm_debug_frame(-1,VC1_POSITION,
VM_STRING("frameCount = %d\n"), frameCount);
#endif
try //work with queue of frame descriptors and process every frame
{
umcRes = m_pSelfDecoder->VC1DecodeFrame(this,in,out_data);
}
catch (vc1_exception ex)
{
exception_type e_type = ex.get_exception_type();
if (e_type == internal_pipeline_error)
return UMC_ERR_FAILED;
}
#ifdef _PROJECT_STATISTICS_
m_timeStatistics->frameCount++;
#endif
}
break;
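// Field and user-data BDUs carry no standalone output here: skip the start code
// entry and ask the caller for more data.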
case VC1_Field|0x00000100:
case VC1_SliceLevelUserData|0x00000100:
case VC1_FieldLevelUserData|0x00000100:
case VC1_FrameLevelUserData|0x00000100:
case VC1_EntryPointLevelUserData|0x00000100:
case VC1_SequenceLevelUserData|0x00000100:
m_pContext->m_Offsets++;
m_pContext->m_values++;
umcRes = UMC_ERR_NOT_ENOUGH_DATA;
break;
default:
printf("incorrect start code suffix \n");
umcRes = UMC_ERR_SYNC;;