// umc_dv_decoder.cpp
switch (m_lSizeSubSampled)
{
case 2:
StoreDVSegment = &DVVideoDecoder::StoreDVSDSegment_2s;
break;
case 4:
StoreDVSegment = &DVVideoDecoder::StoreDVSDSegment_4s;
break;
case 8:
StoreDVSegment = &DVVideoDecoder::StoreDVSDSegment_8s;
break;
default:
StoreDVSegment = &DVVideoDecoder::StoreDVSDSegment;
break;
}
break;
case DV25_STREAM:
switch (m_lSizeSubSampled)
{
case 2:
StoreDVSegment = &DVVideoDecoder::StoreDV25Segment_2s;
break;
case 4:
StoreDVSegment = &DVVideoDecoder::StoreDV25Segment_4s;
break;
case 8:
StoreDVSegment = &DVVideoDecoder::StoreDV25Segment_8s;
break;
default:
StoreDVSegment = &DVVideoDecoder::StoreDV25Segment;
break;
}
break;
}
}
// Decode one complete DV frame from pInData into pOutData.
//
// Parameters:
//   pInData  - compressed DV stream; must hold at least one full frame
//              (m_nSourceFrameSize bytes). The data pointer is advanced by
//              one frame and the timestamp by one frame duration on success.
//   pOutData - must be a VideoData; receives the decoded I-picture.
//
// Returns UMC_OK on success, UMC_ERR_NOT_INITIALIZED before Init(),
// UMC_ERR_NULL_PTR on bad pointers / failed buffer locks,
// UMC_ERR_NOT_ENOUGH_DATA when the input is missing or too small.
//
// BUGFIX vs. original: the early-return paths after a failed
// m_pMemoryAllocator->Lock() now unlock every buffer locked earlier in this
// call (internal frame buffer, DCT blocks buffer); previously those locks
// leaked on error.
Status DVVideoDecoder::GetFrame(MediaData *pInData, MediaData *pOutData)
{
    VideoData *lpVData = DynamicCast<VideoData> (pOutData);
    Status status = UMC_OK;
    Ipp32u i;

    if (!m_bInitSuccess)
        return UMC_ERR_NOT_INITIALIZED;
    if (NULL == lpVData)
        return UMC_ERR_NULL_PTR;
    if (NULL == pInData)
        return UMC_ERR_NOT_ENOUGH_DATA;

    // 120000 bytes is one DV25 NTSC frame; anything smaller cannot hold a
    // frame at all, so discard it and ask for more data.
    if (120000 > pInData->GetDataSize())
    {
        pInData->MoveDataPointer(pInData->GetDataSize());
        return UMC_ERR_NOT_ENOUGH_DATA;
    }

    if (pInData->GetDataSize() < m_nSourceFrameSize)
    {
        // May be error in codec initialization.
        // For example: Init() called for NTSC (720x480), but stream is PAL (720x576).
        // The DSF flag sits in each DIF sequence, header section, first DIF
        // (header) block: DSF == 0x00 -> NTSC, DSF == 0x80 -> PAL.
        Ipp8u *pCurrFrame = reinterpret_cast<Ipp8u *> (pInData->GetDataPointer());
        Ipp32s byteDSF = pCurrFrame[3] & 0x080;
        CheckSetCorrectParams((int) byteDSF);
    }

    // Choose the sub-sampling factor from the ratio of stream size to the
    // requested output size (must match in both dimensions).
    m_bDCOnly = false;
    m_lSizeSubSampled = 1;
    Ipp64f widthRatio = m_nWidth / (Ipp64f) lpVData->GetWidth();
    Ipp64f heightRatio = m_nHeight / (Ipp64f) lpVData->GetHeight();
    if (widthRatio == heightRatio) {
        if (widthRatio == 2) {
            m_lSizeSubSampled = 2;
        } else if (widthRatio == 4) {
            m_lSizeSubSampled = 4;
        } else if (widthRatio == 8) {
            m_lSizeSubSampled = 8;
            m_bDCOnly = true;   // 1/8 scale decodes DC coefficients only
        }
    }
    Ipp32s dstWidth = m_nWidth / m_lSizeSubSampled;
    Ipp32s dstHeight = m_nHeight / m_lSizeSubSampled;

    // select frame type and set right store function
    SelectStoreFunction(pInData->GetDataPointer());

    if (!m_PostProcessing) {
        m_PostProcessing = m_allocatedPostProcessing = createVideoProcessing();
    }

    // Decode directly into the caller's surface when no color conversion or
    // scaling is needed; otherwise decode into the internal frame buffer and
    // run post-processing afterwards.
    bool bInternalBufferLocked = false;
    if (m_allocatedPostProcessing /* post processing is default */ &&
        YUY2 == lpVData->GetColorFormat() &&
        dstWidth == lpVData->GetWidth() &&
        dstHeight == lpVData->GetHeight())
    {
        m_lpDestination = reinterpret_cast<Ipp8u *> (lpVData->GetPlanePointer(0));
        m_nPitch = lpVData->GetPlanePitch(0);
    } else {
        m_lpvInternalFrameBuffer = (Ipp8u *) m_pMemoryAllocator->Lock(m_InternalFrameBufferMID);
        if (NULL == m_lpvInternalFrameBuffer)
            return UMC_ERR_NULL_PTR;
        bInternalBufferLocked = true;
        m_lpDestination = m_lpvInternalFrameBuffer;
        m_nPitch = align_value<Ipp32u> (2 * m_nWidth, WIDTH_ALIGN);
    }

    m_lpSource = reinterpret_cast<Ipp8u *> (pInData->GetDataPointer());

    Ipp8u *pShortBlocks = (Ipp8u *) m_pMemoryAllocator->Lock(m_DCTBlocksBufferMID);
    if (NULL == pShortBlocks)
    {
        // BUGFIX: release the internal frame buffer locked above
        if (bInternalBufferLocked)
        {
            m_pMemoryAllocator->Unlock(m_InternalFrameBufferMID);
            m_lpvInternalFrameBuffer = NULL;
        }
        return UMC_ERR_NULL_PTR;
    }
    // give every worker thread its own slice of the DCT-block scratch buffer
    for (i = 0; i < m_nNumberOfThreads; i += 1)
        m_ppShortBlocks[i] = (Ipp16u *) (pShortBlocks + SIZE_OF_VIDEO_SEGMENT * i);

    m_lpADequantizeTable = (Ipp16u *) m_pMemoryAllocator->Lock(m_DequantizeTableMID);
    if (NULL == m_lpADequantizeTable)
    {
        // BUGFIX: release everything locked earlier in this call
        m_pMemoryAllocator->Unlock(m_DCTBlocksBufferMID);
        if (bInternalBufferLocked)
        {
            m_pMemoryAllocator->Unlock(m_InternalFrameBufferMID);
            m_lpvInternalFrameBuffer = NULL;
        }
        return UMC_ERR_NULL_PTR;
    }

    // start additional thread(s); thread 0 (this one) works in-line
    for (i = 1; i < m_nNumberOfThreads; i += 1)
        vm_event_signal(m_lpStartEvent + i);
    DecompressSegment(0);
    // wait additional thread(s)
    for (i = 1; i < m_nNumberOfThreads; i += 1)
        vm_event_wait(m_lpStopEvent + i);

    m_pMemoryAllocator->Unlock(m_DCTBlocksBufferMID);
#ifdef _DEBUG
    // poison stale pointers so use-after-unlock shows up in debug builds
    memset(m_ppShortBlocks, 0, sizeof(Ipp16u*) * m_nNumberOfThreads);
#endif
    m_pMemoryAllocator->Unlock(m_DequantizeTableMID);
    m_lpADequantizeTable = NULL;

    lpVData->SetFrameType(I_PICTURE);
    lpVData->SetTime(pInData->GetTime());

    // (Re)initialize the cached frame descriptor if geometry/format changed.
    if (m_LastDecodedFrame.GetColorFormat() != YUY2 ||
        m_LastDecodedFrame.GetWidth() != dstWidth ||
        m_LastDecodedFrame.GetHeight() != dstHeight)
    {
        m_LastDecodedFrame.Init(dstWidth,
                                dstHeight,
                                YUY2);
    }
    m_LastDecodedFrame.SetFrameType(I_PICTURE);
    m_LastDecodedFrame.SetTime(pInData->GetTime());
    m_LastDecodedFrame.SetPlanePointer((void *) m_lpDestination, 0);
    m_LastDecodedFrame.SetPlanePitch(m_nPitch, 0);

    // If we decoded into the internal buffer, convert/scale into the caller's
    // surface now and release the buffer.
    if (m_lpDestination == m_lpvInternalFrameBuffer)
    {
        status = m_PostProcessing->GetFrame(&m_LastDecodedFrame, lpVData);
        m_pMemoryAllocator->Unlock(m_InternalFrameBufferMID);
        m_lpvInternalFrameBuffer = NULL;
    }

    // Consume the frame and advance the timestamp by one frame duration:
    // ~1001/30000 s for 525/60 (NTSC) systems, 1/25 s for 625/50 (PAL).
    pInData->MoveDataPointer(m_nSourceFrameSize);
    if (SYSTEM_525 == m_nSystem) {
        pInData->SetTime(pInData->GetTime() + 0.0333666);
    } else {
        pInData->SetTime(pInData->GetTime() + 1.0/25);
    }
    return status;
} // Status DVVideoDecoder::GetFrame(MediaData *pInData, MediaData *pOutData)
// Decompress the portion of the current frame assigned to worker nThreadNum.
//
// The m_nMaxNumberOfDIFSequences DIF sequences are split evenly across
// m_nNumberOfThreads; this thread handles sequences [i_start, i_stop).
// Each sequence is iterated as 27 video segments (k), and each segment holds
// 5 compressed macroblocks of 6 blocks each.
//
// Two modes:
//  - normal: Huffman decode (one-pass variant when decoding at 1/2 size),
//    adaptive dequantization, then an inverse DCT whose variant depends on
//    the block's DCT mode and on the sub-sampling factor;
//  - m_bDCOnly (1/8 scale): only DC coefficients are extracted.
// Either way the result is written out via the StoreDVSegment member-function
// pointer selected earlier by SelectStoreFunction().
void DVVideoDecoder::DecompressSegment(Ipp32u nThreadNum)
{
Ipp32u i, k;
Ipp32u i_start, i_stop;
Ipp32u nTemp;
Ipp8u *lpSrc;
// check error(s)
if (nThreadNum >= m_nNumberOfThreads)
return;
// set working i & k: even partition of the DIF sequences over the threads
nTemp = m_nMaxNumberOfDIFSequences;
i_start = (nTemp * (nThreadNum)) / m_nNumberOfThreads;
i_stop = (nTemp * (nThreadNum + 1)) / m_nNumberOfThreads;
// per-thread scratch buffer for the 30 decoded 8x8 blocks of one segment
Ipp16u *lpsBlocks = m_ppShortBlocks[nThreadNum];
if (false == m_bDCOnly)
{
// one packed parameter word per block: 5 macroblocks * 6 blocks = 30 entries
Ipp32u BlParamBuffer[30];
Ipp32u b_num, mb_num, block_type, block_quant_class, qno, bl_index, eob;
Ipp16u *lpsTable;
for (i = i_start;i < i_stop;i += 1)
{
for (k = 0;k < 27;k += 1)
{
// get source pointer
// offset arithmetic: 150 DIF blocks per sequence, SIZE_OF_DV_SEGMENT bytes
// each; the "6 + (k / 3 + 1) + k * 5" term skips the non-video (header,
// subcode, VAUX, audio) blocks interleaved with the video segments
// -- NOTE(review): offsets assumed to follow the DV DIF layout; confirm
lpSrc = m_lpSource + SIZE_OF_DV_SEGMENT * (i * 150 + 6 + (k / 3 + 1) + k * 5);
//reset working block
memset(lpsBlocks, 0, SIZE_OF_VIDEO_SEGMENT);
// start video decompressing: the one-pass variant is used at 1/2 scale,
// the full variant otherwise; both fill lpsBlocks with coefficients and
// BlParamBuffer with per-block parameter words
if(m_lSizeSubSampled == 2)
ippiHuffmanDecodeSegmentOnePass_DV_8u16s((Ipp8u *) lpSrc,
(Ipp32u *) _INTERNAL_DEZIGZAG_TABLE_0,
(Ipp32u *) m_pHuffTable,
(Ipp16s *) lpsBlocks,
(Ipp32u *) BlParamBuffer,
64);
else
ippiHuffmanDecodeSegment_DV_8u16s((Ipp8u *) lpSrc,
(Ipp32u *) _INTERNAL_DEZIGZAG_TABLE_0,
(Ipp32u *) m_pHuffTable,
(Ipp16s *) lpsBlocks,
(Ipp32u *) BlParamBuffer);
// do dequantize and iDCT
for (mb_num = 0;mb_num < 5;mb_num++)
{
// get quantization number (shared by the whole macroblock,
// taken from its first block's parameter word, bits 16..19)
qno = (BlParamBuffer[mb_num * 6] >> 16) & 0x0F;
// decompress each block
for (b_num = 0;b_num < 6;b_num++)
{
// unpack the per-block parameter word:
// bits 4..5 quant class, bit 6 DCT mode (8x8 vs 2x4x8),
// bits 8..15 index of the last decoded coefficient, bit 0 end-of-block
block_quant_class = (BlParamBuffer[mb_num * 6 + b_num] >> 4) & 0x03;
block_type = (BlParamBuffer[mb_num * 6 + b_num] >> 6) & 0x01;
bl_index = (BlParamBuffer[mb_num * 6 + b_num] >> 8) & 0xff;
eob = BlParamBuffer[mb_num * 6 + b_num] & 0x01;
// get beginning of arrays of dequantize tables
lpsTable = m_lpADequantizeTable +
// get needed array of tables, depending on block class
(block_type * 64 * 14) +
// get offset of needed table, depending on quantization class
// & quantization number
64 * m_lpADequantizeLineTable[qno + block_quant_class * 16];
// do inverse and adaptive dequantization (in place, 64 coeffs/block)
ippiQuantInv_DV_16s_C1I((Ipp16s *) (lpsBlocks + 64 * (b_num + 6 * mb_num)),
(Ipp16s *) lpsTable);
// do iDCT; the variant depends on DCT mode and output scale
if (0 == block_type)
{
if(m_lSizeSubSampled == 2)
ippiDCT8x4x2To4x4Inv_DV_16s_C1I((Ipp16s *) (lpsBlocks + 64 * (b_num +6 * mb_num)));
else if ((eob == 0)||( 10 > bl_index))
{
// few coefficients present: the cheaper 4x4 inverse suffices
ippiDCT8x8Inv_4x4_16s_C1I((Ipp16s *) (lpsBlocks + 64 * (b_num + 6 * mb_num)));
}
else
ippiDCT8x8Inv_16s_C1I((Ipp16s *) (lpsBlocks + 64 * (b_num + 6 * mb_num)));
}
else
{
if(m_lSizeSubSampled == 2)
ippiDCT8x4x2To4x4Inv_DV_16s_C1I((Ipp16s *) (lpsBlocks + 64 * (b_num +6 * mb_num)));
else
ippiDCT2x4x8Inv_16s_C1I((Ipp16s *) (lpsBlocks + 64 * (b_num +6 * mb_num)));
}
}
}
// store data to memory
(this->*(StoreDVSegment))(i, k, nThreadNum);
}
}
}
else
{
// DC-only path (1/8 downscale): skip dequantization and iDCT entirely
for (i = i_start;i < i_stop;i += 1)
{
for (k = 0;k < 27;k += 1)
{
// get source pointer (same DIF offset arithmetic as above)
lpSrc = m_lpSource + SIZE_OF_DV_SEGMENT * (i * 150 + 6 + (k / 3 + 1) + k * 5);
HuffmanDecodeSegment_DV_DC_only(lpSrc, (Ipp16u*)lpsBlocks);
// store data to memory
(this->*StoreDVSegment)(i, k, nThreadNum);
}
}
}
} // void DVVideoDecoder::DecompressSegment(Ipp32u ThreadNum)
// Performance measurement is not implemented for this decoder;
// a constant 1.0 is reported when the caller supplies a destination.
Status DVVideoDecoder::GetPerformance(Ipp64f *perf)
{
    if (NULL == perf)
        return UMC_OK;
    *perf = 1.0;
    return UMC_OK;
} // Status DVVideoDecoder::GetPerformance(Ipp64f *perf)
// Reset decoder state. There is no per-frame state to flush here, so this
// only reports whether the decoder has been successfully initialized.
Status DVVideoDecoder::Reset(void)
{
    return m_bInitSuccess ? UMC_OK : UMC_ERR_NOT_INITIALIZED;
} // Status DVVideoDecoder::Reset(void)
// Absolute tolerance used when comparing floating-point values.
const Ipp64f MINIMAL_DOUBLE_DIFFERENCE = 1.0/1000000.0;

// Compare two doubles for equality within MINIMAL_DOUBLE_DIFFERENCE.
inline
bool IsEqual(Ipp64f dOne, Ipp64f dTwo)
{
    return (fabs(dOne - dTwo) <= MINIMAL_DOUBLE_DIFFERENCE);
} //bool IsEqual(Ipp64f dOne, Ipp64f dTwo)
// Entry point for the additional decoding worker threads.
//
// lpv is a THREAD_ID* carrying the owning DVVideoDecoder and this worker's
// index. Protocol per frame: the owner signals m_lpStartEvent[n]; the worker
// decompresses its share of the frame via DecompressSegment(n) and signals
// m_lpStopEvent[n]; it then blocks on the start event again. The loop exits
// once m_Quit is signalled (checked, non-blocking, after each wake-up).
// Returns the thread number on normal exit, 0x0bad on a NULL argument.
Ipp32u VM_THREAD_CALLCONVENTION DVVideoDecoder::ThreadWorkingRoutine(void *lpv)
{
THREAD_ID *lpThreadId;
DVVideoDecoder *lpOwner;
// check error(s)
if (NULL == lpv)
return 0x0bad;
lpThreadId = reinterpret_cast<THREAD_ID *> (lpv);
lpOwner = reinterpret_cast<DVVideoDecoder *> (lpThreadId->m_lpOwner);
// wait for start
vm_event_wait(lpOwner->m_lpStartEvent + lpThreadId->m_nNumber);
// a zero-timeout wait on m_Quit acts as a non-blocking "should I exit?" poll
while (VM_TIMEOUT == vm_event_timed_wait(&(lpOwner->m_Quit), 0))
{
// decompress part of frame
lpOwner->DecompressSegment(lpThreadId->m_nNumber);
// set finish
vm_event_signal(lpOwner->m_lpStopEvent + lpThreadId->m_nNumber);
// wait for start
vm_event_wait(lpOwner->m_lpStartEvent + lpThreadId->m_nNumber);
};
return lpThreadId->m_nNumber;
} // Ipp32u DVVideoDecoder::ThreadWorkingRoutine(void *lpv)
// Report the decoder's current parameters to the caller.
//
// info may be a plain BaseCodecParams (nothing is filled in) or a
// VideoDecoderParams (clip info, allocator, post-processing and thread count
// are filled in).
// Returns UMC_ERR_NULL_PTR for a NULL argument, UMC_ERR_NOT_INITIALIZED
// before a successful Init(), UMC_OK otherwise.
//
// Cleanup vs. original: the second `m_bInitSuccess == true` check was
// redundant — the function has already returned above when it is false.
Status DVVideoDecoder::GetInfo(BaseCodecParams* info)
{
    if (NULL == info)
        return UMC_ERR_NULL_PTR;
    if (!m_bInitSuccess)
        return UMC_ERR_NOT_INITIALIZED;

    // Fill the video-specific fields only when the caller actually passed
    // a VideoDecoderParams; a base-class argument still returns UMC_OK.
    VideoDecoderParams *pParams = DynamicCast<VideoDecoderParams> (info);
    if (NULL != pParams)
    {
        pParams->info = m_ClipInfo;
        pParams->lpMemoryAllocator = m_pMemoryAllocator;
        pParams->pPostProcessing = m_PostProcessing;
        pParams->numThreads = m_nNumberOfThreads;
    }
    return UMC_OK;
} // Status DVVideoDecoder::GetInfo(void)
} // end namespace UMC
#endif // UMC_ENABLE_DV_VIDEO_DECODER