📄 umc_h264_dec_conversion.cpp
字号:
// NOTE(review): this chunk begins inside H264VideoDecoder::OutputFrame() —
// below is the tail of a switch over the output stage; the function's opening
// (and the matching #ifdef USE_SEI) lies above this chunk.
                else
                    ps = UMC_END_OF_STREAM;
            }
            break;

        default :
            // Any other stage: hand the frame to the asynchronous conversion
            // path, but only when the conversion worker thread actually exists.
            if (vm_thread_is_valid(&m_hConversionThread))
                ConvertFrameAsync(dst, nStage);
            break;
        }
#endif // USE_SEI

    return ps;

} // Status H264VideoDecoder::OutputFrame(MediaData *dst, Ipp32u nStage)

// Drive the asynchronous (second-thread) color conversion for the current
// display frame, depending on the decoding stage.
//   dst    - destination media buffer; work happens only when it is a VideoData
//   nStage - pipeline stage: BEFORE_DECODING fills the m_Convert job and wakes
//            the worker; AFTER/ABSENT/ERROR_DECODING block until the worker
//            signals completion; PREPARE_DECODING and others are no-ops.
// Returns UMC_OK unconditionally (res is never modified).
Status H264VideoDecoder::ConvertFrameAsync(MediaData *dst, Ipp32u nStage)
{
    Status res = UMC_OK;
    VideoData *pVData = DynamicCast<VideoData> (dst);

    // Nothing to do without both a decoded display frame and a video buffer.
    if ((m_pDisplayFrame) && (pVData))
    {
        switch (nStage)
        {
        case BEFORE_DECODING:
            {
                H264DecYUVWorkSpace *pDisplayBuffer;

                pDisplayBuffer = m_pDisplayFrame;

                // Perform output color conversion and video effects, if we didn't
                // already write our output to the application's buffer.
                if (NULL != m_pConverter)
                    InitColorConverter(m_pDisplayFrame, 0);

                // Describe the destination (single interleaved plane) ...
                m_Convert.lpDest0 = (Ipp8u *) pVData->m_lpDest[0];
                m_Convert.PitchDest0 = pVData->m_lPitch[0];
                m_Convert.lpDest1 = NULL;
                m_Convert.PitchDest1 = 0;
                m_Convert.lpDest2 = NULL;
                m_Convert.PitchDest2 = 0;

                // ... and the YUV source planes. NOTE(review): the U plane feeds
                // lpSource2 and the V plane feeds lpSource1 — presumably the
                // converter expects YV12-style (Y/V/U) plane ordering; confirm
                // against the color-converter's contract before "fixing" this.
                m_Convert.lpSource0 = pDisplayBuffer->m_pYPlane;
                m_Convert.lpSource2 = pDisplayBuffer->m_pUPlane;
                m_Convert.lpSource1 = pDisplayBuffer->m_pVPlane;
                m_Convert.PitchSource0 = pDisplayBuffer->pitch();
                m_Convert.PitchSource1 = pDisplayBuffer->pitch();
                m_Convert.PitchSource2 = pDisplayBuffer->pitch();

                // Wake the conversion worker thread (it waits on this event).
                vm_event_signal(&m_hStartConversion);
            }
            break;

        case AFTER_DECODING:
        case ABSENT_DECODING:
        case ERROR_DECODING:
            // wait conversion thread: block until the worker has finished the
            // job started at BEFORE_DECODING and signaled m_hStopConversion.
            vm_event_wait(&m_hStopConversion);
            break;

        case PREPARE_DECODING:
        default:
            break;
        }
    }

    return res;

} // Status H264VideoDecoder::ConvertFrameAsync(MediaData *dst, Ipp32u nStage)

// Entry point of the color-conversion worker thread.
// Protocol: wait on m_hStartConversion, run the converter on the pre-filled
// m_Convert job, signal m_hStopConversion, repeat until m_bQuit becomes true.
// NOTE(review): m_bQuit is only re-checked after a wakeup, so shutdown
// presumably sets m_bQuit and then signals m_hStartConversion — verify the
// teardown path elsewhere in the class.
//   p - opaque pointer to the owning H264VideoDecoder instance.
// Returns an arbitrary magic exit code (never inspected here).
unsigned int H264VideoDecoder::ConvertFrameAsyncSecondThread(void *p)
{
    H264VideoDecoder *pObj = (H264VideoDecoder *) p;

    if (pObj)
    {
        // wait for start of conversion
        vm_event_wait(&pObj->m_hStartConversion);

        while (false == pObj->m_bQuit)
        {
            // do conversion
            if (NULL != pObj->m_pConverter)
                pObj->m_pConverter->ConvertFrame(&pObj->m_Convert);

            // set stop event: unblocks ConvertFrameAsync's AFTER/ABSENT/ERROR wait.
            vm_event_signal(&pObj->m_hStopConversion);

            // wait for start of conversion
            vm_event_wait(&pObj->m_hStartConversion);
        }
    }

    return 0x05deccc;

} // unsigned int H264VideoDecoder::ConvertFrameAsyncSecondThread(void *p)

// Synchronous color conversion of the current display frame into dst.
// Unlike ConvertFrameAsync, this runs the converter inline and then stamps the
// destination VideoData with the converted size/format.
//   dst - destination media buffer; work happens only when it is a VideoData.
// Returns UMC_OK unconditionally (ps is never modified).
Status H264VideoDecoder::ConvertFrame(MediaData *dst)
{
    VideoData *pVData = DynamicCast<VideoData> (dst);
    Status ps = UMC_OK;
    H264DecYUVWorkSpace *pDisplayBuffer;

    VM_ASSERT(m_pDisplayFrame);
    pDisplayBuffer = m_pDisplayFrame;

    // Perform output color conversion and video effects, if we didn't
    // already write our output to the application's buffer.
    if (pVData)
    {
        if (NULL != m_pConverter)
        {
            InitColorConverter(m_pDisplayFrame, 0);
        }

        m_Convert.lpDest0 = (Ipp8u *) pVData->m_lpDest[0];
        m_Convert.PitchDest0 = pVData->m_lPitch[0];
        m_Convert.lpDest1 = NULL;
        m_Convert.PitchDest1 = 0;
        m_Convert.lpDest2 = NULL;
        m_Convert.PitchDest2 = 0;

        // Same deliberate-looking U->lpSource2 / V->lpSource1 ordering as the
        // async path — keep the two paths consistent if this ever changes.
        m_Convert.lpSource0 = pDisplayBuffer->m_pYPlane;
        m_Convert.lpSource2 = pDisplayBuffer->m_pUPlane;
        m_Convert.lpSource1 = pDisplayBuffer->m_pVPlane;
        m_Convert.PitchSource0 = pDisplayBuffer->pitch();
        m_Convert.PitchSource1 = pDisplayBuffer->pitch();
        m_Convert.PitchSource2 = pDisplayBuffer->pitch();

        if (NULL != m_pConverter)
            m_pConverter->ConvertFrame(&m_Convert);

        // Propagate the converted dimensions and format to the output buffer.
        pVData->SetVideoParameters(m_Convert.ConversionInit.SizeDest.width,
                                   m_Convert.ConversionInit.SizeDest.height,
                                   m_Convert.ConversionInit.FormatDest);
    }

    return ps;

} // Status H264VideoDecoder::ConvertFrame(MediaData *dst)

#ifdef USE_SEI
// Convert one field (every second row) of pDisplayFrame into one field of dst.
//   pDisplayFrame - decoded frame to read from; UMC_BAD_STREAM when NULL.
//   dst           - destination media buffer (must be a VideoData).
//   WhichField    - destination field parity (0 = top rows, 1 = bottom rows).
// The source field is chosen from the frame's SEI picture structure.
Status H264VideoDecoder::OutputHalfFrame(H264DecoderFrame *pDisplayFrame, MediaData *dst, Ipp8u WhichField)
{
    VideoData *pVData = DynamicCast<VideoData> (dst);
    Status ps = UMC_OK;
    H264DecYUVWorkSpace *pDisplayBuffer;

    for (;;) // Not really a loop, just use break instead of goto on error
    {
        if (!pDisplayFrame)
        {
            ps = UMC_BAD_STREAM;
            break;
        }

        // NOTE(review): implicit H264DecoderFrame* -> H264DecYUVWorkSpace*
        // conversion — presumably a base class of the frame; confirm.
        pDisplayBuffer = pDisplayFrame;

        // Perform output color conversion and video effects, if we didn't
        // already write our output to the application's buffer.
        if (pDisplayBuffer && pVData)
        {
            // we always use color space converter
//            if (NULL == m_pConverter)
//                m_pConverter = new ColorSpaceConverter();
//            if (NULL == m_pConverter)
//                return UMC_NOT_INITIALIZED;
//            else
            if (NULL != m_pConverter)
            {
                InitColorConverter(pDisplayFrame,1);
            }

            //and in the current frame
            // cur_field: source field parity derived from the SEI picture
            // structure. NOTE(review): values 6 and 4 are magic — presumably
            // "bottom field first"-type structures; confirm against the SEI
            // pic_struct handling elsewhere in the decoder.
            Ipp8u cur_field,dest_field;
            cur_field = pDisplayFrame->m_PictureStructureFromSEI==6 || pDisplayFrame->m_PictureStructureFromSEI==4;
            dest_field = WhichField;

            // Destination: start at the requested field row, stride 2 rows.
            m_Convert.lpDest0 = (Ipp8u *) pVData->m_lpDest[0]+dest_field*pVData->m_lPitch[0];
            m_Convert.PitchDest0 = pVData->m_lPitch[0]*2;
            m_Convert.lpDest1 = NULL;
            m_Convert.PitchDest1 = 0;
            m_Convert.lpDest2 = NULL;
            m_Convert.PitchDest2 = 0;

            // Source: start at the selected source field row, stride 2 rows
            // (same U/V -> lpSource2/lpSource1 ordering as ConvertFrame).
            m_Convert.lpSource0 = pDisplayBuffer->m_pYPlane+cur_field*pDisplayBuffer->pitch();
            m_Convert.lpSource2 = pDisplayBuffer->m_pUPlane+cur_field*pDisplayBuffer->pitch();
            m_Convert.lpSource1 = pDisplayBuffer->m_pVPlane+cur_field*pDisplayBuffer->pitch();
            m_Convert.PitchSource0 = pDisplayBuffer->pitch()*2;
            m_Convert.PitchSource1 = pDisplayBuffer->pitch()*2;
            m_Convert.PitchSource2 = pDisplayBuffer->pitch()*2;

            if (NULL != m_pConverter)
            {
                m_pConverter->ConvertFrame(&m_Convert);
            }

            //pDisplayFrame->setWasDisplayed();
        }

        break;
    }

    return ps;

} // Status H264VideoDecoder::OutputHalfFrame(H264DecoderFrame *pDisplayFrame, MediaData *dst, Ipp8u WhichField)

// Convert pDisplayFrame into dst with the two fields swapped: pass 1 writes
// the source's even rows into the destination's odd rows, pass 2 writes the
// source's odd rows into the destination's even rows (reversed field order).
//   pDisplayFrame - decoded frame to read from; UMC_BAD_STREAM when NULL.
//   dst           - destination media buffer (must be a VideoData).
Status H264VideoDecoder::OutputReverseFrame(H264DecoderFrame *pDisplayFrame, MediaData *dst)
{
    VideoData *pVData = DynamicCast<VideoData> (dst);
    Status ps = UMC_OK;
    H264DecYUVWorkSpace *pDisplayBuffer;

    for (;;) // Not really a loop, just use break instead of goto on error
    {
        if (!pDisplayFrame)
        {
            ps = UMC_BAD_STREAM;
            break;
        }

        pDisplayBuffer = pDisplayFrame;

        // Perform output color conversion and video effects, if we didn't
        // already write our output to the application's buffer.
        if (pDisplayBuffer && pVData)
        {
            // we always use color space converter
//            if (NULL == m_pConverter)
//                m_pConverter = new ColorSpaceConverter();
//            if (NULL == m_pConverter)
//                return UMC_NOT_INITIALIZED;
//            else
            if (NULL != m_pConverter)
            {
                InitColorConverter(pDisplayFrame,1);
            }

            //and in the current frame
            // Pass 1: source even rows (field 0) -> destination odd rows.
            m_Convert.lpDest0 = (Ipp8u *) pVData->m_lpDest[0]+pVData->m_lPitch[0];
            m_Convert.PitchDest0 = pVData->m_lPitch[0]*2;
            m_Convert.lpDest1 = NULL;
            m_Convert.PitchDest1 = 0;
            m_Convert.lpDest2 = NULL;
            m_Convert.PitchDest2 = 0;
            m_Convert.lpSource0 = pDisplayBuffer->m_pYPlane;
            m_Convert.lpSource2 = pDisplayBuffer->m_pUPlane;
            m_Convert.lpSource1 = pDisplayBuffer->m_pVPlane;
            m_Convert.PitchSource0 = pDisplayBuffer->pitch()*2;
            m_Convert.PitchSource1 = pDisplayBuffer->pitch()*2;
            m_Convert.PitchSource2 = pDisplayBuffer->pitch()*2;

            if (NULL != m_pConverter)
            {
                m_pConverter->ConvertFrame(&m_Convert);
            }

            // Pass 2: source odd rows (field 1) -> destination even rows.
            m_Convert.lpDest0 = (Ipp8u *) pVData->m_lpDest[0];
            m_Convert.PitchDest0 = pVData->m_lPitch[0]*2;
            m_Convert.lpDest1 = NULL;
            m_Convert.PitchDest1 = 0;
            m_Convert.lpDest2 = NULL;
            m_Convert.PitchDest2 = 0;
            m_Convert.lpSource0 = pDisplayBuffer->m_pYPlane+pDisplayBuffer->pitch();
            m_Convert.lpSource2 = pDisplayBuffer->m_pUPlane+pDisplayBuffer->pitch();
            m_Convert.lpSource1 = pDisplayBuffer->m_pVPlane+pDisplayBuffer->pitch();
            m_Convert.PitchSource0 = pDisplayBuffer->pitch()*2;
            m_Convert.PitchSource1 = pDisplayBuffer->pitch()*2;
            m_Convert.PitchSource2 = pDisplayBuffer->pitch()*2;

            if (NULL != m_pConverter)
            {
                m_pConverter->ConvertFrame(&m_Convert);
            }

            //pDisplayFrame->setWasDisplayed();
        }

        break;
    }

    return ps;

} // Status H264VideoDecoder::OutputReverseFrame(H264DecoderFrame *pDisplayFrame, MediaData *dst)
#endif // USE_SEI

} // namespace UMC
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -