📄 avsync.cpp
            // OK, there is no more data left in the splitter. Extract the rest
            // of the decoded data from the decoder.
            if (UMC::UMC_OK == umcRes && NULL == data.GetDataPointer())
            {
                umcSplitRes = UMC::UMC_END_OF_STREAM;
            }
            if (UMC::UMC_END_OF_STREAM == umcSplitRes)
            {
                umcRes = UMC::UMC_OK;
            }

            out_data.SetTime(data.GetTime());

            if (UMC::UMC_OK == umcRes)
            {
                if (UMC::UMC_END_OF_STREAM != umcSplitRes)
                {
                    umcRes = GetFrameStub(&data, out_data, dfDecTime);
                }
                else // sweep the remaining reference frames out of the decoder
                {
                    umcRes = GetFrameStub(NULL, out_data, dfDecTime);
                }
                if (umcRes != UMC::UMC_OK)
                {
                    vm_time_sleep(10);
                }
            }

            vm_debug_trace(0, VM_STRING("Decoder PTS: %lf\n"), out_data.GetTime());
            //vm_debug_trace(-1, VM_STRING("%d\n"),
            //    out_data.m_FrameType);
        } while ((UMC::UMC_NOT_ENOUGH_DATA == umcRes ||
                  UMC::UMC_NOT_FIND_SYNCWORD == umcRes) && !m_bStopFlag);

        if (((UMC::UMC_OK != umcRes) && (umcRes != UMC::UMC_WAIT_FOR_REPOSITION)) ||
            m_bStopFlag)
        {
            break;
        }

        // Update the statistics structure
        {
            m_Stat.dfConversionTime = m_ColorConverter.GetConversionTime();
            m_Stat.dfDecodeTime += dfDecTime;
            m_Stat.uiFramesDecoded++;
            m_Stat.dfDecodeRate = (double)(vm_var64s)(m_Stat.uiFramesDecoded) /
                                  m_Stat.dfDecodeTime;
            if (0.0 != m_Stat.dfConversionTime)
            {
                m_Stat.dfConversionRate = (double)(vm_var64s)(m_Stat.uiFramesDecoded) /
                                          m_Stat.dfConversionTime;
            }
            else
            {
                m_Stat.dfConversionRate = 0.0;
            }
        }

        // Unlock the video render surface
        if (bVideoRndrIsLocked)
        {
            umcRes = m_pVideoRender->UnLockInputBuffer(&out_data);
            bVideoRndrIsLocked = false;
        }

        m_bVideoPlaying = true;

        m_eventSyncPoint1Video.Set();
        m_eventSyncPointVideo.Wait();
        m_eventSyncPoint1Video.Reset();

        if (umcRes == UMC::UMC_WAIT_FOR_REPOSITION)
        {
            umcRes = UMC::UMC_OK;
        }

        vm_time_sleep(0);
    }

    if (m_bVideoPlaying == false)
        m_bStopFlag = true;

    if (bVideoRndrIsLocked)
    {
        out_data.SetTime(-1.0);
        out_data.SetFrameType(UMC::NONE_PICTURE);
        umcRes = m_pVideoRender->UnLockInputBuffer(&out_data);
        //m_bVideoPlaying = true;
    }

    m_pVideoRender->Stop();
    vm_debug_trace(4, VM_STRING("AVSync::VideoProc start exiting\n"));

    // to avoid a deadlock during reposition
    m_eventSyncPoint1Video.Set();//
    m_bVideoPlaying = false;

    vm_debug_trace(4, VM_STRING("AVSync::VideoProc exit\n"));
    LOG(VM_STRING("VideoProc,-"));
}

// Retry a lock call while the peer reports "not enough data/buffer",
// giving the other threads time to produce or consume data.
#define LockBuffer(pFunction, pMedium, bStop)               \
{                                                           \
    do                                                      \
    {                                                       \
        umcRes = pFunction(pMedium);                        \
        if ((UMC::UMC_NOT_ENOUGH_DATA == umcRes) ||         \
            (UMC::UMC_NOT_ENOUGH_BUFFER == umcRes))         \
            vm_time_sleep(5);                               \
    } while ((false == bStop) &&                            \
             ((UMC::UMC_NOT_ENOUGH_DATA == umcRes) ||       \
              (UMC::UMC_NOT_ENOUGH_BUFFER == umcRes)));     \
}

template<class typeSource, class typeMedium>
UMC::Status LockInputBuffer(typeSource *pSource, typeMedium *pMedium, bool *pbStop)
{
    UMC::Status umcRes;

    do
    {
        umcRes = pSource->LockInputBuffer(pMedium);
        if (UMC::UMC_NOT_ENOUGH_BUFFER == umcRes)
            vm_time_sleep(5);
    } while ((false == *pbStop) && (UMC::UMC_NOT_ENOUGH_BUFFER == umcRes));

    return umcRes;
} // UMC::Status LockInputBuffer(typeSource *pSource, typeMedium *pMedium, bool *pbStop)

template<class typeDestination, class typeMedium>
UMC::Status LockOutputBuffer(typeDestination *pDestination, typeMedium *pMedium, bool *pbStop)
{
    UMC::Status umcRes;

    do
    {
        umcRes = pDestination->LockOutputBuffer(pMedium);
        if (UMC::UMC_NOT_ENOUGH_DATA == umcRes)
            vm_time_sleep(5);
    } while ((false == *pbStop) && (UMC::UMC_NOT_ENOUGH_DATA == umcRes));

    return umcRes;
} // UMC::Status LockOutputBuffer(typeDestination *pDestination, typeMedium *pMedium, bool *pbStop)

void
AVSync::AudioProc()
{
    //LOG(VM_STRING("AudioProc,+"));
    UMC::Status umcRes = UMC::UMC_OK;
    /* UMC::Status umcSplRes = UMC::UMC_OK; */
    UMC::MediaData ComprData;
    UMC::AudioData UncomprData;
    bool bSplitterIsEmpty = false;
    double dfStartTime = 0;
    vm_var32 uiComprSize = 0;
    vm_var32 uiShift = 0;

    // check error(s)
    assert(NULL != m_pSplitter);

    vm_debug_trace(4, VM_STRING("AVSync::AudioProc start\n"));

    // Keep passing data from the splitter to the decoder and from
    // the decoder to the renderer
    UncomprData.m_info = m_SplitterInfo.m_audio_info;
    while ((false == m_bStopFlag) && (false == bSplitterIsEmpty))
    {
        // 1st step: obtain data from the splitter
        LockBuffer(m_pSplitter->GetNextAudioData, &ComprData, m_bStopFlag);

        // check error(s) & end of stream
        if (UMC::UMC_END_OF_STREAM == umcRes)
        {
            bSplitterIsEmpty = true;
            ComprData.SetDataSize(0);
            ComprData.SetTime(-1);
        }
        else if (UMC::UMC_WAIT_FOR_REPOSITION == umcRes)
        {
            vm_time_sleep(10);
            m_eventSyncPoint1Audio.Set();
            m_eventSyncPointAudio.Wait();
            m_eventSyncPoint1Audio.Reset();

            ComprData.SetDataSize(0);
            ComprData.SetTime(-1);
            UncomprData.SetDataSize(0);
            UncomprData.SetTime(-1);
            uiComprSize = 0;

            umcRes = m_pSplitter->GetNextAudioData(&ComprData);
            vm_debug_trace(-1, VM_STRING("After Repos ComprData.time = %lf\n"),
                           ComprData.GetTime());
        }
        else if (UMC::UMC_OK != umcRes)
            break;

        // save data size and data time
        uiComprSize = ComprData.GetDataSize();
        dfStartTime = ComprData.GetTime();

        // decode the data and pass it to the renderer
        uiShift = 0;
        do
        {
            // 2nd step: compressed data should be passed to the decoder first
            if (m_pDSAudioCodec)
            {
                UMC::MediaData buff;

                // get the decoder's internal buffer
                umcRes = m_pDSAudioCodec->LockInputBuffer(&buff);
                // check error(s)
                if (UMC::UMC_OK != umcRes)
                    break;

                // copy compressed data into the decoder's buffer
                if (UMC::UMC_OK == umcRes)
                {
                    vm_var32 uiDataToCopy = min((vm_var32) buff.GetBufferSize(), uiComprSize);

                    memcpy(buff.GetDataPointer(),
                           (Ipp8u*)ComprData.GetDataPointer() + uiShift,
                           uiDataToCopy);
                    buff.SetDataSize(uiDataToCopy);
                    buff.SetTime(dfStartTime);
                    umcRes = m_pDSAudioCodec->UnLockInputBuffer(&buff,
                        (bSplitterIsEmpty) ? (UMC::UMC_END_OF_STREAM) : (UMC::UMC_OK));
                    // check error(s)
                    if (UMC::UMC_OK != umcRes)
                        break;

                    uiShift += uiDataToCopy;
                    uiComprSize -= uiDataToCopy;
                }
            }

            do
            {
                // wait until the audio renderer frees enough internal buffers
                umcRes = LockInputBuffer(m_pAudioRender, &UncomprData, (bool *) &m_bStopFlag);
                // check error(s)
                if (UMC::UMC_OK != umcRes)
                    break;

                // move decoded data to the renderer
                // branch for compressed data
                if (m_pDSAudioCodec)
                {
                    double dfStart, dfEnd;

                    UncomprData.SetDataSize(0);
                    vm_tick ullDecTime = vm_time_get_tick();
                    umcRes = m_pDSAudioCodec->GetFrame(&UncomprData);
                    ullDecTime = vm_time_get_tick() - ullDecTime;
                    // check error(s)
                    if (UMC::UMC_OK != umcRes)
                        break;
                    assert(UMC::UMC_UNSUPPORTED != umcRes);

                    // calculate the decoding time
                    m_Stat.dfAudioDecodeTime += (double)(vm_var64s)(ullDecTime) /
                                                (double)(vm_var64s)m_lliFreq;
                    UncomprData.GetTime(dfStart, dfEnd);
                    m_Stat.dfAudioPlayTime += dfEnd - dfStart;
                }
                // branch for PCM data
                else
                {
                    double dfStart = 0.0;
                    double dfEnd = 0.0;

                    if (0 == uiComprSize)
                        break;

                    vm_var32 uiDataToCopy = min(uiComprSize, UncomprData.GetBufferSize());

                    memcpy(UncomprData.GetDataPointer(),
                           ((vm_byte*)ComprData.GetDataPointer()) + uiShift,
                           uiDataToCopy);
                    UncomprData.SetDataSize(uiDataToCopy);

                    // split the chunk's time range proportionally to the bytes copied
                    ComprData.GetTime(dfStart, dfEnd);
                    double dfNorm = (dfEnd - dfStart) / (uiShift + uiComprSize);
                    dfStart += dfNorm * uiShift;
                    dfEnd = dfStart + dfNorm * uiDataToCopy;
                    UncomprData.SetTime(dfStart, dfEnd);

                    uiShift += uiDataToCopy;
                    uiComprSize -= uiDataToCopy;

                    vm_debug_trace(4, VM_STRING("AudioProc: start: %lf, stop %lf\n"),
                                   dfStart, dfEnd);
                }

                // call the finalizing function
                if (UncomprData.GetDataSize())
                {
                    double dfEndTime;

                    m_bAudioPlaying = true;
                    vm_debug_trace(0, VM_STRING("taudio time: %f\n"), UncomprData.GetTime());
                    UncomprData.GetTime(dfStartTime, dfEndTime);
                    m_Stat.dfAudioDecodeRate =
                        (double)(m_Stat.dfAudioDecodeTime / m_Stat.dfAudioPlayTime);
                    dfStartTime = dfEndTime;

                    umcRes = m_pAudioRender->UnLockInputBuffer(&UncomprData);
                    // check error(s)
                    if (UMC::UMC_OK != umcRes)
                        break;
                }
            } while (false == m_bStopFlag);

            // check after-cycle error(s)
            if ((UMC::UMC_OK != umcRes) &&
                (UMC::UMC_NOT_ENOUGH_DATA != umcRes) &&
                (UMC::UMC_NOT_FIND_SYNCWORD != umcRes))
                break;

            umcRes = UMC::UMC_OK;

            m_eventSyncPoint1Audio.Set();
            m_eventSyncPointAudio.Wait();
            m_eventSyncPoint1Audio.Reset();

            if (umcRes == UMC::UMC_WAIT_FOR_REPOSITION)
            {
                umcRes = UMC::UMC_OK;
            }

            TASK_SWITCH();
        } while ((false == m_bStopFlag) && (0 != uiComprSize));

        // check after-cycle error(s)
        if ((UMC::UMC_OK != umcRes) && (umcRes != UMC::UMC_WAIT_FOR_REPOSITION))
            break;

        TASK_SWITCH();
    }

    // send end of stream to the renderer
    if (false == m_bStopFlag)
    {
        // wait until the audio renderer frees enough internal buffers
        do
        {
            umcRes = m_pAudioRender->LockInputBuffer(&UncomprData);
            if (UMC::UMC_NOT_ENOUGH_BUFFER == umcRes)
                vm_time_sleep(10);
        } while ((false == m_bStopFlag) && (UMC::UMC_NOT_ENOUGH_BUFFER == umcRes));

        // check error(s)
        if (UMC::UMC_OK == umcRes)
        {
            UncomprData.SetDataSize(0);
            UncomprData.SetTime(0.0);
            m_pAudioRender->UnLockInputBuffer(&UncomprData, UMC::UMC_END_OF_STREAM);
        }
    }

    // to avoid a deadlock during reposition
    m_eventSyncPoint1Audio.Set();
    m_eventSyncPoint1Video.Set();
    m_eventSyncPoint1Sync.Set();

    vm_debug_trace(4, VM_STRING("AVSync::AudioProc exit\n"));
    //LOG(VM_STRING("AudioProc,-"));
}
//*/

unsigned int
AVSync::SyncThreadProc(void* pvParam)
{
    assert(NULL != pvParam);
    reinterpret_cast<AVSync*>(pvParam)->SyncProc();
    return 0;
}

unsigned int
AVSync::VideoThreadProc(void* pvParam)
{
    assert(NULL != pvParam);
    reinterpret_cast<AVSync*>(pvParam)->VideoProc();
    return 0;
}

unsigned int
AVSync::AudioThreadProc(void* pvParam)
{
    assert(NULL != pvParam);
    reinterpret_cast<AVSync*>(pvParam)->AudioProc();
    return 0;
}

void
AVSync::ResizeDisplay(UMC::RECT& rDispRect, UMC::RECT& rRangeRect)
{
    if (NULL != m_pVideoRender)
    {
        if (2 >= rDispRect.bottom - rDispRect.top ||
            2 >= rDispRect.right - rDispRect.left)
        {   // I guess application was minimized
            m_pVideoRender->HideSurface();
        }
        else
        {
            m_pVideoRender->ShowSurface();
            m_pVideoRender->ResizeDisplay(rDispRect, rRangeRect);
        }
    }
}
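The PCM branch of AVSync::AudioProc above splits one splitter chunk's [start, end] time range proportionally over the renderer-sized copies it makes (dfNorm is seconds per byte). A minimal standalone sketch of that arithmetic follows; the byte counts and names are hypothetical and are not part of avsync.cpp.

// Illustration only: proportional timestamp split, with made-up sizes.
#include <algorithm>
#include <cstdio>

int main()
{
    const double dfStart = 0.0, dfEnd = 0.096;  // time range of one splitter chunk, seconds
    const unsigned int uiTotal  = 4608;         // bytes in that chunk (hypothetical)
    const unsigned int uiBuffer = 1536;         // renderer buffer capacity per lock (hypothetical)

    unsigned int uiShift = 0;                   // bytes already handed to the renderer
    while (uiShift < uiTotal)
    {
        unsigned int uiCopy = std::min(uiBuffer, uiTotal - uiShift);
        double dfNorm       = (dfEnd - dfStart) / uiTotal;    // seconds per byte
        double dfChunkStart = dfStart + dfNorm * uiShift;     // offset of this copy into the chunk
        double dfChunkEnd   = dfChunkStart + dfNorm * uiCopy;
        std::printf("bytes [%u..%u) -> [%f, %f] s\n",
                    uiShift, uiShift + uiCopy, dfChunkStart, dfChunkEnd);
        uiShift += uiCopy;
    }
    return 0;
}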