📄 avsync.cpp
字号:
// --- tail of a reposition routine (its beginning lies outside this chunk) ---
// After a seek, read ahead in both elementary streams until the video and
// audio PTS values are within REPOSITION_AVSYNC_PRECISION of each other,
// then re-arm the worker threads.
                vm_time_sleep(1);
            }
            // Read ahead in the audio stream until a valid (non-negative)
            // audio PTS shows up or the splitter reports end of stream.
            while (audioTime < 0 && (umcRes != UMC::UMC_END_OF_STREAM))
            {
                vm_debug_trace(-1, VM_STRING("CheckNextAudioData start\n"));
                // NOTE(review): the status from GetNextAudioData() is
                // immediately overwritten by CheckNextAudioData() — the first
                // result is lost.  Confirm both calls are intended.
                umcRes = m_pSplitter->GetNextAudioData(&audioData);
                umcRes = m_pSplitter->CheckNextAudioData(&audioData);
                audioTime = audioData.GetTime();
                vm_debug_trace(-1, VM_STRING("CheckNextAudioData end = %lf\n"), audioData.GetTime());
                vm_time_sleep(1);
            }
            // Video is ahead of audio: step the lagging stream forward until
            // the PTS gap drops below the precision threshold (a precision of
            // -1 disables this alignment).
            if ((videoTime > audioTime) && (REPOSITION_AVSYNC_PRECISION != -1))
            {
                while ((fabs(videoTime - audioTime) > REPOSITION_AVSYNC_PRECISION) && (umcRes != UMC::UMC_END_OF_STREAM))
                {
                    // Audio is behind (or has no valid PTS yet) — advance it.
                    if ((videoTime > audioTime) || (audioTime < 0))
                    {
                        vm_debug_trace(-1, VM_STRING("CheckNextAudioData start\n"));
                        umcRes = m_pSplitter->GetNextAudioData(&audioData);
                        audioTime = audioData.GetTime();
                        vm_time_sleep(1);
                        vm_debug_trace(-1, VM_STRING("CheckNextAudioData end = %lf\n"), audioData.GetTime());
                    }
                    if ((fabs(videoTime - audioTime) < REPOSITION_AVSYNC_PRECISION) || (umcRes == UMC::UMC_END_OF_STREAM))
                        break;
                    // Video is behind (or has no valid PTS yet) — advance it.
                    if ((videoTime < audioTime) || (videoTime < 0))
                    {
                        vm_debug_trace(-1, VM_STRING("GetNextVideoData start\n"));
                        umcRes = m_pSplitter->GetNextVideoData(&videoData);
                        videoTime = videoData.GetTime();
                        vm_time_sleep(1);
                        vm_debug_trace(-1, VM_STRING("GetNextVideoData end = %lf\n"), videoData.GetTime());
                    }
                }
            }
            vm_debug_trace(-1, VM_STRING("Reposition ends: v = %.3lf a = %.3lf\n"), videoTime, audioTime);
        }
        // Restore the saved volume (presumably changed earlier in this
        // routine, above this chunk — confirm against the missing part).
        if(m_pAudioRender)
        {
            m_pAudioRender->SetVolume(volume);
        }
        m_Stat.uiFramesRendered = 0;
        // Release every worker thread waiting on its sync-point event.
        if (m_SyncThread.IsValid())
        {
            m_eventSyncPointSync.Set();
        }
        if (m_AudioThread.IsValid())
        {
            m_eventSyncPointAudio.Set();
        }
        if (m_VideoThread.IsValid())
        {
            m_eventSyncPointVideo.Set();
        }
    }
}

// Calls the video decoder for one frame and measures the time spent inside
// GetFrame().
//   pInData    - compressed input from the splitter (may be NULL)
//   rOutData   - receives the decoded frame
//   rdfDecTime - receives the decode time in seconds (tick delta / m_lliFreq)
// Returns the decoder status.
UMC::Status
AVSync::GetFrameStub(UMC::MediaData* pInData, UMC::VideoData& rOutData, double& rdfDecTime)
{
    UMC::Status umcRes = UMC::UMC_OK;
    vm_tick DecStartTime, DecStopTime;
    // Input size before the decode call; only consumed by the commented-out
    // pointer-rewind code below.
    int iInDataSize = 0;

    // If any input data is present
    if (NULL != pInData)
    {
        iInDataSize = pInData->GetDataSize();
    }
    DecStartTime = vm_time_get_tick();
    umcRes = m_pVideoDecoder->GetFrame(pInData, &rOutData);
    DecStopTime = vm_time_get_tick();
    // Convert the tick delta to seconds using the timer frequency.
    rdfDecTime = (double)(vm_var64s)(DecStopTime - DecStartTime) / (double)(vm_var64s)m_lliFreq;
    // NOTE(review): the ternary below yields 0 on both branches — looks like a
    // leftover from switching the trace level on success/failure.
    vm_debug_trace((UMC::UMC_OK == umcRes) ? 0 : 0, VM_STRING("GetFrame: %lf\n"), rOutData.GetTime());
/*
    assert(NULL == pInData || 0 <= pInData->GetDataSize());
    if (NULL != pInData && 0 < pInData->GetDataSize())
    {
        if(!(m_ccParams.ulSplitterFlags & UMC::FLAG_VSPL_COMPATIBLE))
        {
            double dfPTS = pInData->GetTime();
            int iDataSize = pInData->GetDataSize();
            int iBufSize = pInData->GetBufferSize() - (iInDataSize - iDataSize);
            vm_byte* pbNewPtr = (vm_byte*)pInData->GetPointer();
            pbNewPtr += iInDataSize - pInData->GetDataSize();
            pInData->SetPointer(pbNewPtr, iBufSize);
            pInData->SetDataSize(iDataSize);
            pInData->SetTime(dfPTS);
        }
    }
*/
    return umcRes;
}

// Number of consecutive late frames tolerated before SyncProc skips one.
#define SKIP_FRAME_TOLERENCE 7

// Synchronization thread procedure.  Paces video frame display against the
// audio clock (render time + device delay).  When there is no audio — or the
// audio ends while video remains — a NULLAudioRender is substituted so the
// same clock-driven loop keeps running.  Exits when both streams are
// exhausted, on error, or when m_bStopFlag is raised.
void
AVSync::SyncProc()
{
    LOG(VM_STRING("SyncProc,+"));

    vm_tick t1 = 0, t2 = 0, t2_prev = 0;
    // Rolling estimate of the render ("flip") duration: average of the last
    // three measured flips, seeded with 1 ms.
    double flip_time = 0.001;
    double flip_times[3] = {0.001,0.001,0.001};
    UMC::Status umcRes = UMC::UMC_OK;
    bool bNoAudioAnyMore = (NULL == m_pAudioRender);
    bool bNoVideoAnyMore = (NULL == m_pVideoRender);
    bool bNullAudioRender = false;
    UMC::AudioRender* pStoppedAudioRender = NULL;
    double prevVideoPts = 0;

    vm_debug_trace(4, VM_STRING("AVSync::SyncProc start\n"));

    // Wait until video and audio decoding threads will pass some data to
    // the renders - we can't start synchronization without it
    while (UMC::UMC_OK == umcRes && !m_bStopFlag &&
           ((NULL != m_pVideoRender && !m_bVideoPlaying) ||
            (NULL != m_pAudioRender && !m_bAudioPlaying)))
    {
        vm_time_sleep(1);
    }

    m_Stat.uiFramesRendered = 0;

    // Continue video data rendering until no more data in audio and video
    // render left
    for (int frame_num = 0, skip_window = 0;
         UMC::UMC_OK == umcRes && (!bNoAudioAnyMore || !bNoVideoAnyMore) && !m_bStopFlag;
         frame_num++)
    {
        m_Stat.uiFrameNum = frame_num + m_Stat.uiSkippedNum;

        // Check next frame PTS if any
        if (NULL != m_pVideoRender && !bNoVideoAnyMore)
        {
            m_Stat.dfFrameTime = m_pVideoRender->GetRenderFrame();
            vm_debug_trace(0, VM_STRING("next frame: %f"), m_Stat.dfFrameTime);
            // -1.0 means the render has no frame: either a reposition is in
            // progress or the video stream has ended.
            if (m_Stat.dfFrameTime == -1.0)
            {
                if(m_bStopSyncForReposition)
                    umcRes = UMC::UMC_WAIT_FOR_REPOSITION;
                else
                    bNoVideoAnyMore = true;
            }
        }
        if(frame_num == 0)
        {
            prevVideoPts = m_Stat.dfFrameTime;
        }

        // If we have no more audio but some video or if we have no audio at all
        if ((NULL == m_pAudioRender && m_bSync) ||
            (UMC::UMC_OK == umcRes && NULL == pStoppedAudioRender &&
             m_bAudioPlaying && bNoAudioAnyMore && !bNoVideoAnyMore))
        {
            // Substitute a dummy audio render that just keeps a running clock
            // starting from the current frame time.
            pStoppedAudioRender = m_pAudioRender;
            m_pAudioRender = new UMC::NULLAudioRender(m_Stat.dfFrameTime);
            if (NULL == m_pAudioRender)
            {
                // signal error, stop everything
                m_bStopFlag = true;
                umcRes = UMC::UMC_ALLOC;
            }
            else
            {
                // Start time counting
                m_pAudioRender->Pause(false);
                m_bAudioPlaying = true;
                bNullAudioRender = true;
            }
        }

        TASK_SWITCH();

        // Target display time: frame PTS minus the estimated flip duration.
        double ft = m_Stat.dfFrameTime - flip_time;
        vm_debug_trace(0, VM_STRING("#%d, video time: %f\n"), frame_num, m_Stat.dfFrameTime);

        // Let's synchronize video to audio if there is some data in the audio
        // render or NULLAudioRender is used
        if (UMC::UMC_OK == umcRes && (!bNoAudioAnyMore || bNullAudioRender))
        {
            VM_ASSERT(NULL != m_pAudioRender);
            VM_ASSERT(NULL != m_pVideoDecoder);

            double dfAudioTime = m_pAudioRender->GetTime();
            dfAudioTime += m_pAudioRender->GetDelay();
            vm_debug_trace(-1, VM_STRING("\t\taudio time: %f\n"), dfAudioTime);
            vm_debug_trace(-1, VM_STRING("video time: %f\n"), ft);

            // Wait for the next video frame display time if we have one
            if (!bNoVideoAnyMore)
            {
                // Video PTS jumped backwards (e.g. after a reposition): wait
                // for the audio clock to come within 5 s of the video time.
                if(prevVideoPts > ft + flip_time)
                {
                    // NOTE(review): `abs` is applied to a double here and in
                    // the loop condition below; depending on the headers in
                    // effect it may resolve to the integer overload and
                    // truncate — `fabs` is presumably intended.  Confirm.
                    if(abs(dfAudioTime - ft) > 5)
                    {
                        volatile double st1;
                        int dead_lock_count = 0;
                        for (st1 = dfAudioTime;
                             //UMC_OK == umcRes &&
                             st1 > 0 && (abs(st1 - ft) > 5) && !m_bStopFlag && !m_bStopSyncForReposition;)
                        {
                            vm_time_sleep(5);
                            dead_lock_count++;
                            st1 = m_pAudioRender->GetTime();
                            vm_debug_trace(-1, VM_STRING("\t\t\trestore time: %f\n"), st1);
                            // Watchdog: give up after ~1 s (200 x 5 ms naps)
                            // so a stalled audio clock cannot dead-lock us.
                            if(dead_lock_count > 200)
                            {
                                dead_lock_count = -1;
                                break;
                            }
                        }
                        if(dead_lock_count == -1)
                        {
                            bNoAudioAnyMore = true;
                        }
                        dfAudioTime = st1;
                    }
                }
                if (ft > dfAudioTime)
                {
                    // Video is ahead: reset skip statistics and nap (0-1 ms
                    // at a time) until the audio clock reaches display time.
                    skip_window = 0;
                    umcRes = m_pVideoDecoder->ResetSkipCount();
                    if (UMC::UMC_NOT_IMPLEMENTED == umcRes)
                    {
                        umcRes = UMC::UMC_OK;
                    }
                    volatile double st1;
                    for (st1 = dfAudioTime;
                         //UMC_OK == umcRes &&
                         st1 > 0 && ft > st1 && !m_bStopFlag && !m_bStopSyncForReposition;)
                    {
                        vm_time_sleep(max(0,min(1,(int)((ft-st1)*1000))));
                        st1 = m_pAudioRender->GetTime();
                        st1 += m_pAudioRender->GetDelay();
                        vm_debug_trace(-1, VM_STRING("wait: %d\t\taudio time: %f\n"), max(0,min(1,(int)((ft-st1)*1000))), st1);
                        if(m_bPaused)
                        {
                            m_pVideoRender->ShowLastFrame();
                        }
                    }
                }
                else if (ft < dfAudioTime && (dfAudioTime - ft > (0.7/m_dfFrameRate)))
                {
                    // Video is late by more than 0.7 of a frame period: after
                    // SKIP_FRAME_TOLERENCE consecutive late frames, ask the
                    // decoder to drop one.
                    if (++skip_window >= SKIP_FRAME_TOLERENCE)
                    {
                        skip_window = 0;
                        umcRes = m_pVideoDecoder->SkipVideoFrame(1);
                        if (UMC::UMC_NOT_IMPLEMENTED == umcRes)
                        {
                            umcRes = UMC::UMC_OK;
                        }
                        vm_debug_trace(0, VM_STRING("skip: %d\n"), 1);
                    }
                }
            }

            // Stop synchronization efforts and play the rest of the video on
            // maximum speed if we have no more audio samples
            if (-1.0 == dfAudioTime)
            {
                bNoAudioAnyMore = true;
            }
        }

        prevVideoPts = m_Stat.dfFrameTime;

        // It's time to display next video frame
        if (UMC::UMC_OK == umcRes && !bNoVideoAnyMore)
        {
            t1 = vm_time_get_tick();
            umcRes = m_pVideoRender->RenderFrame();
            t2 = vm_time_get_tick();
            m_Stat.uiFramesRendered++;
        }

        // Update Statistic structure and frame display statistic
        if (UMC::UMC_OK == umcRes)
        {
            m_Stat.uiSkippedNum = m_pVideoDecoder->GetSkipedFrame();
            // Fold the measured flip duration into the 3-sample average.
            double this_flip_time = (double)(vm_var64s)(t2-t1)/(double)(vm_var64s)m_lliFreq;
            flip_times[0] = flip_times[1];
            flip_times[1] = flip_times[2];
            flip_times[2] = this_flip_time;
            flip_time = (flip_times[0] + flip_times[1] + flip_times[2]) / 3;
            // Step/pause support: while the step event stays un-signalled,
            // keep re-presenting the last frame every 500 ms.
            while (VM_TIMEOUT == m_StepEvent.Wait(500))
            {
                m_pVideoRender->ShowLastFrame();
                if (m_bStopFlag)
                {
                    break;
                }
            }
            // ignore the first frame (might be a long wait to be synchronized)
            if (1 < m_Stat.uiFrameNum)
            {
                m_Stat.dfRenderTime += (double)(vm_var64s)(t2-t2_prev) / (double)(vm_var64s)m_lliFreq;
                m_Stat.dfRenderRate = (double)(m_Stat.uiFrameNum - 1) / m_Stat.dfRenderTime;
            }
            t2_prev = t2;
            TASK_SWITCH();
        }

        // Rendezvous with the reposition logic: announce our sync point and
        // wait until the controller lets us continue.
        m_eventSyncPoint1Sync.Set();
        m_eventSyncPointSync.Wait();
        m_eventSyncPoint1Sync.Reset();
        if(umcRes == UMC::UMC_WAIT_FOR_REPOSITION)
        {
            umcRes = UMC::UMC_OK;
        }
    }

    if (NULL != pStoppedAudioRender)
    {
        delete pStoppedAudioRender;
    }
    //to avoid deadlock during reposition
    m_eventSyncPoint1Sync.Set();

    vm_debug_trace(4, VM_STRING("AVSync::SyncProc exit\n"));
    LOG(VM_STRING("SyncProc,-"));
}

// Video decoding thread procedure (its tail lies beyond the end of this
// chunk): locks an input buffer on the video render, pulls compressed data
// from the splitter and feeds it to the video decoder.
void
AVSync::VideoProc()
{
    LOG(VM_STRING("VideoProc,+"));

    UMC::Status umcRes = UMC::UMC_OK;
    UMC::Status umcSplitRes = UMC::UMC_OK;
    UMC::MediaDataEx data;
    UMC::VideoData out_data;
    bool bVideoRndrIsLocked = false;

    out_data.SetVideoParameters(m_DecodedFrameSize.width, m_DecodedFrameSize.height, m_cFormat);

    vm_debug_trace(4, VM_STRING("AVSync::VideoProc start\n"));

    // Main decoding cycle
    for (int iFrameNum = 0; !m_bStopFlag && UMC::UMC_OK == umcRes; iFrameNum++)
    {
        // Wait for the free buffer in the video render
        do
        {
            umcRes = m_pVideoRender->LockInputBuffer(&out_data);
            // Be aware that video render surface was locked and not released yet
            if (UMC::UMC_OK == umcRes)
            {
                bVideoRndrIsLocked = true;
            }
            // Busy render: retry every 10 ms until stopped or repositioned.
            if (UMC::UMC_TIMEOUT == umcRes ||
                (UMC::UMC_FAILED_TO_ALLOCATE_BUFFER == umcRes && !m_bStopFlag))
            {
                vm_time_sleep(10);
            }
            else
            {
                break;
            }
        } while (!m_bStopFlag && !m_bStopSyncForReposition);
        if(m_bStopSyncForReposition)
        {
            umcRes = UMC::UMC_WAIT_FOR_REPOSITION;
        }
        if (((UMC::UMC_OK != umcRes) && (umcRes != UMC::UMC_WAIT_FOR_REPOSITION)) || m_bStopFlag)
        {
            break;
        }

        // Repeat decode procedure until the decoder will agree to decompress
        // at least one frame
        double dfDecTime = 0;
        do
        {
            // Get some more data from the splitter if we've decoded
            // all data from the previous buffer
            if ((4 >= data.GetDataSize() || umcRes == UMC::UMC_NOT_ENOUGH_DATA) && UMC::UMC_OK == umcSplitRes)
            {
                umcSplitRes = umcRes = m_pSplitter->GetNextVideoData(&data);
                vm_debug_trace(-1, VM_STRING("VideoProc: data size from splitter is %d\n"), data.GetDataSize());
                if(UMC::UMC_WAIT_FOR_REPOSITION == umcSplitRes)
                {
                    umcSplitRes = UMC::UMC_OK;
                }
            }
            else
            {
                umcRes = UMC::UMC_OK;
            }
            vm_debug_trace(0, VM_STRING("Splitter PTS: %lf\n"), data.GetTime());
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -