⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 avsync.cpp

📁 audio-video-codecs.rar语音编解码器
💻 CPP
📖 第 1 页 / 共 4 页
字号:
                while((fabs(videoTime - audioTime) > REPOSITION_AVSYNC_PRECISION) && (umcRes != UMC::UMC_ERR_END_OF_STREAM))
                {
                    if((videoTime > audioTime) || (audioTime < 0))
                    {
                        vm_debug_trace(VM_DEBUG_NONE, VM_STRING("CheckNextAudioData start\n"));
                        umcRes = m_pSplitter->GetNextAudioData(&audioData);
                        audioTime = audioData.GetTime();
                        vm_time_sleep(1);
                        vm_debug_trace1(VM_DEBUG_NONE, VM_STRING("CheckNextAudioData end = %lf\n"),audioData.GetTime());
                    }
                    if((fabs(videoTime - audioTime) < REPOSITION_AVSYNC_PRECISION) || (umcRes == UMC::UMC_ERR_END_OF_STREAM)) break;

                    if((videoTime < audioTime) || (videoTime < 0))
                    {
                        vm_debug_trace(VM_DEBUG_NONE, VM_STRING("GetNextVideoData start\n"));
                        umcRes = m_pSplitter->GetNextVideoData(&videoData);
                        videoTime = videoData.GetTime();
                        vm_time_sleep(1);
                        vm_debug_trace1(VM_DEBUG_NONE, VM_STRING("GetNextVideoData end = %lf\n"),videoData.GetTime());
                    }
                }
            }
            vm_debug_trace2(VM_DEBUG_NONE, VM_STRING("Reposition ends: v = %.3lf a = %.3lf\n"), videoTime, audioTime);
        }

        if(m_pAudioRender)
        {
            m_pAudioRender->SetVolume(volume);
        }

        m_Stat.uiFramesRendered = 0;

        if (m_SyncThread.IsValid())
        {
            m_eventSyncPointSync.Set();
        }
        if (m_AudioThread.IsValid())
        {
            m_eventSyncPointAudio.Set();
        }
        if (m_VideoThread.IsValid())
        {
            m_eventSyncPointVideo.Set();
        }
    }*/
}

// Decode a single frame and accumulate the wall-clock time spent inside the
// decoder into rdfDecTime (seconds, derived from the CPU tick frequency).
//   pInData   - compressed input buffer, may be NULL to flush the decoder
//   rOutData  - receives the decoded frame
//   rdfDecTime- running decode-time counter, incremented by this call
// Returns the decoder status from GetFrame().
UMC::Status AVSync::GetFrameStub(UMC::MediaData* pInData,
                                 UMC::VideoData& rOutData,
                                 Ipp64f& rdfDecTime)
{
    Ipp32s iInDataSize = 0;

    // Remember the incoming payload size; only the disabled pointer-rewind
    // code below consumes it.
    if (NULL != pInData)
    {
        iInDataSize = pInData->GetDataSize();
    }

    // Time the decode call with raw CPU ticks.
    vm_tick DecStartTime = vm_time_get_tick();
    UMC::Status umcRes = m_pVideoDecoder->GetFrame(pInData, &rOutData);
    vm_tick DecStopTime = vm_time_get_tick();

    // Convert the tick delta to seconds and add it to the caller's counter.
    rdfDecTime += (Ipp64f)(Ipp64s)(DecStopTime - DecStartTime) /
                  (Ipp64f)(Ipp64s)m_lliFreq;

    vm_debug_trace1(VM_DEBUG_NONE,
                    VM_STRING("GetFrame: %lf\n"), rOutData.GetTime());
/*
    VM_ASSERT(NULL == pInData || 0 <= pInData->GetDataSize());
    if (NULL != pInData && 0 < pInData->GetDataSize())
    {
        if(!(m_ccParams.ulSplitterFlags & UMC::FLAG_VSPL_COMPATIBLE)) {
          Ipp64f dfPTS = pInData->GetTime();
          Ipp32s iDataSize = pInData->GetDataSize();
          Ipp32s iBufSize = pInData->GetBufferSize() - (iInDataSize - iDataSize);
          Ipp8u* pbNewPtr = (Ipp8u*)pInData->GetPointer();
          pbNewPtr += iInDataSize - pInData->GetDataSize();
          pInData->SetPointer(pbNewPtr, iBufSize);
          pInData->SetDataSize(iDataSize);
          pInData->SetTime(dfPTS);
       }
    }
*/
    return umcRes;
}

#define SKIP_FRAME_TOLERENCE 7

// Master A/V synchronization loop.
//
// Waits until both decoding threads have primed their renders, then paces
// decoded video frames against the audio render clock.  When audio ends (or
// was never present) a NULLAudioRender is substituted so video keeps a time
// reference.  Frames that run ahead of audio are delayed; frames that lag by
// more than ~0.7 of a frame period are skipped (after SKIP_FRAME_TOLERENCE
// consecutive lagging frames).  Per-frame render statistics are accumulated
// in m_Stat.
//
// Fix: the PTS-jump resynchronization used abs() on Ipp64f differences,
// which resolves to the integer overload and truncates the fractional part
// (e.g. a 1.5 s offset truncated to 1 and failed the "> 1.0" test).  Both
// sites now use fabs(), consistent with the rest of this file.
void AVSync::SyncProc()
{
    vm_debug_trace_withfunc(VM_DEBUG_FILE, VM_STRING("AVSync::SyncProc"), VM_STRING("SyncProc,+"));
    vm_tick t1 = 0, t2 = 0, t2_prev = 0;
    Ipp64f flip_time = 0.001;
    Ipp64f flip_times[3] = {0.001,0.001,0.001};   // last 3 flip durations, averaged below
    UMC::Status umcRes = UMC::UMC_OK;
    bool bNoAudioAnyMore = (NULL == m_pAudioRender);
    bool bNoVideoAnyMore = (NULL == m_pVideoRender);
    bool bNullAudioRender = false;
    UMC::AudioRender* pStoppedAudioRender = NULL;
    Ipp64f prevVideoPts = 0;
    UMC::NULLAudioRenderParams renderParams;

    vm_debug_trace(VM_DEBUG_INFO, VM_STRING("AVSync::SyncProc start\n"));

    // Wait until video and audio decoding threads will pass some data to
    // the renders - we can't start synchronization without it
    while (!m_bStopFlag &&
          ((NULL != m_pVideoRender && !m_bVideoPlaying) ||
           (NULL != m_pAudioRender && !m_bAudioPlaying)))
    {   vm_time_sleep(1);   }

    m_Stat.uiFramesRendered = 0;
    // Continue video data rendering until no more data in audio and video
    // render left
    for (Ipp32s frame_num = 0, skip_window = 0;
         UMC::UMC_OK == umcRes && (!bNoAudioAnyMore || !bNoVideoAnyMore) && !m_bStopFlag;
         frame_num++)
    {
        m_Stat.uiFrameNum = frame_num + m_Stat.uiSkippedNum;

        // Check next frame PTS if any
        if (!bNoVideoAnyMore)
        {
            UMC::Status get_fr_sts = UMC::UMC_OK;
            do
            {
                VM_ASSERT(NULL != m_pVideoRender);
                get_fr_sts = m_pVideoRender->GetRenderFrame(&(m_Stat.dfFrameTime));
                if (UMC::UMC_OK != get_fr_sts)
                    vm_time_sleep(5);
            } while (get_fr_sts == UMC::UMC_ERR_TIMEOUT && !m_bStopFlag);

            vm_debug_trace1(VM_DEBUG_NONE, VM_STRING("next frame: %f"), m_Stat.dfFrameTime);

            // -1.0 is the render's "no more frames" sentinel
            if (m_Stat.dfFrameTime == -1.0 || get_fr_sts != UMC::UMC_OK)
            {
               bNoVideoAnyMore = true;
            }
        }
        else
        {
            vm_time_sleep(5);
        }
        if(frame_num == 0)
        {
            prevVideoPts = m_Stat.dfFrameTime;
        }

        // If we have no more audio but some video or if we have no audio at all
        if (m_bSync && ((NULL == m_pAudioRender ) ||
                        (NULL == pStoppedAudioRender &&
                         m_bAudioPlaying &&
                         bNoAudioAnyMore &&
                        !bNoVideoAnyMore)))
        {
            // Swap in a NULLAudioRender seeded with the current video PTS so
            // the remaining video still has a clock to pace against.
            pStoppedAudioRender = m_pAudioRender;
            m_pAudioRender = new UMC::NULLAudioRender(m_Stat.dfFrameTime);
            if (NULL == m_pAudioRender)
            {
                //  signal error, stop everything
                m_bStopFlag = true;
                umcRes = UMC::UMC_ERR_ALLOC;
            }
            else
            {
                // Start time counting
                m_pAudioRender->Pause(false);
                m_bAudioPlaying = true;
                bNullAudioRender = true;
            }
        }


        TASK_SWITCH();

        // Target display time: frame PTS minus the (averaged) flip latency.
        Ipp64f ft = m_Stat.dfFrameTime - flip_time;

        vm_debug_trace2(VM_DEBUG_NONE, VM_STRING("#%d, video time: %f\n"),
                       frame_num, m_Stat.dfFrameTime);

        // Let's synchronize video to audio if there is some data in the audio
        // render or NULLAudioRender is used
        if (!bNoAudioAnyMore || bNullAudioRender)
        {
            VM_ASSERT(NULL != m_pAudioRender);
            VM_ASSERT(NULL != m_pVideoDecoder);

            Ipp64f dfAudioTime = m_pAudioRender->GetTime();
            dfAudioTime += m_pAudioRender->GetDelay();

            vm_debug_trace1(VM_DEBUG_NONE, VM_STRING("\t\taudio time: %f\n"), dfAudioTime);
            vm_debug_trace1(VM_DEBUG_NONE, VM_STRING("video time: %f\n"), ft);

            // Wait for the next video frame display time if we have one
            if (!bNoVideoAnyMore && m_bSync)
            {
                if(prevVideoPts > ft + 1.0 || prevVideoPts + 1.0 < ft) //PTS jump
                {
                    // fabs, not integer abs: the operands are doubles and the
                    // fractional part matters for the 1-second threshold.
                    if(fabs(dfAudioTime - ft) > 1.0)
                    {
                        // try to syncronize video and audio after PTS jump
                        if (!bNullAudioRender)
                        {
                            volatile Ipp64f st1;
                            Ipp32u   n = 0;
                            for (st1 = dfAudioTime;
                                st1 > 0 && (fabs(st1 - ft) > 1.0) && !m_bStopFlag && n < 100; n++)
                            {
                                vm_time_sleep(5);
                                st1 = m_pAudioRender->GetTime();
                                vm_debug_trace1(VM_DEBUG_NONE, VM_STRING("\t\t\trestore time: %f\n"), st1);
                                dfAudioTime = st1;
                            }
                        }
                        else
                        {
                            // NULL render: simply restart its clock at the new PTS.
                            renderParams.m_InitPTS = m_Stat.dfFrameTime;
                            m_pAudioRender->SetParams(&renderParams);
                            m_pAudioRender->Pause(false);
                        }
                     }
                }

                if (ft > dfAudioTime)
                {
                    // Video is ahead of audio: stop skipping and busy-wait
                    // (in small sleeps) until the audio clock catches up.
                    skip_window = 0;
                    umcRes = m_pVideoDecoder->ResetSkipCount();
                    if (UMC::UMC_ERR_NOT_IMPLEMENTED == umcRes)
                    {    umcRes = UMC::UMC_OK;    }

                    volatile Ipp64f st1;
                    Ipp32u   n = 0;

                    for (st1 = dfAudioTime;
                         st1 >= dfAudioTime && ft > st1 && !m_bStopFlag && n < 100 ;n++)
                    {
                        Ipp32f a=0;
                        vm_time_sleep(max(0,min(1,(Ipp32s)((ft-st1)*1000))));
                        st1 = m_pAudioRender->GetTime();
                        a = m_pAudioRender->GetDelay();
                        st1 += a;
                        vm_debug_trace2(VM_DEBUG_NONE, VM_STRING("wait:\t\taudio time: %f+%f\n"),
                                        st1,a);
                        if(m_bPaused)
                        {
                           m_pVideoRender->ShowLastFrame();
                        }
                    }
                }
                else if (ft < dfAudioTime &&
                         (dfAudioTime - ft > (0.7/m_dfFrameRate)))
                {
                    // Video lags by more than ~0.7 frame periods: count it and
                    // skip a frame once the tolerance window is exhausted.
                    if (++skip_window >= SKIP_FRAME_TOLERENCE)
                    {
                        skip_window = 0;
                        umcRes = m_pVideoDecoder->SkipVideoFrame(1);
                        if (UMC::UMC_ERR_NOT_IMPLEMENTED == umcRes)
                        {   umcRes = UMC::UMC_OK;   }
                        vm_debug_trace1(VM_DEBUG_NONE, VM_STRING("skip: %d\n"), 1);
                    }
                }
            }

            // Stop synchronization efforts and play the rest of the video on
            // maximum speed if we have no more audio samples
            if (-1.0 == dfAudioTime)
            {   bNoAudioAnyMore = true; }
        }
        prevVideoPts = m_Stat.dfFrameTime;

        // It's time to display next video frame
        if (UMC::UMC_OK == umcRes && !bNoVideoAnyMore)
        {
            t1 = vm_time_get_tick();
            umcRes = m_pVideoRender->RenderFrame();
            t2 = vm_time_get_tick();
            m_Stat.uiFramesRendered++;
        }

        // Update Statistic structure and frame display statistic
        if (UMC::UMC_OK == umcRes && !bNoVideoAnyMore)
        {
            m_Stat.uiSkippedNum = m_pVideoDecoder->GetNumOfSkippedFrames();

            Ipp64f this_flip_time =
                    (Ipp64f)(Ipp64s)(t2-t1)/(Ipp64f)(Ipp64s)m_lliFreq;

            // Rolling average of the last three flip times smooths out spikes.
            flip_times[0] = flip_times[1];
            flip_times[1] = flip_times[2];
            flip_times[2] = this_flip_time;

            flip_time = (flip_times[0] + flip_times[1] + flip_times[2]) / 3;

            // Single-step support: while the step event is not signalled,
            // keep re-presenting the last frame.
            while (VM_TIMEOUT == m_StepEvent.Wait(500))
            {
                m_pVideoRender->ShowLastFrame();
                if (m_bStopFlag)
                {   break;  }
            }

            // ignore the first frame (might be a long wait to be synchronized
            if (1 < m_Stat.uiFrameNum)
            {
                m_Stat.dfRenderTime += (Ipp64f)(Ipp64s)(t2-t2_prev) /
                                               (Ipp64f)(Ipp64s)m_lliFreq;
                m_Stat.dfRenderRate =
                    (Ipp64f)(m_Stat.uiFrameNum - 1) / m_Stat.dfRenderTime;
            }
            t2_prev = t2;

            TASK_SWITCH();
        }

    }

    // Release the real audio render we replaced with the NULL one.
    if (NULL != pStoppedAudioRender)
    {
        pStoppedAudioRender->Close();
        delete pStoppedAudioRender;
    }
    vm_debug_trace(VM_DEBUG_INFO, VM_STRING("AVSync::SyncProc exit\n"));
    vm_debug_trace_withfunc(VM_DEBUG_FILE, VM_STRING("AVSync::SyncProc"), VM_STRING("SyncProc,-"));
}

void AVSync::VideoProc()
{
    vm_debug_trace_withfunc(VM_DEBUG_FILE, VM_STRING("AVSync::VideoProc"), VM_STRING("VideoProc,+"));
    UMC::Status umcRes = UMC::UMC_OK;
    UMC::Status umcSplitRes = UMC::UMC_OK;
    UMC::MediaDataEx data;
    UMC::VideoData out_data;
    bool bVideoRndrIsLocked = false;

    out_data.Init(m_DecodedFrameSize.width,
                  m_DecodedFrameSize.height,
                  m_cFormat,
                  m_ccParams.iBitDepth);

    vm_debug_trace(VM_DEBUG_INFO, VM_STRING("AVSync::VideoProc start\n"));
    // Main decoding cycle
    for (Ipp32s iFrameNum = 0; !m_bStopFlag && UMC::UMC_OK == umcRes; iFrameNum++)
    {
        // Wait for the free buffer in the video render
        do
        {
            umcRes = m_pVideoRender->LockInputBuffer(&out_data);

            // Be aware that video render surface was locked and not released yet
            if (UMC::UMC_OK == umcRes)
            {
                bVideoRndrIsLocked = true;
                break;
            }

            // there is only one legal error return value,
            // all other are incorrect.
            else if ((!m_bStopFlag) &&
                (UMC::UMC_ERR_NOT_ENOUGH_BUFFER == umcRes ||UMC::UMC_ERR_TIMEOUT == umcRes ))
            {
                vm_time_sleep(10);
            }
            else
            {
                vm_string_printf(VM_STRING("Error in video render\n"));
                break;
            }

        } while (!m_bStopFlag );




        // Repeat decode procedure until the decoder will agree to decompress
        // at least one frame
        Ipp64f dfDecTime = 0;
        do {
            // Get some more data from the the splitter if we've decoded
            // all data from the previous buffer
            if ((4 >= data.GetDataSize() || umcRes == UMC::UMC_ERR_NOT_ENOUGH_DATA)
                && UMC::UMC_OK == umcSplitRes)
            {
                while ((UMC::UMC_ERR_NOT_ENOUGH_DATA==(umcSplitRes = umcRes = m_pSplitter->GetNextData(&data,m_nVideoTrack ))) &&  !m_bStopFlag)
                {
                    vm_time_sleep(5);

                }
                vm_debug_trace1(VM_DEBUG_ALL, VM_STRING("VideoProc: data size from splitter is %f\n"),
                               data.GetTime());
                if(UMC::UMC_WRN_REPOSITION_INPROGRESS == umcSplitRes)
                {
                    umcSplitRes = UMC::UMC_OK;
                }
            }
            else
            {   umcRes = UMC::UMC_OK;   }

            vm_debug_trace1(VM_DEBUG_NONE, VM_STRING("Splitter PTS: %lf\n"), data.GetTime());

            // Ok, here is no more data in the splitter. Let's extract the rest
            // of decoded data from the decoder
            if (UMC::UMC_OK == umcRes && NULL == data.GetDataPointer())
            {   umcSplitRes = UMC::UMC_ERR_END_OF_STREAM;   }

            if (UMC::UMC_ERR_END_OF_STREAM == umcSplitRes)
            {   umcRes = UMC::UMC_OK;   }

            out_data.SetTime(data.GetTime());

            if (UMC::UMC_OK == umcRes)
            {
                if (UMC::UMC_ERR_END_OF_STREAM != umcSplitRes)
                {   umcRes = GetFrameStub(&data, out_data, dfDecTime);  }
                else

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -