📄 avsync.cpp
字号:
// take buffered reference frames from the decoder
{ umcRes = GetFrameStub(NULL, out_data, dfDecTime); }
if(umcRes != UMC::UMC_OK)
{ vm_time_sleep(10);}
}
vm_debug_trace1(VM_DEBUG_NONE, VM_STRING("Decoder PTS: %lf\n"),
out_data.GetTime());
//vm_debug_trace1(VM_DEBUG_NONE, VM_STRING("%d\n"),
// out_data.m_FrameType);
} while (((UMC::UMC_ERR_NOT_ENOUGH_DATA == umcRes && UMC::UMC_ERR_END_OF_STREAM != umcSplitRes) ||
UMC::UMC_ERR_SYNC == umcRes) && !m_bStopFlag);
if (UMC::UMC_OK != umcRes || m_bStopFlag)
{ break; }
// Update statistic structure
{
m_Stat.dfConversionTime = m_ColorConverter.GetConversionTime();
m_Stat.dfDecodeTime += dfDecTime;
m_Stat.uiFramesDecoded++;
m_Stat.dfDecodeRate =
(Ipp64f)(Ipp64s)(m_Stat.uiFramesDecoded) / m_Stat.dfDecodeTime;
if (0.0 != m_Stat.dfConversionTime)
{
m_Stat.dfConversionRate =
(Ipp64f)(Ipp64s)(m_Stat.uiFramesDecoded) / m_Stat.dfConversionTime;
}
else
{ m_Stat.dfConversionRate = 0.0; }
}
// Unlock video render surface
if (bVideoRndrIsLocked)
{
umcRes = m_pVideoRender->UnLockInputBuffer(&out_data);
bVideoRndrIsLocked = false;
}
m_bVideoPlaying = true;
vm_time_sleep(0);
}
if(m_bVideoPlaying == false)
m_bStopFlag = true;
if (bVideoRndrIsLocked)
{
out_data.SetTime(-1.0);
out_data.SetFrameType(UMC::NONE_PICTURE);
umcRes = m_pVideoRender->UnLockInputBuffer(&out_data, UMC::UMC_ERR_END_OF_STREAM);
//m_bVideoPlaying = true;
}
m_pVideoRender->Stop();
vm_debug_trace(VM_DEBUG_INFO, VM_STRING("AVSync::VideoProc start exiting\n"));
// m_bVideoPlaying = false;
vm_debug_trace(VM_DEBUG_INFO, VM_STRING("AVSync::VideoProc exit\n"));
vm_debug_trace_withfunc(VM_DEBUG_FILE, VM_STRING("AVSync::VideoProc"), VM_STRING("VideoProc,-"));
}
// LockBuffer: busy-wait helper macro used by AudioProc to pull data from
// the splitter. Repeatedly evaluates pFunction(pMedium, n) into the
// caller-scope variable `umcRes`, sleeping 5 ms while the call reports
// UMC_ERR_NOT_ENOUGH_DATA or UMC_ERR_NOT_ENOUGH_BUFFER, and stops as soon
// as any other status is returned or `bStop` becomes true.
// NOTE(review): requires a UMC::Status variable named `umcRes` in the
// calling scope; comments cannot be placed inside the continuation lines
// because a `//` comment would swallow the trailing backslash.
#define LockBuffer(pFunction, pMedium, n ,bStop) \
{ \
do \
{ \
umcRes = pFunction(pMedium,n); \
if ((UMC::UMC_ERR_NOT_ENOUGH_DATA == umcRes) || \
(UMC::UMC_ERR_NOT_ENOUGH_BUFFER == umcRes)) \
vm_time_sleep(5); \
} while ((false == bStop) && \
((UMC::UMC_ERR_NOT_ENOUGH_DATA == umcRes) || \
(UMC::UMC_ERR_NOT_ENOUGH_BUFFER == umcRes))); \
}
// Blocking acquire of an input buffer from an arbitrary sink object.
// Keeps calling pSource->LockInputBuffer(pMedium), sleeping 5 ms after
// every UMC_ERR_NOT_ENOUGH_BUFFER result, until either a different
// status comes back or the caller raises *pbStop.
// Returns the last status obtained from the sink.
template<class typeSource, class typeMedium>
UMC::Status LockInputBuffer(typeSource *pSource, typeMedium *pMedium, bool *pbStop)
{
    UMC::Status lockStatus;
    for (;;)
    {
        lockStatus = pSource->LockInputBuffer(pMedium);
        // back off a little while the sink has no free buffer
        if (UMC::UMC_ERR_NOT_ENOUGH_BUFFER == lockStatus)
            vm_time_sleep(5);
        // leave on shutdown request or on any status except "buffer full"
        if ((*pbStop) || (UMC::UMC_ERR_NOT_ENOUGH_BUFFER != lockStatus))
            break;
    }
    return lockStatus;
} // UMC::Status LockInputBuffer(typeSource *pSource, typeMedium *pMedium, bool *pbStop)
// Blocking acquire of an output buffer from an arbitrary source object.
// Keeps calling pDestination->LockOutputBuffer(pMedium), sleeping 5 ms
// after every UMC_ERR_NOT_ENOUGH_DATA result, until either a different
// status comes back or the caller raises *pbStop.
// Returns the last status obtained from the source.
template<class typeDestination, class typeMedium>
UMC::Status LockOutputBuffer(typeDestination *pDestination, typeMedium *pMedium, bool *pbStop)
{
    UMC::Status lockStatus;
    for (;;)
    {
        lockStatus = pDestination->LockOutputBuffer(pMedium);
        // back off a little while the source has nothing ready yet
        if (UMC::UMC_ERR_NOT_ENOUGH_DATA == lockStatus)
            vm_time_sleep(5);
        // leave on shutdown request or on any status except "no data yet"
        if ((*pbStop) || (UMC::UMC_ERR_NOT_ENOUGH_DATA != lockStatus))
            break;
    }
    return lockStatus;
} // UMC::Status LockOutputBuffer(typeDestination *pDestination, typeMedium *pMedium, bool *pbStop)
// Audio pipeline thread body: pulls compressed audio chunks from the
// splitter, pushes them through the optional audio decoder
// (m_pDSAudioCodec) or treats them as raw PCM, and hands the resulting
// samples to the audio renderer until end of stream or m_bStopFlag.
// Also accumulates decode-time / play-time statistics in m_Stat and
// raises m_bAudioPlaying once the renderer reports a valid clock.
// NOTE(review): m_pAudioInfo and m_pAudioRender are dereferenced without
// checks below — presumably guaranteed non-NULL by Init(); only
// m_pSplitter is asserted. Confirm against the initialization path.
void AVSync::AudioProc()
{
//LOG (VM_STRING("AudioProc,+"));
UMC::Status umcRes = UMC::UMC_OK;/*
UMC::Status umcSplRes = UMC::UMC_OK;*/
UMC::MediaData ComprData;      // compressed chunk obtained from the splitter
UMC::AudioData UncomprData;    // decoded/raw samples handed to the renderer
bool bSplitterIsEmpty = false; // set once the splitter signals end of stream
Ipp64f dfStartTime = 0;        // presentation time of the current chunk
Ipp32u uiComprSize = 0;        // bytes of the chunk not yet consumed
Ipp32u uiShift = 0;            // offset of the next unconsumed byte in the chunk
// check error(s)
VM_ASSERT(NULL != m_pSplitter);
vm_debug_trace(VM_DEBUG_INFO, VM_STRING("AVSync::AudioProc start\n"));
// Continue passing data from the splitter to decoder and from
// decoder to the render
UncomprData.m_info = *m_pAudioInfo;
while ((false == m_bStopFlag) &&
(false == bSplitterIsEmpty))
{
// 1st step: obtain data from splitter
// (busy-waits in the LockBuffer macro; sets umcRes in this scope)
LockBuffer(m_pSplitter->GetNextData, &ComprData,m_nAudioTrack, m_bStopFlag);
vm_debug_trace1(VM_DEBUG_ALL, VM_STRING("AudioProc: data size from splitter is %f\n"),
ComprData.GetTime());
// check error(s) & end of stream
if (UMC::UMC_ERR_END_OF_STREAM == umcRes)
{
// remember EOS and flush the decoder with a zero-sized chunk below
bSplitterIsEmpty = true;
ComprData.SetDataSize(0);
}
else if (UMC::UMC_OK != umcRes)
break;
// save data size and data time
uiComprSize = ComprData.GetDataSize();
dfStartTime = ComprData.GetTime();
// decode data and pass them to renderer
uiShift = 0;
// outer loop: repeat until the whole chunk has been consumed
do
{
// 2nd step: compressed data should be passed to the decoder first
if (m_pDSAudioCodec)
{
UMC::MediaData buff;
// get decoder's internal buffer
umcRes = m_pDSAudioCodec->LockInputBuffer(&buff);
// check error(s)
if (UMC::UMC_OK != umcRes)
break;
// Copy compressed data to the decoder's buffer
if (UMC::UMC_OK == umcRes)
{
// copy as much of the remaining chunk as the decoder buffer holds
Ipp32u uiDataToCopy = IPP_MIN((Ipp32u) buff.GetBufferSize(), uiComprSize);
memcpy(buff.GetDataPointer(),
(Ipp8u*)ComprData.GetDataPointer() + uiShift,
uiDataToCopy);
buff.SetDataSize(uiDataToCopy);
buff.SetTime(dfStartTime);
// on the final (EOS) chunk, tell the decoder to drain
umcRes = m_pDSAudioCodec->UnLockInputBuffer(&buff,
(bSplitterIsEmpty) ? (UMC::UMC_ERR_END_OF_STREAM) : (UMC::UMC_OK));
// check error(s)
if (UMC::UMC_OK != umcRes)
break;
// advance the consumption bookkeeping
uiShift += uiDataToCopy;
uiComprSize -= uiDataToCopy;
}
}
// inner loop: drain decoder output (or copy PCM) into the renderer
do
{
// wait until audio renderer will free enough internal buffers
// NOTE(review): C-style cast suggests m_bStopFlag is not declared
// as plain bool (volatile?) — confirm the declaration.
umcRes = LockInputBuffer(m_pAudioRender, &UncomprData, (bool *) &m_bStopFlag);
// check error(s)
if (UMC::UMC_OK != umcRes)
{
if (!m_bStopFlag)
vm_string_printf(VM_STRING("Error in audio render\n"));
TASK_SWITCH();
break;
}
// move decoded data to the renderer
// branch for compressed data
if (m_pDSAudioCodec)
{
Ipp64f dfStart, dfEnd;
UncomprData.SetDataSize(0);
// time the GetFrame call for the decode-rate statistic
vm_tick ullDecTime = vm_time_get_tick();
umcRes = m_pDSAudioCodec->GetFrame(&UncomprData);
ullDecTime = vm_time_get_tick() - ullDecTime;
// check error(s)
if (UMC::UMC_OK != umcRes)
break;
VM_ASSERT(UMC::UMC_ERR_UNSUPPORTED != umcRes);
// calculate decoding time (ticks -> seconds via m_lliFreq)
m_Stat.dfAudioDecodeTime += (Ipp64f)(Ipp64s)(ullDecTime) /
(Ipp64f)(Ipp64s)m_lliFreq;
UncomprData.GetTime(dfStart, dfEnd);
m_Stat.dfAudioPlayTime += dfEnd - dfStart;
}
// branch for PCM data
else
{
Ipp64f dfStart = 0.0;
Ipp64f dfEnd = 0.0;
if (0 == uiComprSize)
break;
// copy raw samples straight into the renderer buffer
Ipp32u uiDataToCopy = IPP_MIN(uiComprSize, UncomprData.GetBufferSize());
memcpy(UncomprData.GetDataPointer(),
((Ipp8u*)ComprData.GetDataPointer()) + uiShift,
uiDataToCopy);
UncomprData.SetDataSize(uiDataToCopy);
ComprData.GetTime(dfStart, dfEnd);
// linearly interpolate the time stamps over the copied sub-range
Ipp64f dfNorm = (dfEnd - dfStart) / (uiShift + uiComprSize);
dfStart += dfNorm * uiShift;
dfEnd = dfStart + dfNorm * uiDataToCopy;
UncomprData.SetTime(dfStart, dfEnd);
uiShift += uiDataToCopy;
uiComprSize -= uiDataToCopy;
vm_debug_trace2(VM_DEBUG_INFO, VM_STRING("AudioProc: start: %lf, stop %lf\n"),
dfStart, dfEnd);
}
// call finalizing function
if (UncomprData.GetDataSize())
{
Ipp64f dfEndTime;
vm_debug_trace1(VM_DEBUG_NONE, VM_STRING("taudio time: %f\n"), UncomprData.GetTime());
UncomprData.GetTime(dfStartTime, dfEndTime);
// NOTE(review): divides by dfAudioPlayTime — zero until the first
// decoded frame reports a nonzero duration; confirm it cannot be
// reached with dfAudioPlayTime == 0.
m_Stat.dfAudioDecodeRate = (Ipp64f)(m_Stat.dfAudioDecodeTime / m_Stat.dfAudioPlayTime);
dfStartTime = dfEndTime;
umcRes = m_pAudioRender->UnLockInputBuffer(&UncomprData);
// check error(s)
TASK_SWITCH();
if (UMC::UMC_OK != umcRes)
break;
// open SyncProc() only after render starts
// (-1. is the render's "no clock yet" sentinel)
if (-1. != m_pAudioRender->GetTime())
m_bAudioPlaying = true;
}
} while (false == m_bStopFlag);
// check after-loop error(s); "not enough data" / "sync" just mean
// the decoder wants more input, so refill from the chunk
if ((UMC::UMC_OK != umcRes) &&
(UMC::UMC_ERR_NOT_ENOUGH_DATA != umcRes) &&
(UMC::UMC_ERR_SYNC != umcRes))
break;
umcRes = UMC::UMC_OK;
} while ((false == m_bStopFlag) &&
(0 != uiComprSize));
// check after-loop error(s)
if (UMC::UMC_OK != umcRes)
break;
}
// send end of stream to renderer
if (false == m_bStopFlag)
{
// wait until audio renderer will free enough internal buffers
do
{
umcRes = m_pAudioRender->LockInputBuffer(&UncomprData);
if (UMC::UMC_ERR_NOT_ENOUGH_BUFFER == umcRes)
vm_time_sleep(10);
} while ((false == m_bStopFlag) &&
(UMC::UMC_ERR_NOT_ENOUGH_BUFFER == umcRes));
// check error(s)
if (UMC::UMC_OK == umcRes)
{
// zero-sized buffer + EOS status marks the end of the audio stream
UncomprData.SetDataSize(0);
UncomprData.SetTime(0.0);
umcRes = m_pAudioRender->UnLockInputBuffer(&UncomprData, UMC::UMC_ERR_END_OF_STREAM);
}
}
vm_debug_trace(VM_DEBUG_INFO, VM_STRING("AVSync::AudioProc exit\n"));
//LOG (VM_STRING("AudioProc,-"));
}
// Static thread trampoline: the thread parameter is the owning AVSync
// instance; forwards control to its SyncProc() and always returns 0.
Ipp32u VM_THREAD_CALLCONVENTION AVSync::SyncThreadProc(void* pvParam)
{
    VM_ASSERT(NULL != pvParam);
    AVSync *pOwner = reinterpret_cast<AVSync*>(pvParam);
    pOwner->SyncProc();
    return 0;
}
// Static thread trampoline: the thread parameter is the owning AVSync
// instance; forwards control to its VideoProc() and always returns 0.
Ipp32u VM_THREAD_CALLCONVENTION AVSync::VideoThreadProc(void* pvParam)
{
    VM_ASSERT(NULL != pvParam);
    AVSync *pOwner = reinterpret_cast<AVSync*>(pvParam);
    pOwner->VideoProc();
    return 0;
}
// Static thread trampoline: the thread parameter is the owning AVSync
// instance; forwards control to its AudioProc() and always returns 0.
Ipp32u VM_THREAD_CALLCONVENTION AVSync::AudioThreadProc(void* pvParam)
{
    VM_ASSERT(NULL != pvParam);
    AVSync *pOwner = reinterpret_cast<AVSync*>(pvParam);
    pOwner->AudioProc();
    return 0;
}
// Reacts to a window-geometry change: hides the video surface when the
// display rectangle has collapsed (window presumably minimized),
// otherwise shows the surface and forwards the new geometry to the
// video render. Does nothing when no video render exists.
void AVSync::ResizeDisplay(UMC::RECT& rDispRect, UMC::RECT& rRangeRect)
{
    if (NULL == m_pVideoRender)
        return;

    const bool bCollapsed =
        (rDispRect.bottom - rDispRect.top <= 2) ||
        (rDispRect.right - rDispRect.left <= 2);

    if (bCollapsed)
    {
        // I guess application was minimized
        m_pVideoRender->HideSurface();
    }
    else
    {
        m_pVideoRender->ShowSurface();
        m_pVideoRender->ResizeDisplay(rDispRect, rRangeRect);
    }
}
// Applies a new trick-mode (fast/slow forward) speed to the running
// pipeline without re-initializing it: the audio render parameters are
// rebuilt with a sample frequency scaled for the requested speed, and
// the video decoder is handed the new trick-mode flag.
// @param rControlParams  control block; uiTrickMode selects the speed
// @param trickFlag       requested trick-mode flag (currently unused —
//                        rControlParams.uiTrickMode is used instead)
// @param offset          position offset (currently unused)
// @return UMC::UMC_OK (errors from SetParams are not propagated)
UMC::Status AVSync::SetTrickModeSpeed(CommonCtl& rControlParams, Ipp32u trickFlag, Ipp64f offset)
{
    UMC::Status umcRes = UMC::UMC_OK;
    UMC::VideoDecoderParams VideoDecoderInit;
    UMC::AudioRenderParams AudioRenderInit;

    // FIX: m_pAudioInfo is NULL for streams without an audio track; the
    // original code dereferenced it unconditionally and crashed here.
    if (NULL != m_pAudioInfo)
    {
        AudioRenderInit.info = *m_pAudioInfo;
        // Scale the playback sample rate according to the requested speed:
        // x2/x4 for fast forward, /2 and /4 for slow forward.
        switch (rControlParams.uiTrickMode)
        {
        case UMC::UMC_TRICK_MODES_FFW_FAST:
            AudioRenderInit.info.sample_frequency *= 2;
            break;
        case UMC::UMC_TRICK_MODES_FFW_FASTER:
            AudioRenderInit.info.sample_frequency *= 4;
            break;
        case UMC::UMC_TRICK_MODES_SFW_SLOW:
            AudioRenderInit.info.sample_frequency /= 2;
            break;
        case UMC::UMC_TRICK_MODES_SFW_SLOWER:
            AudioRenderInit.info.sample_frequency /= 4;
            break;
        default:
            // normal speed: keep the original frequency
            break;
        }
    }

    VideoDecoderInit.lTrickModesFlag = rControlParams.uiTrickMode;
    if (m_pVideoDecoder)
        m_pVideoDecoder->SetParams(&VideoDecoderInit);
    if (m_pAudioRender)
    {
        m_pAudioRender->SetParams(&AudioRenderInit, rControlParams.uiTrickMode);
        // resume playback in case the render was paused for the switch
        m_pAudioRender->Pause(false);
    }
    return umcRes;
}
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -