// rarender.cpp -- CRealAudioRenderer (RealAudio renderer) implementation
{
if (IsCrossfadeInProgress())
{
pnr = AttemptCrossfadeTermination(uLowest,
ulActualTimestamp,
(pnr == HXR_STREAM_DONE));
}
}
// if I didn't write audio on the current dry notification stream but I
// did on another stream, make that stream the current dry notification
// stream
if ((pnr == HXR_OK) &&
(uLowest != NO_STREAM_SET) &&
(!IsCrossfadeInProgress()) &&
(uLowest != m_usCurrentDryNotificationStream))
{
DEBUG_OUTF(DOAUDIO_FILE, (s, "New DN stream: %u\t%lu\n", uLowest, pnr));
AddDryNotification(uLowest);
}
// update the out param
ulAudioTime = audioData.ulAudioTime;
return pnr;
}
// Returns TRUE as soon as any active substream reports pending audio data
// (a successful GetNextAudioDataTime with a real start time), FALSE when
// no active substream has anything queued.
BOOL CRealAudioRenderer::HaveDataToWrite()
{
    for (UINT16 idx = 0; idx < m_uNumOfSubStreams; idx++)
    {
        // Inactive substreams cannot contribute data; skip them.
        if (!m_pRaFormats[idx]->IsActive())
        {
            continue;
        }

        UINT32 ulStart = 0;
        UINT32 ulEnd   = 0;
        HX_RESULT res  = m_pRaFormats[idx]->
            GetNextAudioDataTime(ulStart, ulEnd);

        // A valid (non-sentinel) start time means there is data to write.
        if ((res == HXR_OK) && (ulStart != NO_TIME_SET))
        {
            return TRUE;
        }
    }
    return FALSE;
}
/////////////////////////////////////////////////////////////////////////////
//  Method:
//      CRealAudioRenderer::FindLowestStartTime
//  Purpose:
//      Scans all active substreams and reports the one whose next audio
//      data has the earliest start time (timestamp-wrap-safe comparison
//      via IsTimeLess).
//  Out params:
//      uLowest           - index of that substream, NO_STREAM_SET if none
//      ulLowestStartTime - its start time, NO_TIME_SET if none
//      ulLowestEndTime   - its end time, NO_TIME_SET if none
//      nActive           - count of active substreams examined
//  Returns:
//      Always HXR_OK; "nothing found" is signaled via the sentinels.
HX_RESULT
CRealAudioRenderer::FindLowestStartTime(UINT16& uLowest, UINT32& ulLowestStartTime,
                                        UINT32& ulLowestEndTime, UINT16& nActive)
{
    UINT16    i;
    UINT32    ulCurrentStartTime = 0;
    UINT32    ulCurrentEndTime   = 0;
    HX_RESULT pnr                = HXR_OK;

    uLowest           = NO_STREAM_SET;
    ulLowestStartTime = NO_TIME_SET;
    // BUGFIX: ulLowestEndTime was previously left untouched when no active
    // stream had data, handing the caller an indeterminate value; give it
    // the same sentinel treatment as the other out-params.
    ulLowestEndTime   = NO_TIME_SET;
    nActive           = 0;

    for (i = 0; i < m_uNumOfSubStreams; i++)
    {
        if (m_pRaFormats[i]->IsActive())
        {
            nActive++;
            pnr = m_pRaFormats[i]->
                GetNextAudioDataTime(ulCurrentStartTime, ulCurrentEndTime);
            // Accept this stream if it has a real start time and it is the
            // first candidate, or earlier than the best seen so far.
            if ((pnr == HXR_OK) &&
                (ulCurrentStartTime != NO_TIME_SET) &&
                ((ulLowestStartTime == NO_TIME_SET) ||
                 IsTimeLess(ulCurrentStartTime, ulLowestStartTime)))
            {
                uLowest           = i;
                ulLowestStartTime = ulCurrentStartTime;
                ulLowestEndTime   = ulCurrentEndTime;
            }
        }
    }
    return HXR_OK;
}
/////////////////////////////////////////////////////////////////////////////
//  Method:
//      CRealAudioRenderer::FindLongestOverlap
//  Purpose:
//      Stub: crossfade overlap detection is not implemented in this build.
//      All inputs are ignored; always reports "no overlapping stream" via
//      the NO_STREAM_SET sentinel and returns HXR_OK.
HX_RESULT
CRealAudioRenderer::FindLongestOverlap(UINT16 uLowest, UINT32 ulLowestEndTime,
                                       UINT16 nActive, UINT16& uLongestOverlap,
                                       AUDIO_STATE& audioState)
{
    uLongestOverlap = NO_STREAM_SET;
    return HXR_OK;
}
// Stub: crossfading is not implemented in this build; always returns
// HXR_FAIL so callers fall back to non-crossfaded playback. All
// parameters, including the out-params, are ignored and left untouched.
// NOTE(review): 'uLongetOverlap' looks like a typo for 'uLongestOverlap'
// (cf. FindLongestOverlap above) -- confirm against the header
// declaration before renaming.
HX_RESULT CRealAudioRenderer::AttemptCrossfade(UINT16 uLowest,
                                               UINT16 uLongetOverlap,
                                               UINT32 ulLatestActualTime,
                                               UINT32 ulLatestStreamTime,
                                               UINT32& ulFromStreamTimeStart,
                                               UINT32& ulFromActualTimeStart,
                                               AUDIO_STATE audioState)
{
    return HXR_FAIL;
}
// Stub: with crossfading disabled there is never a crossfade to terminate.
// Clears the stream out-param to the NO_STREAM_SET sentinel and simply
// echoes the caller's stream-done status back as the result code.
HX_RESULT
CRealAudioRenderer::AttemptCrossfadeTermination(UINT16& uStream,
                                                UINT32 ulActualTimestamp,
                                                BOOL bStreamDone)
{
    uStream = NO_STREAM_SET;
    return (bStreamDone ? HXR_STREAM_DONE : HXR_OK);
}
/////////////////////////////////////////////////////////////////////////
//  Method:
//      IHXRenderer::OnPacket
//  Purpose:
//      Called by client engine when a packet for this renderer is
//      due.
//      lTimeOffset is the amount of time that we lag behind the main player time line
//      so if the start time of the track is 10 seconds, lTimeOffset will be 10000 (msec)
//      (the first packet's time stamp will be 0 but the player will be at time=10sec)
//
STDMETHODIMP
CRealAudioRenderer::OnPacket(IHXPacket* pPacket, LONG32 lTimeOffset)
{
#if defined(RA_TEST_LOSS)
    // Test-only instrumentation: simulate packet loss by replacing
    // packets 100..105 with packets explicitly marked as lost.
    static INT32 lNum = 0;
    if (lNum >= 100 && lNum < 106)
    {
        IHXPacket* pLostPacket = NULL;
        m_pCommonClassFactory->CreateInstance(CLSID_IHXPacket, (void**) &pLostPacket);
        if (pLostPacket)
        {
            pLostPacket->Set(0, 0, pPacket->GetStreamNumber(), 0, 0);
            pLostPacket->SetAsLost();
            // NOTE(review): pLostPacket replaces pPacket but is never
            // released in this test-only path -- looks like a ref leak;
            // confirm before enabling RA_TEST_LOSS.
            pPacket = pLostPacket;
        }
    }
    lNum++;
#endif
#if defined(HELIX_FEATURE_AUDIO_CODEC_RAAC)
    HX_RESULT retVal = HXR_FAIL;
    if (pPacket)
    {
        if (!pPacket->IsLost())
        {
            // Map the packet's ASM rule number to a substream index when
            // stream switching is in effect; otherwise substream 0.
            UINT16 usSubStream = 0;
            UINT16 usASMRuleNum = pPacket->GetASMRuleNumber();
            if (m_bStreamSwitchable && m_pRuleMap && usASMRuleNum < m_uNumOfRules)
            {
                usSubStream = m_pRuleMap[usASMRuleNum];
            }
            if (usSubStream < m_uNumOfSubStreams)
            {
                // If we have a VBR depacketizer for this substream,
                // then run this packet through it. Otherwise, send
                // the packet on to _OnPacket() as normal.
                if (m_ppVBRDepack[usSubStream])
                {
                    m_ppVBRDepack[usSubStream]->PutPacket(pPacket);
                    retVal = HXR_OK;
                    HX_RESULT rv = HXR_OK;
                    // Drain every depacketized output packet; stop when
                    // the depacketizer runs dry (rv fails) or a
                    // downstream _OnPacket() call fails (retVal fails).
                    while (SUCCEEDED(rv) && SUCCEEDED(retVal))
                    {
                        IHXPacket* pOutPacket = NULL;
                        rv = m_ppVBRDepack[usSubStream]->GetPacket(pOutPacket);
                        if (SUCCEEDED(rv))
                        {
                            retVal = _OnPacket(pOutPacket, lTimeOffset);
                        }
                        HX_RELEASE(pOutPacket);
                    }
                }
                else
                {
                    retVal = _OnPacket(pPacket, lTimeOffset);
                }
            }
        }
        else
        {
            // Lost packets bypass the depacketizer and go straight to
            // _OnPacket() so loss handling happens downstream.
            retVal = _OnPacket(pPacket, lTimeOffset);
        }
    }
    return retVal;
#else
    // No AAC support compiled in: forward every packet directly.
    return _OnPacket(pPacket, lTimeOffset);
#endif
}
/////////////////////////////////////////////////////////////////////////
//  Method:
//      IHXRenderer::OnTimeSync
//  Purpose:
//      Called by client engine to inform the renderer of the current
//      time relative to the streams synchronized time-line. The
//      renderer should use this time value to update its display or
//      render it's stream data accordingly.
//
STDMETHODIMP CRealAudioRenderer::OnTimeSync(ULONG32 ulTime)
{
    HX_RESULT retVal = HXR_OK;
    m_pMutex->Lock();
    // if we get a timesync we must be playing
    m_PlayState = playing;
    // Here's a good time to actually render the data!
    m_ulCurrentTimelineTime = ulTime;
    /***
    if (m_usCurrentDryNotificationStream != NO_TIME_SET)
    {
        DEBUG_OUTF_IDX(m_usCurrentDryNotificationStream, RA_FLOW_FILE,
                       (s, "Time Sync: %u\n", m_ulCurrentTimelineTime));
    }
    ***/
    DEBUG_OUTF(RASYNC_FILE, (s, "RA\tOnTimeSync:\t%lu\n", ulTime));
    // unregister any time syncs.
    UnregisterTimeSyncs(ulTime);
#ifdef _MACINTOSH
    /* On Mac, since we do not have Mutex, we do not want to process
     * data if we are within OnPacket call
     */
    // NOTE: this goto must stay above the ulAudioTime declaration below;
    // jumping past an *initialized* local would be ill-formed C++, which
    // is why ulAudioTime is declared uninitialized (it is a pure
    // out-param filled in by DoAudio()).
    if (m_bProcessingPacket)
    {
        goto exit;
    }
    m_bProcessingPacket = TRUE;
#endif /*_MACINTOSH*/
    // Write to AS
    UINT32 ulAudioTime;
    retVal = DoAudio(ulAudioTime);
    if (m_ulCurrentGranularity < MINIMUM_GRANULARITY)
    {
        // if our granularity is less than the minimum granularity of the system.
        // then call do audio again. We should always decode at least twice the
        // amount of data as our granularity.
        retVal = DoAudio(ulAudioTime);
    }
    // Only out-of-memory is surfaced to the error sink; every other
    // DoAudio() result is deliberately flattened to HXR_OK so a transient
    // decode hiccup does not abort playback.
    if( retVal == HXR_OUTOFMEMORY )
    {
        if( m_pErrorMessages )
        {
            m_pErrorMessages->Report( HXLOG_ERR, retVal, 0, NULL, NULL );
        }
    }
    else
    {
        retVal = HXR_OK;
    }
#ifdef _MACINTOSH
    m_bProcessingPacket = FALSE;
exit:
#endif /*_MACINTOSH*/
    m_pMutex->Unlock();
    return retVal;
}
/////////////////////////////////////////////////////////////////////////
//  Method:
//      IHXRenderer::OnPreSeek
//  Purpose:
//      Called by client engine to inform the renderer that a seek is
//      about to occur. The render is informed the last time for the
//      stream's time line before the seek, as well as the first new
//      time for the stream's time line after the seek will be completed.
//
STDMETHODIMP CRealAudioRenderer::OnPreSeek(ULONG32 ulOldTime,
                                           ULONG32 ulNewTime)
{
    // Enter seek mode and reset per-playback bookkeeping so the packets
    // that arrive after the seek are treated as a fresh start.
    m_PlayState             = seeking;
    m_ulCurrentTimelineTime = ulNewTime;
    m_bEndOfPackets         = FALSE;
    m_bDoneWritingPackets   = FALSE;
    m_bInSeekMode           = TRUE;
    m_bFirstPacket          = TRUE;
    DEBUG_OUTF(RASYNC_FILE, (s, "RA\tPreSeek\t%lu\t%lu\n", ulOldTime, ulNewTime));
    UINT16 i = 0;
    for (i = 0; i < m_uNumOfSubStreams; i++)
    {
        // Reset each substream's format state and unregister it from the
        // audio sync so stale pre-seek audio is not rendered.
        m_pRaFormats[i]->OnSeek(ulOldTime,ulNewTime);
        m_pRaFormats[i]->m_ulLastPacketTime = NO_TIME_SET;
        m_pRaFormats[i]->m_ulBytesWrite = 0;
        if (m_pRaFormats[i]->m_pAudioSync && m_pRaFormats[i]->m_bRegistered)
        {
            m_pRaFormats[i]->m_bRegistered = FALSE;
            m_pRaFormats[i]->m_pAudioSync->UnRegister();
            DEBUG_OUTF_IDX(i, RA_FLOW_FILE,
                           (s, "Sync Stop\n"));
        }
    }
    // flush the unregister queue since we just unregistered every stream
    FlushUnregisterQueue();
    m_ulLatestStreamTime = NO_TIME_SET;
    m_ulLatestActualTime = NO_TIME_SET;
    m_fLatestStreamTime = 0.0;
    m_bAllStreamsToBeUnregistered = FALSE;
    // BUGFIX: this UINT16 stream index was compared against NO_TIME_SET,
    // the 32-bit "no time" sentinel, instead of NO_STREAM_SET -- the
    // sentinel used for stream indices everywhere else in this file
    // (FindLowestStartTime, AttemptCrossfadeTermination, ...). A UINT16
    // can never equal the 32-bit time sentinel, so the default
    // dry-notification stream was never re-registered here.
    if (m_usCurrentDryNotificationStream == NO_STREAM_SET)
    {
        AddDryNotification(DEFAULT_DRY_NOTIFICATION);
    }
    return HXR_OK;
}
/////////////////////////////////////////////////////////////////////////
//  Method:
//      IHXRenderer::OnPostSeek
//  Purpose:
//      Called by client engine once a seek has completed, with the last
//      pre-seek time and the first post-seek time of the stream's
//      time line. Forwards the seek to every VBR depacketizer (when AAC
//      support is compiled in) and leaves seek mode.
//
STDMETHODIMP CRealAudioRenderer::OnPostSeek(ULONG32 ulOldTime, ULONG32 ulNewTime)
{
    DEBUG_OUTF(RASYNC_FILE, (s, "RA\tPostSeek\t%lu\t%lu\n", ulOldTime, ulNewTime));
#if defined(HELIX_FEATURE_AUDIO_CODEC_RAAC)
    // Let each substream's depacketizer (where present) react to the seek.
    UINT16 uSub = 0;
    while (uSub < m_uNumOfSubStreams)
    {
        if (m_ppVBRDepack[uSub])
        {
            m_ppVBRDepack[uSub]->OnSeek(ulOldTime, ulNewTime);
        }
        ++uSub;
    }
#endif /* #if defined(HELIX_FEATURE_AUDIO_CODEC_RAAC) */
    m_bInSeekMode = FALSE;
    return HXR_OK;
}
/////////////////////////////////////////////////////////////////////////
//  Method:
//      IHXRenderer::OnPause
//  Purpose:
//      Called by client engine to inform the renderer that a pause has
//      just occured. The render is informed the last time for the
//      stream's time line before the pause.
//
//      Only the play-state flag is updated; ulTime is ignored.
STDMETHODIMP CRealAudioRenderer::OnPause(ULONG32 ulTime)
{
    m_PlayState = paused;
    return HXR_OK;
}
/////////////////////////////////////////////////////////////////////////
//  Method:
//      IHXRenderer::OnBegin
//  Purpose:
//      Called by client engine to inform the renderer that a begin or
//      resume has just occured. The render is informed the first time
//      for the stream's time line after the resume.
//
//      No-op for this renderer: state is driven by OnTimeSync instead.
STDMETHODIMP CRealAudioRenderer::OnBegin(ULONG32 ulTime)
{
    return HXR_OK;
}
/////////////////////////////////////////////////////////////////////////
//  Method:
//      IHXRenderer::OnBuffering
//  Purpose:
//      Called by client engine to inform the renderer that buffering
//      of data is occuring. The render is informed of the reason for
//      the buffering (start-up of stream, seek has occured, network
//      congestion, etc.), as well as percentage complete of the
//      buffering process.
//
//      Only the play-state flag is updated; the reason flags and the
//      completion percentage are ignored.
STDMETHODIMP CRealAudioRenderer::OnBuffering(ULONG32 ulFlags, UINT16 unPercentComplete)
{
    m_PlayState = buffering;
    return HXR_OK;
}
/////////////////////////////////////////////////////////////////////////
// Method:
// IHXRenderer::GetDisplayType
// Purpose:
// Called by client engine to ask the renderer for it's preferred
// display type. When layout information is not present, the
// renderer will be asked for it's prefered display type. Depending
// on the display type a buffer of additional information may be
// needed. This buffer could contain information
// (source truncated here: the GetDisplayType comment above continues, and the
// remainder of the file is not shown in this extract)