playbacknode.cc
void PaPlaybackNode::NodeRegistered()
{
DBUG(("NodeRegistered() called.\n"));
/* Start the BMediaEventLooper thread */
SetPriority(B_REAL_TIME_PRIORITY);
Run();
/* set up as much information about our output as we can */
mOutput.source.port = ControlPort();
mOutput.source.id = 0;
mOutput.node = Node();
::strcpy(mOutput.name, "PortAudio Playback");
}
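/* mPreferredFormat and mOutputSampleWidth are used throughout the code below
 * but are set up elsewhere (presumably the constructor, which is not part of
 * this listing).  A minimal sketch of plausible values, for orientation only;
 * the specific numbers are assumptions, not taken from this file: */
#if 0
	mPreferredFormat.type = B_MEDIA_RAW_AUDIO;
	mPreferredFormat.u.raw_audio.format = media_raw_audio_format::B_AUDIO_FLOAT;
	mPreferredFormat.u.raw_audio.channel_count = 2;      /* assumed */
	mPreferredFormat.u.raw_audio.frame_rate = 44100.0;   /* assumed */
	mPreferredFormat.u.raw_audio.byte_order =
		(B_HOST_IS_BENDIAN) ? B_MEDIA_BIG_ENDIAN : B_MEDIA_LITTLE_ENDIAN;
	mPreferredFormat.u.raw_audio.buffer_size = 2048;     /* matches PrepareToConnect() */
	/* bytes per sample; HandleEvent() divides buffer_size by this */
	mOutputSampleWidth = mPreferredFormat.u.raw_audio.format
		& media_raw_audio_format::B_AUDIO_SIZE_MASK;
#endif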
void PaPlaybackNode::HandleEvent( const media_timed_event *event,
bigtime_t lateness, bool realTimeEvent )
{
// DBUG(("HandleEvent() called.\n"));
status_t err;
switch(event->type)
{
case BTimedEventQueue::B_START:
DBUG((" Handling a B_START event\n"));
if( RunState() != B_STARTED )
{
mStartTime = event->event_time + EventLatency();
mSamplesSent = 0;
mAborted = false;
mRunning = true;
media_timed_event firstEvent( mStartTime,
BTimedEventQueue::B_HANDLE_BUFFER );
EventQueue()->AddEvent( firstEvent );
}
break;
case BTimedEventQueue::B_STOP:
DBUG((" Handling a B_STOP event\n"));
mRunning = false;
EventQueue()->FlushEvents( 0, BTimedEventQueue::B_ALWAYS, true,
BTimedEventQueue::B_HANDLE_BUFFER );
break;
case BTimedEventQueue::B_HANDLE_BUFFER:
//DBUG((" Handling a B_HANDLE_BUFFER event\n"));
/* make sure we're started and connected */
if( RunState() != BMediaEventLooper::B_STARTED ||
mOutput.destination == media_destination::null )
break;
BBuffer *buffer = FillNextBuffer(event->event_time);
/* make sure we weren't aborted while this routine was running.
* this can happen in one of two ways: either the callback returned
* nonzero (in which case mAborted is set in FillNextBuffer() ) or
* the client called AbortStream */
if( mAborted )
{
if( buffer )
buffer->Recycle();
Stop(0, true);
break;
}
if( buffer )
{
err = SendBuffer(buffer, mOutput.destination);
if( err != B_OK )
buffer->Recycle();
}
mSamplesSent += mOutput.format.u.raw_audio.buffer_size / mOutputSampleWidth;
/* Now schedule the next buffer event, so we can send another
* buffer when this one runs out. We calculate when it should
* happen by calculating when the data we just sent will finish
* playing.
*
* NOTE, however, that the event will actually get generated
* earlier than we specify, to account for the latency it will
* take to produce the buffer. It uses the latency value we
* specified in SetEventLatency() to determine just how early
* to generate it. */
/* totalPerformanceTime includes the time represented by the buffer
* we just sent */
bigtime_t totalPerformanceTime = (bigtime_t)((double)mSamplesSent /
(double)mOutput.format.u.raw_audio.channel_count /
(double)mOutput.format.u.raw_audio.frame_rate * 1000000.0);
bigtime_t nextEventTime = mStartTime + totalPerformanceTime;
media_timed_event nextBufferEvent(nextEventTime,
BTimedEventQueue::B_HANDLE_BUFFER);
EventQueue()->AddEvent(nextBufferEvent);
break;
}
}
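/* Worked example of the scheduling arithmetic above, under assumed values
 * (2048-byte buffers, 4-byte float samples, 2 channels, 44100 Hz -- none of
 * these numbers come from this file):
 *     samples per buffer   = 2048 / 4           = 512
 *     frames  per buffer   = 512 / 2            = 256
 *     duration per buffer  = 256 / 44100 * 1e6  ~ 5805 usec
 * so successive B_HANDLE_BUFFER events land roughly 5.8 ms apart on the
 * performance timeline, and BMediaEventLooper dispatches each one
 * EventLatency() early so the next buffer can be produced in time. */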
/*************************
*
* BBufferProducer methods
*
*/
status_t PaPlaybackNode::FormatSuggestionRequested( media_type type,
int32 /*quality*/, media_format* format )
{
/* the caller wants to know this node's preferred format for the given media
* type, so we fill in the suggestion and say whether we support that type */
DBUG(("FormatSuggestionRequested() called.\n"));
if(!format)
return B_BAD_VALUE;
*format = mPreferredFormat;
/* we only support raw audio (a wildcard is okay too) */
if ( type == B_MEDIA_UNKNOWN_TYPE || type == B_MEDIA_RAW_AUDIO )
return B_OK;
else
return B_MEDIA_BAD_FORMAT;
}
status_t PaPlaybackNode::FormatProposal( const media_source& output,
media_format* format )
{
/* This is similar to FormatSuggestionRequested(), but it is actually part
* of the negotiation process. We're given the opportunity to specify any
* properties that are wildcards (i.e. properties that the other node doesn't
* care about either way) */
DBUG(("FormatProposal() called.\n"));
/* Make sure this proposal really applies to our output */
if( output != mOutput.source )
return B_MEDIA_BAD_SOURCE;
/* We return two things: whether we support the proposed format, and our own
* preferred format */
*format = mPreferredFormat;
if( format->type == B_MEDIA_UNKNOWN_TYPE || format->type == B_MEDIA_RAW_AUDIO )
return B_OK;
else
return B_MEDIA_BAD_FORMAT;
}
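/* The comment above talks about filling in wildcard fields.  This node takes
 * the simplest route and returns mPreferredFormat wholesale; a sketch of the
 * field-by-field alternative, shown for illustration only (this is not what
 * the node does):
 */
#if 0
	if( format->u.raw_audio.frame_rate == media_raw_audio_format::wildcard.frame_rate )
		format->u.raw_audio.frame_rate = mPreferredFormat.u.raw_audio.frame_rate;
	if( format->u.raw_audio.channel_count == media_raw_audio_format::wildcard.channel_count )
		format->u.raw_audio.channel_count = mPreferredFormat.u.raw_audio.channel_count;
	if( format->u.raw_audio.buffer_size == media_raw_audio_format::wildcard.buffer_size )
		format->u.raw_audio.buffer_size = mPreferredFormat.u.raw_audio.buffer_size;
#endif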
status_t PaPlaybackNode::FormatChangeRequested( const media_source& source,
const media_destination& destination, media_format* io_format, int32* )
{
/* we refuse to change formats; we support only one */
DBUG(("FormatChangeRequested() called.\n"));
return B_ERROR;
}
status_t PaPlaybackNode::GetNextOutput( int32* cookie, media_output* out_output )
{
/* this is where we allow others to enumerate our outputs -- the cookie is
* an integer we can use to keep track of where we are in the enumeration. */
DBUG(("GetNextOutput() called.\n"));
if( *cookie == 0 )
{
*out_output = mOutput;
*cookie = 1;
return B_OK;
}
return B_BAD_INDEX;
}
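/* The cookie protocol seen from the caller's side (sketch; 'producer' is a
 * hypothetical pointer to this node -- in practice the Media Kit drives this
 * loop itself):
 */
#if 0
	int32 cookie = 0;
	media_output output;
	while( producer->GetNextOutput( &cookie, &output ) == B_OK )
	{
		/* inspect output.source, output.destination, output.format ... */
	}
	producer->DisposeOutputCookie( cookie );
#endif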
status_t PaPlaybackNode::DisposeOutputCookie( int32 cookie )
{
DBUG(("DisposeOutputCookie() called.\n"));
return B_OK;
}
void PaPlaybackNode::LateNoticeReceived( const media_source& what,
bigtime_t how_much, bigtime_t performance_time )
{
/* This function is called to notify us that a buffer we sent wasn't
* received by the performance time we stamped it with -- it arrived late. Basically,
* it means we underestimated our own latency, so we should increase it */
DBUG(("LateNoticeReceived() called.\n"));
if( what != mOutput.source )
return;
if( RunMode() == B_INCREASE_LATENCY )
{
mInternalLatency += how_much;
SetEventLatency( mDownstreamLatency + mInternalLatency );
DBUG(("Increasing latency to %Ld\n", mDownstreamLatency + mInternalLatency));
}
else
DBUG(("I don't know what to do with this notice!"));
}
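/* Worked example of the adjustment above, with assumed numbers: if
 * mDownstreamLatency is 10000 usec, mInternalLatency is 3000 usec, and a
 * buffer arrives how_much = 2000 usec late while in B_INCREASE_LATENCY mode,
 * mInternalLatency grows to 5000 usec and SetEventLatency() is called with
 * 15000 usec, so future B_HANDLE_BUFFER events are dispatched 2000 usec
 * earlier than before. */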
void PaPlaybackNode::EnableOutput( const media_source& what, bool enabled,
int32* )
{
DBUG(("EnableOutput() called.\n"));
/* stub -- we don't support this yet */
}
status_t PaPlaybackNode::PrepareToConnect( const media_source& what,
const media_destination& where, media_format* format,
media_source* out_source, char* out_name )
{
/* the final stage of format negotiations. here we _must_ make specific any
* remaining wildcards */
DBUG(("PrepareToConnect() called.\n"));
/* make sure this really refers to our source */
if( what != mOutput.source )
return B_MEDIA_BAD_SOURCE;
/* make sure we're not already connected */
if( mOutput.destination != media_destination::null )
return B_MEDIA_ALREADY_CONNECTED;
if( format->type != B_MEDIA_RAW_AUDIO )
return B_MEDIA_BAD_FORMAT;
if( format->u.raw_audio.format != mPreferredFormat.u.raw_audio.format )
return B_MEDIA_BAD_FORMAT;
if( format->u.raw_audio.buffer_size ==
media_raw_audio_format::wildcard.buffer_size )
{
DBUG(("We were left to decide buffer size: choosing 2048"));
format->u.raw_audio.buffer_size = 2048;
}
else
DBUG(("Using consumer specified buffer size of %lu.\n",
format->u.raw_audio.buffer_size));
/* Reserve the connection, return the information */
mOutput.destination = where;
mOutput.format = *format;
*out_source = mOutput.source;
strncpy( out_name, mOutput.name, B_MEDIA_NAME_LENGTH );
return B_OK;
}
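/* Once the buffer size is pinned down, the playing time each buffer
 * represents follows directly from the format.  A self-contained sketch of
 * that calculation (an illustrative helper, not something this node defines;
 * the result is the kind of value typically handed to SetBufferDuration()):
 */
#if 0
static bigtime_t
BufferDurationFor( const media_raw_audio_format &fmt )
{
	size_t bytesPerSample = fmt.format & media_raw_audio_format::B_AUDIO_SIZE_MASK;
	size_t framesPerBuffer = fmt.buffer_size / bytesPerSample / fmt.channel_count;
	return (bigtime_t)( (double)framesPerBuffer / (double)fmt.frame_rate * 1000000.0 );
}
#endif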
void PaPlaybackNode::Connect(status_t error, const media_source& source,
const media_destination& destination, const media_format& format, char* io_name)
{
DBUG(("Connect() called.\n"));