pa_mac_core.c
    int     numChannelsRemaining;
    Boolean deinterleavingNeeded;
    int     numFramesInOutputBuffer;

    if( outOutputData == NULL )
    {
        DBUG(("PaOSX_HandleOutput: outOutputData == NULL\n"));
        return noErr;
    }

    deinterleavingNeeded = past->past_NumOutputChannels != outOutputData->mBuffers[0].mNumberChannels;

    numFramesInOutputBuffer = outOutputData->mBuffers[0].mDataByteSize /
        (sizeof(float) * outOutputData->mBuffers[0].mNumberChannels);
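    /* Worked example (illustrative, not from the original source): for a
     * 512-frame CoreAudio buffer carrying 2 channels of Float32 samples,
     * mDataByteSize is 512 * 2 * 4 = 4096 bytes, so
     * numFramesInOutputBuffer = 4096 / (4 * 2) = 512. */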
    if( pahsc->mode != PA_MODE_INPUT_ONLY )
    {
        /* If we are using output, then we need an empty output buffer. */
        if( outOutputData->mNumberBuffers > 0 )
        {
            /* If we have multiple CoreAudio buffers, then we will need to deinterleave after conversion. */
            if( deinterleavingNeeded )
            {
                numBytes = numFramesInOutputBuffer * sizeof(float) * past->past_NumOutputChannels;

                /* Free old buffer if we are allocating new one. */
                if( (pahsc->output.streamInterleavingBuffer != NULL) &&
                    (pahsc->output.streamInterleavingBufferLen < numBytes) )
                {
                    PaHost_FreeFastMemory( pahsc->output.streamInterleavingBuffer, pahsc->output.streamInterleavingBufferLen );
                    pahsc->output.streamInterleavingBuffer = NULL;
                }

                /* Allocate interleaving buffer if needed. */
                if( pahsc->output.streamInterleavingBuffer == NULL )
                {
                    pahsc->output.streamInterleavingBufferLen = numBytes;
                    pahsc->output.streamInterleavingBuffer = (float *) PaHost_AllocateFastMemory( pahsc->output.streamInterleavingBufferLen );
                }

                outputNativeBufferfPtr = (void*)pahsc->output.streamInterleavingBuffer;
            }
            else
            {
                numBytes = outOutputData->mBuffers[0].mDataByteSize;
                outputNativeBufferfPtr = (void*)outOutputData->mBuffers[0].mData;
            }

            /* Pull data from PA user through converter. */
            err = AudioConverterFillBuffer(
                      pahsc->output.converter,
                      PaOSX_OutputConverterCallbackProc,
                      past,
                      &numBytes,
                      outputNativeBufferfPtr);
            if( err != noErr )
            {
                PRINT_ERR("PaOSX_HandleOutput: AudioConverterFillBuffer failed", err);
                goto error;
            }
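            /* Note (illustrative, not from the original source): the deprecated
             * AudioConverterFillBuffer() API works on a pull model. The converter
             * asks its input proc, here PaOSX_OutputConverterCallbackProc, for
             * source data whenever it needs more; per the comment above, that is
             * where the PortAudio user supplies output data. An input proc for
             * this old API has roughly this shape (MyConverterInputProc is a
             * hypothetical name used only for this sketch):
             *
             *     static OSStatus MyConverterInputProc( AudioConverterRef converter,
             *                                           UInt32 *ioDataSize,
             *                                           void **outData,
             *                                           void *userData )
             *     {
             *         // Point *outData at source samples and set *ioDataSize.
             *         return noErr;
             *     }
             */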
            /* Deinterleave data from PortAudio and write to multiple CoreAudio buffers. */
            if( deinterleavingNeeded )
            {
                int numInterleavedChannels = past->past_NumOutputChannels;
                int i, currentInterleavedChannelIndex = 0;
                numChannelsRemaining = numInterleavedChannels;

                for( i=0; i<outOutputData->mNumberBuffers; i++ )
                {
                    int numBufChannels = outOutputData->mBuffers[i].mNumberChannels;
                    int j;
                    /* Don't use more than we need or more than we have. */
                    int numChannelsUsedInThisBuffer = (numChannelsRemaining < numBufChannels) ?
                        numChannelsRemaining : numBufChannels;

                    for( j=0; j<numChannelsUsedInThisBuffer; j++ )
                    {
                        int k;
                        float *dest = &((float *)outOutputData->mBuffers[i].mData)[ j ];
                        float *src = &pahsc->output.streamInterleavingBuffer[ currentInterleavedChannelIndex ];
                        /* Move one channel from interleaved buffer to CoreAudio buffer. */
                        for( k=0; k<numFramesInOutputBuffer; k++ )
                        {
                            *dest = *src;
                            dest += numBufChannels;
                            src += numInterleavedChannels;
                        }
                        currentInterleavedChannelIndex++;
                    }

                    numChannelsRemaining -= numChannelsUsedInThisBuffer;
                    if( numChannelsRemaining <= 0 ) break;
                }
            }
        }
    }

error:
    return err;
}
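/* Illustrative sketch (not part of the original source): for a stereo
 * PortAudio stream feeding two mono CoreAudio buffers, the deinterleave
 * loop above is equivalent to:
 *
 *     float *interleaved = pahsc->output.streamInterleavingBuffer;
 *     float *left  = (float *) outOutputData->mBuffers[0].mData;
 *     float *right = (float *) outOutputData->mBuffers[1].mData;
 *     for( k=0; k<numFramesInOutputBuffer; k++ )
 *     {
 *         left[k]  = interleaved[2*k];
 *         right[k] = interleaved[2*k + 1];
 *     }
 */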
/******************************************************************
 * This callback is used when two separate devices are used for input and output.
 * This is common with USB audio hardware, which often shows up as two separate
 * devices, one for input and one for output. The callback just writes its input
 * data to a FIFO so that it can be read by the main callback proc,
 * PaOSX_CoreAudioIOCallback().
 */
static OSStatus PaOSX_CoreAudioInputCallback (AudioDeviceID inDevice, const AudioTimeStamp* inNow,
                                              const AudioBufferList* inInputData, const AudioTimeStamp* inInputTime,
                                              AudioBufferList* outOutputData, const AudioTimeStamp* inOutputTime,
                                              void* contextPtr)
{
    OSStatus err = noErr;
    internalPortAudioStream *past = (internalPortAudioStream *) contextPtr;
    PaHostSoundControl *pahsc;
    pahsc = (PaHostSoundControl *) past->past_DeviceData;

    /* If there is a FIFO for input then write to it. */
    if( (pahsc->ringBufferData != NULL) && (inInputData != NULL) )
    {
        err = PaOSX_WriteInputRingBuffer( past, inInputData );
        if( err != noErr ) goto error;
    }

error:
    return err;
}
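/* Illustrative sketch (not part of the original source): with the pre-10.6
 * CoreAudio HAL API used in this file, a host would attach and start this proc
 * on the separate input device roughly like so, while PaOSX_CoreAudioIOCallback()
 * drives the output device:
 *
 *     AudioDeviceAddIOProc( inputDeviceID, PaOSX_CoreAudioInputCallback, past );
 *     AudioDeviceStart( inputDeviceID, PaOSX_CoreAudioInputCallback );
 *
 * The actual registration in PortAudio happens elsewhere in this file.
 */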
/******************************************************************
 * This is the primary callback for CoreAudio.
 * It can handle input and/or output for a single device.
 * It takes input from CoreAudio, converts it, and passes it to the
 * PortAudio user callback. It then takes the PA results and passes
 * them back to CoreAudio.
 */
static OSStatus PaOSX_CoreAudioIOCallback (AudioDeviceID inDevice, const AudioTimeStamp* inNow,
                                           const AudioBufferList* inInputData, const AudioTimeStamp* inInputTime,
                                           AudioBufferList* outOutputData, const AudioTimeStamp* inOutputTime,
                                           void* contextPtr)
{
    OSStatus err = noErr;
    internalPortAudioStream *past;
    PaHostSoundControl *pahsc;
    past = (internalPortAudioStream *) contextPtr;
    pahsc = (PaHostSoundControl *) past->past_DeviceData;

    /* Has someone asked us to abort by calling Pa_AbortStream()? */
    if( past->past_StopNow )
    {
        past->past_IsActive = 0; /* Will cause thread to return. */
    }
    /* Has someone asked us to stop by calling Pa_StopStream()
     * OR has a user callback returned '1' to indicate finished.
     */
    else if( past->past_StopSoon )
    {
        // FIXME - Pretend all done. Should wait for audio to play out but CoreAudio latency very low.
        past->past_IsActive = 0; /* Will cause thread to return. */
    }
    else
    {
        /* use time stamp from CoreAudio if valid */
        if( inOutputTime->mFlags & kAudioTimeStampSampleTimeValid )
        {
            past->past_FrameCount = inOutputTime->mSampleTime;
        }
        else if( inInputTime->mFlags & kAudioTimeStampSampleTimeValid )
        {
            past->past_FrameCount = inInputTime->mSampleTime;
        }

        past->past_NumCallbacks += 1;

        /* Process full input buffer. */
        err = PaOSX_HandleInput( past, inInputData );
        if( err != 0 ) goto error;

        /* Fill up empty output buffers. */
        err = PaOSX_HandleOutput( past, outOutputData );
        if( err != 0 ) goto error;
    }

    if( err != 0 ) DBUG(("PaOSX_CoreAudioIOCallback: returns %ld.\n", err ));

error:
    return err;
}
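/* Illustrative note (not part of the original source): past_StopNow and
 * past_StopSoon are the usual PortAudio V18 stop flags, set roughly as:
 *
 *     Pa_AbortStream( stream );  // sets past_StopNow:  stop immediately
 *     Pa_StopStream( stream );   // sets past_StopSoon: let pending audio finish
 *
 * Clearing past_IsActive above is what lets the host layer observe that the
 * stream has finished (see the "Will cause thread to return" comments).
 */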
/*******************************************************************/
/** Attempt to set the device sample rate and channel count.
 * This is not critical because we use an AudioConverter, but we may
 * get better fidelity if we can avoid resampling.
 *
 * Only set the format once because some devices take time to settle.
 * Return a flag indicating whether the format changed so the caller knows
 * whether to wait for the DevicePropertyListener to get called.
 *
 * @return negative on error, zero if no change, or one if changed successfully.
 */
static PaError PaOSX_SetFormat( AudioDeviceID devID, Boolean isInput,
                                double desiredRate, int desiredNumChannels )
{
    AudioStreamBasicDescription formatDesc;
    PaError result = 0;
    OSStatus err;
    UInt32 dataSize;
    Float64 originalRate;
    int originalChannels;

    /* Get current device format. This is critical because if we pass
     * zeros for unspecified fields then the iMic device gets switched to a 16 bit
     * integer format!!! I don't know if this is a Mac bug or not. But it only
     * started happening when I upgraded from OS X V10.1 to V10.2 (Jaguar).
     */
    dataSize = sizeof(formatDesc);
    err = AudioDeviceGetProperty( devID, 0, isInput,
                                  kAudioDevicePropertyStreamFormat, &dataSize, &formatDesc);
    if( err != noErr )
    {
        PRINT_ERR("PaOSX_SetFormat: Could not get format.", err);
        sSavedHostError = err;
        return paHostError;
    }

    originalRate = formatDesc.mSampleRate;
    originalChannels = formatDesc.mChannelsPerFrame;

    // Is it already set to the correct format?
    if( (originalRate != desiredRate) || (originalChannels != desiredNumChannels) )
    {
        DBUG(("PaOSX_SetFormat: try to change sample rate to %f.\n", desiredRate ));
        DBUG(("PaOSX_SetFormat: try to set number of channels to %d\n", desiredNumChannels));
        formatDesc.mSampleRate = desiredRate;
        formatDesc.mChannelsPerFrame = desiredNumChannels;
        formatDesc.mBytesPerFrame = formatDesc.mChannelsPerFrame * sizeof(float);
        formatDesc.mBytesPerPacket = formatDesc.mBytesPerFrame * formatDesc.mFramesPerPacket;

        err = AudioDeviceSetProperty( devID, 0, 0,
                                      isInput, kAudioDevicePropertyStreamFormat, sizeof(formatDesc), &formatDesc);
        if (err != noErr)
        {
            /* Could not set to desired rate so query for closest match. */
            dataSize = sizeof(formatDesc);
            err = AudioDeviceGetProperty( devID, 0,
                                          isInput, kAudioDevicePropertyStreamFormatMatch, &dataSize, &formatDesc);
            DBUG(("PaOSX_SetFormat: closest rate is %f.\n", formatDesc.mSampleRate ));
            DBUG(("PaOSX_SetFormat: closest numChannels is %d.\n", (int)formatDesc.mChannelsPerFrame ));

            // Set to closest if different from original.
            if( (err == noErr) &&
                ((originalRate != formatDesc.mSampleRate) ||
                 (originalChannels != formatDesc.mChannelsPerFrame)) )
            {
                err = AudioDeviceSetProperty( devID, 0, 0,
                                              isInput, kAudioDevicePropertyStreamFormat, sizeof(formatDesc), &formatDesc);
                if( err == noErr ) result = 1;
            }
        }
        else result = 1;
    }

    return result;
}
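/* Illustrative usage (not part of the original source), following the return
 * convention documented above (negative = error, 0 = no change, 1 = changed):
 *
 *     int formatChanged = PaOSX_SetFormat( devID, isInput, 44100.0, 2 );
 *     if( formatChanged > 0 )
 *     {
 *         // Wait for the kAudioDevicePropertyStreamFormat listener to fire
 *         // before trusting the device format (hypothetical caller logic).
 *     }
 */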
/*******************************************************************
 * Check the volume level of the device. If it is below threshold, set it to newLevel.
 * We use the volume scalar instead of decibels because the decibel range varies by device.
 */
static void PaOSX_FixVolumeScalars( AudioDeviceID devID, Boolean isInput,
                                    int numChannels, double threshold, double newLevel )
{
    OSStatus err = noErr;
    UInt32 dataSize;
    int iChannel;

    /* The master channel is 0. Left and right are channels 1 and 2. */
    /* Fix volume. */
    for( iChannel = 0; iChannel<=numChannels; iChannel++ )
    {
        Float32 fdata32;
        dataSize = sizeof( fdata32 );
        err = AudioDeviceGetProperty( devID, iChannel, isInput,
                                      kAudioDevicePropertyVolumeScalar, &dataSize, &fdata32 );
        if( err == noErr )
        {
            DBUG(("kAudioDevicePropertyVolumeScalar for channel %d = %f\n", iChannel, fdata32));
            if( fdata32 <= (Float32) threshold )
            {
                dataSize = sizeof( fdata32 );
                fdata32 = (Float32) newLevel;
                err = AudioDeviceSetProperty( devID, 0, iChannel, isInput,
                                              kAudioDevicePropertyVolumeScalar, dataSize, &fdata32 );
                if( err != noErr )
                {
                    PRINT(("Warning: audio volume is very low and could not be turned up.\n"));
                }
                else
                {
                    PRINT(("Volume for audio channel %d was <= %4.2f so set to %4.2f by PortAudio!\n",
                           iChannel, threshold, newLevel ));
                }
            }
        }
    }

    /* Unmute if muted. */
    for( iChannel = 0; iChannel<=numChannels; iChannel++ )
    {
        UInt32 uidata32;
        dataSize = sizeof( uidata32 );
        err = AudioDeviceGetProperty( devID, iChannel, isInput,
                                      kAudioDevicePropertyMute, &dataSize, &uidata32 );
        if( err == noErr )
        {
            DBUG(("mute for channel %d = %ld\n", iChannel, uidata32));
            if( uidata32 == 1 ) // muted?
            {
                dataSize = sizeof( uidata32 );
                uidata32 = 0; // unmute
                err = AudioDeviceSetProperty( devID, 0, iChannel, isInput,
                                              kAudioDevicePropertyMute, dataSize, &uidata32 );
                if( err != noErr )
                {
                    PRINT(("Warning: audio is muted and could not be unmuted!\n"));
                }
                else
                {
                    PRINT(("Audio channel %d was unmuted by PortAudio!\n", iChannel ));
                }
            }
        }
    }
}
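/* Illustrative usage (not part of the original source): a host might call this
 * before starting a stream so that a device whose input level defaults to zero
 * or muted actually produces usable data, e.g.:
 *
 *     PaOSX_FixVolumeScalars( inputDeviceID, true, 2, 0.1, 0.9 );
 *
 * The threshold and level values above are made up for the example.
 */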