
📄 rtaudio.cpp

📁 Mobile STK for Symbian OS V0.1
💻 CPP
📖 Page 1 of 5
    error(RtError::DEBUG_WARNING);
    return;
  }

  // Set the format
  i = format;
  if (ioctl(fd, SNDCTL_DSP_SETFMT, &format) == -1 || format != i) {
    close(fd);
    sprintf(message_, "RtApiOss: device (%s) error setting data format.",
            info->name.c_str());
    error(RtError::DEBUG_WARNING);
    return;
  }

  // Probe the supported sample rates.
  info->sampleRates.clear();
  for (unsigned int k=0; k<MAX_SAMPLE_RATES; k++) {
    int speed = SAMPLE_RATES[k];
    if (ioctl(fd, SNDCTL_DSP_SPEED, &speed) != -1 && speed == (int)SAMPLE_RATES[k])
      info->sampleRates.push_back(speed);
  }

  if (info->sampleRates.size() == 0) {
    close(fd);
    sprintf(message_, "RtApiOss: no supported sample rates found for device (%s).",
            info->name.c_str());
    error(RtError::DEBUG_WARNING);
    return;
  }

  // That's all ... close the device and return
  close(fd);
  info->probed = true;
  return;
}

bool RtApiOss :: probeDeviceOpen(int device, StreamMode mode, int channels,
                                 int sampleRate, RtAudioFormat format,
                                 int *bufferSize, int numberOfBuffers)
{
  int buffers, buffer_bytes, device_channels, device_format;
  int srate, temp, fd;
  int *handle = (int *) stream_.apiHandle;

  const char *name = devices_[device].name.c_str();

  if (mode == OUTPUT)
    fd = open(name, O_WRONLY | O_NONBLOCK);
  else { // mode == INPUT
    if (stream_.mode == OUTPUT && stream_.device[0] == device) {
      // We just set the same device for playback ... close and reopen for duplex (OSS only).
      close(handle[0]);
      handle[0] = 0;
      // First check that the number previously set channels is the same.
      if (stream_.nUserChannels[0] != channels) {
        sprintf(message_, "RtApiOss: input/output channels must be equal for OSS duplex device (%s).", name);
        goto error;
      }
      fd = open(name, O_RDWR | O_NONBLOCK);
    }
    else
      fd = open(name, O_RDONLY | O_NONBLOCK);
  }

  if (fd == -1) {
    if (errno == EBUSY || errno == EAGAIN)
      sprintf(message_, "RtApiOss: device (%s) is busy and cannot be opened.",
              name);
    else
      sprintf(message_, "RtApiOss: device (%s) cannot be opened.", name);
    goto error;
  }

  // Now reopen in blocking mode.
  close(fd);
  if (mode == OUTPUT)
    fd = open(name, O_WRONLY | O_SYNC);
  else { // mode == INPUT
    if (stream_.mode == OUTPUT && stream_.device[0] == device)
      fd = open(name, O_RDWR | O_SYNC);
    else
      fd = open(name, O_RDONLY | O_SYNC);
  }

  if (fd == -1) {
    sprintf(message_, "RtApiOss: device (%s) cannot be opened.", name);
    goto error;
  }

  // Get the sample format mask
  int mask;
  if (ioctl(fd, SNDCTL_DSP_GETFMTS, &mask) == -1) {
    close(fd);
    sprintf(message_, "RtApiOss: device (%s) can't get supported audio formats.",
            name);
    goto error;
  }

  // Determine how to set the device format.
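  // [Added note, not in the original source] The block below first tries the
  // user's requested format in the device's native byte order (AFMT_*_NE); if
  // only the opposite-endian variant appears in the mask, it is accepted and
  // doByteSwap is set so the samples are byte-swapped in software.  If the
  // requested format is unavailable, it falls back to signed 16-bit, then
  // 32-bit (when the AFMT_S32 macros exist), then 8-bit.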
  stream_.userFormat = format;
  device_format = -1;
  stream_.doByteSwap[mode] = false;
  if (format == RTAUDIO_SINT8) {
    if (mask & AFMT_S8) {
      device_format = AFMT_S8;
      stream_.deviceFormat[mode] = RTAUDIO_SINT8;
    }
  }
  else if (format == RTAUDIO_SINT16) {
    if (mask & AFMT_S16_NE) {
      device_format = AFMT_S16_NE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT16;
    }
#if BYTE_ORDER == LITTLE_ENDIAN
    else if (mask & AFMT_S16_BE) {
      device_format = AFMT_S16_BE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT16;
      stream_.doByteSwap[mode] = true;
    }
#else
    else if (mask & AFMT_S16_LE) {
      device_format = AFMT_S16_LE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT16;
      stream_.doByteSwap[mode] = true;
    }
#endif
  }
#if defined (AFMT_S32_NE) && defined (AFMT_S32_LE) && defined (AFMT_S32_BE)
  else if (format == RTAUDIO_SINT32) {
    if (mask & AFMT_S32_NE) {
      device_format = AFMT_S32_NE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT32;
    }
#if BYTE_ORDER == LITTLE_ENDIAN
    else if (mask & AFMT_S32_BE) {
      device_format = AFMT_S32_BE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT32;
      stream_.doByteSwap[mode] = true;
    }
#else
    else if (mask & AFMT_S32_LE) {
      device_format = AFMT_S32_LE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT32;
      stream_.doByteSwap[mode] = true;
    }
#endif
  }
#endif

  if (device_format == -1) {
    // The user requested format is not natively supported by the device.
    if (mask & AFMT_S16_NE) {
      device_format = AFMT_S16_NE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT16;
    }
#if BYTE_ORDER == LITTLE_ENDIAN
    else if (mask & AFMT_S16_BE) {
      device_format = AFMT_S16_BE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT16;
      stream_.doByteSwap[mode] = true;
    }
#else
    else if (mask & AFMT_S16_LE) {
      device_format = AFMT_S16_LE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT16;
      stream_.doByteSwap[mode] = true;
    }
#endif
#if defined (AFMT_S32_NE) && defined (AFMT_S32_LE) && defined (AFMT_S32_BE)
    else if (mask & AFMT_S32_NE) {
      device_format = AFMT_S32_NE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT32;
    }
#if BYTE_ORDER == LITTLE_ENDIAN
    else if (mask & AFMT_S32_BE) {
      device_format = AFMT_S32_BE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT32;
      stream_.doByteSwap[mode] = true;
    }
#else
    else if (mask & AFMT_S32_LE) {
      device_format = AFMT_S32_LE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT32;
      stream_.doByteSwap[mode] = true;
    }
#endif
#endif
    else if (mask & AFMT_S8) {
      device_format = AFMT_S8;
      stream_.deviceFormat[mode] = RTAUDIO_SINT8;
    }
  }

  if (stream_.deviceFormat[mode] == 0) {
    // This really shouldn't happen ...
    close(fd);
    sprintf(message_, "RtApiOss: device (%s) data format not supported by RtAudio.",
            name);
    goto error;
  }

  // Determine the number of channels for this device.  Note that the
  // channel value requested by the user might be < min_X_Channels.
  stream_.nUserChannels[mode] = channels;
  device_channels = channels;
  if (mode == OUTPUT) {
    if (channels < devices_[device].minOutputChannels)
      device_channels = devices_[device].minOutputChannels;
  }
  else { // mode == INPUT
    if (stream_.mode == OUTPUT && stream_.device[0] == device) {
      // We're doing duplex setup here.
      if (channels < devices_[device].minDuplexChannels)
        device_channels = devices_[device].minDuplexChannels;
    }
    else {
      if (channels < devices_[device].minInputChannels)
        device_channels = devices_[device].minInputChannels;
    }
  }
  stream_.nDeviceChannels[mode] = device_channels;

  // Attempt to set the buffer size.  According to OSS, the minimum
  // number of buffers is two.  The supposed minimum buffer size is 16
  // bytes, so that will be our lower bound.  The argument to this
  // call is in the form 0xMMMMSSSS (hex), where the buffer size (in
  // bytes) is given as 2^SSSS and the number of buffers as 2^MMMM.
  // We'll check the actual value used near the end of the setup
  // procedure.
  buffer_bytes = *bufferSize * formatBytes(stream_.deviceFormat[mode]) * device_channels;
  if (buffer_bytes < 16) buffer_bytes = 16;
  buffers = numberOfBuffers;
  if (buffers < 2) buffers = 2;
  temp = ((int) buffers << 16) + (int)(log10((double)buffer_bytes)/log10(2.0));
  if (ioctl(fd, SNDCTL_DSP_SETFRAGMENT, &temp)) {
    close(fd);
    sprintf(message_, "RtApiOss: error setting fragment size for device (%s).",
            name);
    goto error;
  }
  stream_.nBuffers = buffers;

  // Set the data format.
  temp = device_format;
  if (ioctl(fd, SNDCTL_DSP_SETFMT, &device_format) == -1 || device_format != temp) {
    close(fd);
    sprintf(message_, "RtApiOss: error setting data format for device (%s).",
            name);
    goto error;
  }

  // Set the number of channels.
  temp = device_channels;
  if (ioctl(fd, SNDCTL_DSP_CHANNELS, &device_channels) == -1 || device_channels != temp) {
    close(fd);
    sprintf(message_, "RtApiOss: error setting %d channels on device (%s).",
            temp, name);
    goto error;
  }

  // Set the sample rate.
  srate = sampleRate;
  temp = srate;
  if (ioctl(fd, SNDCTL_DSP_SPEED, &srate) == -1) {
    close(fd);
    sprintf(message_, "RtApiOss: error setting sample rate = %d on device (%s).",
            temp, name);
    goto error;
  }

  // Verify the sample rate setup worked.
  if (abs(srate - temp) > 100) {
    close(fd);
    sprintf(message_, "RtApiOss: error ... audio device (%s) doesn't support sample rate of %d.",
            name, temp);
    goto error;
  }
  stream_.sampleRate = sampleRate;

  if (ioctl(fd, SNDCTL_DSP_GETBLKSIZE, &buffer_bytes) == -1) {
    close(fd);
    sprintf(message_, "RtApiOss: error getting buffer size for device (%s).",
            name);
    goto error;
  }

  // Save buffer size (in sample frames).
  *bufferSize = buffer_bytes / (formatBytes(stream_.deviceFormat[mode]) * device_channels);
  stream_.bufferSize = *bufferSize;

  if (mode == INPUT && stream_.mode == OUTPUT &&
      stream_.device[0] == device) {
    // We're doing duplex setup here.
    stream_.deviceFormat[0] = stream_.deviceFormat[1];
    stream_.nDeviceChannels[0] = device_channels;
  }

  // Allocate the stream handles if necessary and then save.
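  // [Added note, not in the original source] apiHandle holds the two OSS file
  // descriptors: index 0 is the playback (OUTPUT) descriptor and index 1 the
  // capture (INPUT) descriptor, matching the stream_.device[0]/[1] usage
  // above.  handle[mode] = fd below records the descriptor just opened.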
  if ( stream_.apiHandle == 0 ) {
    handle = (int *) calloc(2, sizeof(int));
    stream_.apiHandle = (void *) handle;
    handle[0] = 0;
    handle[1] = 0;
  }
  else {
    handle = (int *) stream_.apiHandle;
  }
  handle[mode] = fd;

  // Set flags for buffer conversion
  stream_.doConvertBuffer[mode] = false;
  if (stream_.userFormat != stream_.deviceFormat[mode])
    stream_.doConvertBuffer[mode] = true;
  if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode])
    stream_.doConvertBuffer[mode] = true;

  // Allocate necessary internal buffers
  if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {

    long buffer_bytes;
    if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
      buffer_bytes = stream_.nUserChannels[0];
    else
      buffer_bytes = stream_.nUserChannels[1];

    buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
    if (stream_.userBuffer) free(stream_.userBuffer);
    stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
    if (stream_.userBuffer == NULL) {
      close(fd);
      sprintf(message_, "RtApiOss: error allocating user buffer memory (%s).",
              name);
      goto error;
    }
  }

  if ( stream_.doConvertBuffer[mode] ) {

    long buffer_bytes;
    bool makeBuffer = true;
    if ( mode == OUTPUT )
      buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
    else { // mode == INPUT
      buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
      if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
        long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
        if ( buffer_bytes < bytes_out ) makeBuffer = false;
      }
    }

    if ( makeBuffer ) {
      buffer_bytes *= *bufferSize;
      if (stream_.deviceBuffer) free(stream_.deviceBuffer);
      stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
      if (stream_.deviceBuffer == NULL) {
        close(fd);
        sprintf(message_, "RtApiOss: error allocating device buffer memory (%s).",
                name);
        goto error;
      }
    }
  }

  stream_.device[mode] = device;
  stream_.state = STREAM_STOPPED;

  if ( stream_.mode == OUTPUT && mode == INPUT ) {
    stream_.mode = DUPLEX;
    if (stream_.device[0] == device)
      handle[0] = fd;
  }
  else
    stream_.mode = mode;

  // Setup the buffer conversion information structure.
  if ( stream_.doConvertBuffer[mode] ) {

    if (mode == INPUT) { // convert device to user buffer
      stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1];
      stream_.convertInfo[mode].outJump = stream_.nUserChannels[1];
      stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1];
      stream_.convertInfo[mode].outFormat = stream_.userFormat;
    }
    else { // convert user to device buffer
      stream_.convertInfo[mode].inJump = stream_.nUserChannels[0];
      stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0];
      stream_.convertInfo[mode].inFormat = stream_.userFormat;
      stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0];
    }

    if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump )
      stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump;
    else
      stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump;

    // Set up the interleave/deinterleave offsets.
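    // [Added note, not in the original source] On the non-interleaved side the
    // per-channel offsets below are whole buffers apart, while the interleaved
    // side advances one sample per channel.  For example, with 2 channels and
    // a 256-frame bufferSize, the non-interleaved offsets are 0 and 256 and
    // the interleaved offsets are 0 and 1 with a jump equal to the channel
    // count.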
    if ( mode == INPUT && stream_.deInterleave[1] ) {
      for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
        stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
        stream_.convertInfo[mode].outOffset.push_back( k );
        stream_.convertInfo[mode].inJump = 1;
      }
    }
    else if (mode == OUTPUT && stream_.deInterleave[0]) {
      for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
        stream_.convertInfo[mode].inOffset.push_back( k );
        stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
        stream_.convertInfo[mode].outJump = 1;
      }
    }
    else {
      for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
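
The device setup in probeDeviceOpen() reduces to a short sequence of OSS ioctl calls. The following standalone sketch (an added illustration, not part of rtaudio.cpp) replays that sequence against an assumed /dev/dsp node with example values: 512-frame fragments, 4 buffers, 16-bit native-endian stereo at 44100 Hz. It uses the same 0xMMMMSSSS fragment argument computed in the listing, where the low bits select the fragment size as a power of two.

// Added illustration: minimal OSS setup mirroring probeDeviceOpen() above.
// Assumptions: an OSS device at /dev/dsp, 512-frame fragments, 4 buffers,
// 16-bit native-endian stereo at 44100 Hz.
#include <cmath>
#include <cstdio>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/soundcard.h>
#include <unistd.h>

int main()
{
  const char *name = "/dev/dsp";           // assumed OSS device node
  int fd = open(name, O_WRONLY | O_SYNC);  // blocking open, as in the listing
  if (fd == -1) { std::perror("open"); return 1; }

  // Fragment request: 512 frames * 2 channels * 2 bytes = 2048 bytes, so the
  // size selector is log2(2048) = 11 (0xB); with 4 buffers the argument is
  // (4 << 16) + 11 = 0x4000B, following the arithmetic used in the listing.
  int buffer_bytes = 512 * 2 * 2;
  int buffers = 4;
  int frag = (buffers << 16) + (int)(std::log10((double)buffer_bytes)/std::log10(2.0));
  if (ioctl(fd, SNDCTL_DSP_SETFRAGMENT, &frag) == -1) std::perror("SNDCTL_DSP_SETFRAGMENT");

  int format = AFMT_S16_NE;                // native-endian 16-bit, the preferred fallback above
  if (ioctl(fd, SNDCTL_DSP_SETFMT, &format) == -1) std::perror("SNDCTL_DSP_SETFMT");

  int channels = 2;
  if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1) std::perror("SNDCTL_DSP_CHANNELS");

  int speed = 44100;
  if (ioctl(fd, SNDCTL_DSP_SPEED, &speed) == -1) std::perror("SNDCTL_DSP_SPEED");

  // The driver may round any of these values; read back the granted fragment
  // size (in bytes), as probeDeviceOpen() does with SNDCTL_DSP_GETBLKSIZE.
  int blksize = 0;
  if (ioctl(fd, SNDCTL_DSP_GETBLKSIZE, &blksize) == -1) std::perror("SNDCTL_DSP_GETBLKSIZE");
  std::printf("format=%d channels=%d rate=%d fragment=%d bytes\n",
              format, channels, speed, blksize);

  close(fd);
  return 0;
}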
