
📄 audiouniteffect.cpp

📁 Audacity is free, open-source software for recording and editing sound. It runs on Mac OS X, Microsoft Windows, GNU/Linux, and other operating systems.
💻 CPP
📖 Page 1 of 2
/**********************************************************************

  Audacity: A Digital Audio Editor

  AudioUnitEffect.cpp

  Dominic Mazzoni

**********************************************************************/

#include "AudioUnitEffect.h"

#include <wx/defs.h>
#include <wx/button.h>
#include <wx/control.h>
#include <wx/frame.h>
#include <wx/panel.h>
#include <wx/sizer.h>
#include <wx/settings.h>

#if ((wxMAJOR_VERSION == 2) && (wxMINOR_VERSION < 6))
#error Audio Units support requires wxMac 2.6
#endif

#include <wx/mac/private.h>

//
// Forward declarations of GUI classes
//

class AudioUnitGUIControl : public wxControl
{
 public:
   inline AudioUnitGUIControl(wxWindow *parent, wxWindowID id,
                              wxPoint pos, wxSize size,
                              ControlRef controlRef) {
      Create(parent, id, pos, size, controlRef);
   }

   virtual ~AudioUnitGUIControl();

   bool Create(wxWindow *parent, wxWindowID id,
               wxPoint pos, wxSize size,
               ControlRef controlRef);

   void OnMouse(wxMouseEvent &event);

 private:
   short GetModifiers(wxMouseEvent &event);

   DECLARE_EVENT_TABLE()
};

class AudioUnitDialog : public wxDialog
{
 public:
   AudioUnitDialog(wxWindow *parent, wxWindowID id, wxString title,
                   AudioUnit unit,
                   AudioUnitCarbonView carbonView);

   void OnOK(wxCommandEvent &event);
   void OnCancel(wxCommandEvent &event);
   void OnPreview(wxCommandEvent &event);

 private:
   AudioUnit             mUnit;
   AudioUnitGUIControl  *mGUIControl;
   wxBoxSizer           *mMainSizer;
   EventHandlerRef       mEventHandlerRef;

   DECLARE_EVENT_TABLE()
};

//
// AudioUnitEffect
//

AudioUnitEffect::AudioUnitEffect(wxString name, Component component):
   mName(name),
   mComponent(component)
{
   OSErr result;

   mUnit = NULL;
   result = OpenAComponent(mComponent, &mUnit);
   if (result != 0)
      return;
}

AudioUnitEffect::~AudioUnitEffect()
{
   CloseComponent(mUnit);
}

wxString AudioUnitEffect::GetEffectName()
{
   return mName;
}

wxString AudioUnitEffect::GetEffectAction()
{
   return wxString::Format(_("Performing Effect: %s"),
                           (const char *)mName);
}

int AudioUnitEffect::GetEffectFlags()
{
   int flags = PLUGIN_EFFECT | PROCESS_EFFECT;

   return flags;
}

bool AudioUnitEffect::Init()
{
   ComponentResult auResult;

   mSupportsMono = SetRateAndChannels(mUnit, 1, mProjectRate);
   mSupportsStereo = SetRateAndChannels(mUnit, 2, mProjectRate);

   if (!mSupportsMono && !mSupportsStereo) {
      mSupportsMono = SetRateAndChannels(mUnit, 1, 44100.0);
      mSupportsStereo = SetRateAndChannels(mUnit, 2, 44100.0);

      if (!mSupportsMono && !mSupportsStereo) {
         printf("Audio Unit doesn't support mono or stereo.\n");
         return false;
      }
   }

   auResult = AudioUnitInitialize(mUnit);
   if (auResult != 0) {
      printf("Unable to initialize\n");
      return false;
   }

   return true;
}

bool AudioUnitEffect::PromptUser()
{
   OSErr result;
   ComponentDescription desc;
   Component carbonViewComponent = NULL;
   AudioUnitCarbonView carbonView = NULL;

   GetComponentInfo(mComponent, &desc, 0, 0, 0);
   carbonViewComponent = GetCarbonViewComponent(desc.componentSubType);
   result = OpenAComponent(carbonViewComponent, &carbonView);
   if (result != 0) {
      printf("Couldn't open carbon view component\n");
      return false;
   }

   AudioUnitDialog dlog(mParent, -1, mName,
                        mUnit, carbonView);
   dlog.CentreOnParent();
   dlog.ShowModal();

   CloseComponent(carbonView);

   return (dlog.GetReturnCode() == wxID_OK);
}

bool AudioUnitEffect::Process()
{
   TrackListIterator iter(mWaveTracks);
   int count = 0;
   Track *left = iter.First();
   Track *right;

   while(left) {
      longSampleCount lstart, rstart;
      sampleCount len;
      GetSamples((WaveTrack *)left, &lstart, &len);

      right = NULL;
      if (left->GetLinked() && mSupportsStereo) {
         right = iter.Next();
         GetSamples((WaveTrack *)right, &rstart, &len);
      }

      bool success = false;

      if (!mSupportsStereo && right) {
         // If the effect is mono, apply to each channel separately
         success = ProcessStereo(count, (WaveTrack *)left, NULL,
                                 lstart, 0, len);
         if (success)
            success = ProcessStereo(count, (WaveTrack *)right, NULL,
                                    rstart, 0, len);
      }
      else success = ProcessStereo(count,
                                   (WaveTrack *)left, (WaveTrack *)right,
                                   lstart, rstart, len);
      if (!success)
         return false;

      left = iter.Next();
      count++;
   }

   return true;
}

void AudioUnitEffect::End()
{
   AudioUnitUninitialize(mUnit);
}

void AudioUnitEffect::GetSamples(WaveTrack *track,
                                 longSampleCount *start,
                                 sampleCount *len)
{
   double trackStart = track->GetStartTime();
   double trackEnd = track->GetEndTime();
   double t0 = mT0 < trackStart? trackStart: mT0;
   double t1 = mT1 > trackEnd? trackEnd: mT1;

   if (t1 > t0) {
      *start = track->TimeToLongSamples(t0);
      longSampleCount end = track->TimeToLongSamples(t1);
      *len = (sampleCount)(end - *start);
   }
   else {
      *start = 0;
      *len  = 0;
   }
}

bool AudioUnitEffect::SetRateAndChannels(AudioUnit unit,
                                         int numChannels,
                                         Float64 sampleRate)
{
   AudioStreamBasicDescription  streamFormat;
   ComponentResult              auResult;

   auResult = AudioUnitSetProperty(unit, kAudioUnitProperty_SampleRate,
                                   kAudioUnitScope_Global, 0,
                                   &sampleRate, sizeof(Float64));
   if (auResult != 0) {
      printf("Didn't accept sample rate\n");
      return false;
   }

   streamFormat.mSampleRate = sampleRate;
   streamFormat.mFormatID = kAudioFormatLinearPCM;
   streamFormat.mFormatFlags = kAudioFormatFlagsNativeFloatPacked |
      kAudioFormatFlagIsNonInterleaved;
   streamFormat.mBitsPerChannel = 32;
   streamFormat.mChannelsPerFrame = numChannels;
   streamFormat.mFramesPerPacket = 1;
   streamFormat.mBytesPerFrame = 4;
   streamFormat.mBytesPerPacket = 4;

   auResult = AudioUnitSetProperty(unit, kAudioUnitProperty_StreamFormat,
                                   kAudioUnitScope_Input, 0,
                                   &streamFormat,
                                   sizeof(AudioStreamBasicDescription));
   if (auResult != 0)
      return false;

   auResult = AudioUnitSetProperty(unit, kAudioUnitProperty_StreamFormat,
                                   kAudioUnitScope_Output, 0,
                                   &streamFormat,
                                   sizeof(AudioStreamBasicDescription));
   if (auResult != 0)
      return false;

   auResult = AudioUnitSetProperty(unit, kAudioUnitProperty_StreamFormat,
                                   kAudioUnitScope_Global, 0,
                                   &streamFormat,
                                   sizeof(AudioStreamBasicDescription));
   if (auResult != 0) {
      printf("Didn't accept global stream format\n");
      return false;
   }

   return true;
}

bool AudioUnitEffect::CopyParameters(AudioUnit srcUnit, AudioUnit dstUnit)
{
   ComponentResult auResult;
   int numParameters, i;
   AudioUnitParameterID *parameters;
   Float32 parameterValue;
   UInt32 size;

   // Get number of parameters by passing NULL in the data field and
   // getting back the size of the parameter list

   size = 0;
   auResult = AudioUnitGetProperty(srcUnit, kAudioUnitProperty_ParameterList,
                                   kAudioUnitScope_Global, 0,
                                   NULL, &size);
   if (auResult != 0) {
      printf("Couldn't get number of parameters\n");
      return false;
   }

   // Now get the list of all parameter IDs

   numParameters = size / sizeof(AudioUnitParameterID);
   parameters = new AudioUnitParameterID[numParameters];
   auResult = AudioUnitGetProperty(srcUnit, kAudioUnitProperty_ParameterList,
                                   kAudioUnitScope_Global, 0,
                                   parameters, &size);
   if (auResult != 0) {
      printf("Couldn't get parameter list\n");
      delete[] parameters;
      return false;
   }

   // Copy the parameters from the main unit to the unit specific to
   // this track

   for(i=0; i<numParameters; i++) {
      auResult = AudioUnitGetParameter(srcUnit, parameters[i],
                                       kAudioUnitScope_Global, 0,
                                       &parameterValue);
      if (auResult != 0) {
         printf("Couldn't get parameter %d: ID=%d\n", i, (int)parameters[i]);
         continue;
      }

      auResult = AudioUnitSetParameter(dstUnit, parameters[i],
                                       kAudioUnitScope_Global, 0,
                                       parameterValue, 0);
      if (auResult != 0)
         printf("Couldn't set parameter %d: ID=%d\n", i, (int)parameters[i]);
   }

   delete[] parameters;

   return true;
}

bool AudioUnitEffect::ProcessStereo(int count,
                                    WaveTrack *left, WaveTrack *right,
                                    longSampleCount lstart,
                                    longSampleCount rstart,
                                    sampleCount len)
{
   int numChannels = (right != NULL? 2: 1);
   Float64 sampleRate = left->GetRate();
   AudioUnit trackUnit;
   AURenderCallbackStruct callbackStruct;
   AudioTimeStamp timeStamp;
   ComponentResult auResult;
   int waveTrackBlockSize;
   UInt32 size, unitBlockSize;
   float *leftBuffer, *rightBuffer;

   // Audio Units cannot have their rate and number of channels set
   // after they've been initialized.  So, we open a new audio unit
   // for each track (or pair of tracks) and copy the parameters
   // over.  The only area where this might sometimes present a
   // problem is when the parameter is tied to the sample rate, and
   // the sample rate for a track is different than the project sample
   // rate.  The user can always work around this by making the track
   // and project sample rates match temporarily, though.

   auResult = OpenAComponent(mComponent, &trackUnit);
   if (auResult != 0) {
      printf("Couldn't open audio unit\n");
      return false;
   }

   if (!SetRateAndChannels(trackUnit, numChannels, sampleRate)) {
      printf("Unable to setup audio unit for channels=%d rate=%.1f\n",
             numChannels, sampleRate);
      CloseComponent(trackUnit);
      return false;
   }

   unitBlockSize = 0;
   size = sizeof(UInt32);
   auResult = AudioUnitGetProperty(trackUnit,
                                   kAudioUnitProperty_MaximumFramesPerSlice,
                                   kAudioUnitScope_Global,
                                   0,
                                   &unitBlockSize,
                                   &size);
   if (unitBlockSize == 0 || auResult != 0) {
      printf("Warning: didn't get audio unit's MaximumFramesPerSlice\n");
      printf("Trying to set MaximumFramesPerSlice to 512\n");
      unitBlockSize = 512;
      auResult = AudioUnitSetProperty(trackUnit,
                                      kAudioUnitProperty_MaximumFramesPerSlice,
                                      kAudioUnitScope_Global,
                                      0,
                                      &unitBlockSize,
                                      sizeof(UInt32));
      if (auResult != 0)
         printf("Unable to set MaximumFramesPerSlice, rendering may fail...\n");
   }

   auResult = AudioUnitInitialize(trackUnit);
   if (auResult != 0) {
      printf("Couldn't initialize audio unit\n");
      CloseComponent(trackUnit);
      return false;
   }

   if (!CopyParameters(mUnit, trackUnit)) {
      AudioUnitUninitialize(trackUnit);
      CloseComponent(trackUnit);
      return false;
   }

   auResult = AudioUnitReset(trackUnit, kAudioUnitScope_Global, 0);
   if (auResult != 0) {
      printf("Reset failed.\n");
      AudioUnitUninitialize(trackUnit);
      CloseComponent(trackUnit);
      return false;
   }

   callbackStruct.inputProc = SimpleAudioRenderCallback;
   callbackStruct.inputProcRefCon = this;
   auResult = AudioUnitSetProperty(trackUnit,
                                   kAudioUnitProperty_SetRenderCallback,
                                   kAudioUnitScope_Input,
                                   0,
                                   &callbackStruct,
                                   sizeof(AURenderCallbackStruct));
   if (auResult != 0) {
      printf("Setting input render callback failed.\n");
      AudioUnitUninitialize(trackUnit);
      CloseComponent(trackUnit);
      return false;
   }

   memset(&timeStamp, 0, sizeof(AudioTimeStamp));
   timeStamp.mSampleTime = 0; // This is a double-precision number that should
                              // accumulate the number of frames processed so far
   timeStamp.mFlags = kAudioTimeStampSampleTimeValid;

   waveTrackBlockSize = left->GetMaxBlockSize() * 2;
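
The listing is cut off here at the end of page 1; the SimpleAudioRenderCallback installed above through kAudioUnitProperty_SetRenderCallback is defined later in the file. Any callback registered that way has to match Apple's AURenderCallback signature. The sketch below only illustrates that shape; the function name and the silence-filling body are hypothetical and are not Audacity's actual implementation.

// Hypothetical sketch only -- not the SimpleAudioRenderCallback from page 2.
// It shows the AURenderCallback shape that kAudioUnitProperty_SetRenderCallback
// expects for an input callback.
static OSStatus ExampleInputCallback(void *inRefCon,
                                     AudioUnitRenderActionFlags *ioActionFlags,
                                     const AudioTimeStamp *inTimeStamp,
                                     UInt32 inBusNumber,
                                     UInt32 inNumberFrames,
                                     AudioBufferList *ioData)
{
   // inRefCon is whatever was stored in callbackStruct.inputProcRefCon
   // (above, the AudioUnitEffect instance).  A real callback would copy
   // inNumberFrames of source samples into each non-interleaved buffer;
   // this sketch just writes silence.
   for (UInt32 ch = 0; ch < ioData->mNumberBuffers; ch++) {
      float *out = (float *)ioData->mBuffers[ch].mData;
      for (UInt32 i = 0; i < inNumberFrames; i++)
         out[i] = 0.0f;
   }
   return noErr;
}

With the callback installed, the host side presumably drives processing by calling AudioUnitRender() one slice at a time and advancing timeStamp.mSampleTime by the frames rendered on each call, which is consistent with the comment above that mSampleTime should accumulate the number of frames processed so far.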
