簡體   English   中英

如何在iOS中設置效果音頻單元

[英]how to setup Effect Audio Unit in iOS

我的任務是播放一個本地保存在文檔目錄中的音頻文件,使用效果音頻單元在該音頻文件中應用音頻效果,並將新的音頻文件保存在文檔目錄中。 這是我到目前為止編寫的代碼,但它不起作用。 音頻中未應用效果。 請告訴我這段代碼有什么問題? 提前致謝..

// Builds the playback graph: render callback -> delay effect unit -> RemoteIO,
// reads the whole song file into memory for the callback to drain, and starts
// the output unit. Asserts (debug builds) on any Core Audio setup failure.
- (void) setUpAudioUnits
{
    OSStatus setupErr = noErr;

    // Describe the RemoteIO (hardware output) unit.
    AudioComponentDescription audioCompDesc;
    audioCompDesc.componentType = kAudioUnitType_Output;
    audioCompDesc.componentSubType = kAudioUnitSubType_RemoteIO;
    audioCompDesc.componentManufacturer = kAudioUnitManufacturer_Apple;
    audioCompDesc.componentFlags = 0;
    audioCompDesc.componentFlagsMask = 0;

    // Instantiate the RIO unit from the audio component manager.
    AudioComponent rioComponent = AudioComponentFindNext(NULL, &audioCompDesc);
    setupErr = AudioComponentInstanceNew(rioComponent, &remoteIOUnit);
    NSAssert (setupErr == noErr, @"Couldn't get RIO unit instance");

    // Enable playback on the RIO output element (element 0, output scope).
    UInt32 oneFlag = 1;
    AudioUnitElement outputElement = 0;
    setupErr =
    AudioUnitSetProperty (remoteIOUnit,
                          kAudioOutputUnitProperty_EnableIO,
                          kAudioUnitScope_Output,
                          outputElement,
                          &oneFlag,
                          sizeof(oneFlag));
    NSAssert (setupErr == noErr, @"Couldn't enable RIO output");

    // Canonical 16-bit interleaved stereo LPCM at the hardware sample rate.
    // This is the format the RIO unit renders to the speaker.
    AudioStreamBasicDescription myASBD;
    memset (&myASBD, 0, sizeof (myASBD));
    myASBD.mSampleRate = hardwareSampleRate;
    myASBD.mFormatID = kAudioFormatLinearPCM;
    myASBD.mFormatFlags = kAudioFormatFlagsCanonical;
    myASBD.mBytesPerPacket = 4;
    myASBD.mFramesPerPacket = 1;
    myASBD.mBytesPerFrame = 4;
    myASBD.mChannelsPerFrame = 2;
    myASBD.mBitsPerChannel = 16;

    // Set the render format for RIO output (bus 0, input scope).
    setupErr =
    AudioUnitSetProperty (remoteIOUnit,
                          kAudioUnitProperty_StreamFormat,
                          kAudioUnitScope_Input,
                          outputElement,
                          &myASBD,
                          sizeof (myASBD));
    NSAssert (setupErr == noErr, @"Couldn't set ASBD for RIO on input scope / bus 0");

    // The song must be an LPCM file, preferably in a CAF container.
    // To convert, use /usr/bin/afconvert, like this:
    //   /usr/bin/afconvert --data LEI16 Girlfriend.m4a song.caf
    //
    // The entire file is read into memory here (NOT recommended for large
    // files; prefer a ring buffer that a thread or timer fills and the
    // render callback drains).
    NSURL *songURL = [NSURL fileURLWithPath:
                      [[NSBundle mainBundle] pathForResource: @"song"
                                                      ofType: @"caf"]];
    AudioFileID songFile;
    // FIX: __bridge is required for the NSURL -> CFURLRef cast under ARC.
    setupErr = AudioFileOpenURL((__bridge CFURLRef) songURL,
                                kAudioFileReadPermission,
                                0,
                                &songFile);
    NSAssert (setupErr == noErr, @"Couldn't open audio file");

    UInt64 audioDataByteCount;
    UInt32 audioDataByteCountSize = sizeof (audioDataByteCount);
    setupErr = AudioFileGetProperty(songFile,
                                    kAudioFilePropertyAudioDataByteCount,
                                    &audioDataByteCountSize,
                                    &audioDataByteCount);
    NSAssert (setupErr == noErr, @"Couldn't get size property");
    // FIX: AudioFileReadBytes takes a UInt32 byte count; the old implicit
    // UInt64 -> UInt32 conversion silently truncated large files.
    NSAssert (audioDataByteCount <= UINT32_MAX, @"Audio file too large to read in one shot");

    musicPlaybackState.audioData = malloc ((size_t) audioDataByteCount);
    NSAssert (musicPlaybackState.audioData != NULL, @"Couldn't allocate audio data buffer");
    musicPlaybackState.audioDataByteCount = audioDataByteCount;
    musicPlaybackState.samplePtr = musicPlaybackState.audioData;

    NSLog (@"reading %llu bytes from file", audioDataByteCount);
    UInt32 bytesRead = (UInt32) audioDataByteCount;
    setupErr = AudioFileReadBytes(songFile,
                                  false,
                                  0,
                                  &bytesRead,
                                  musicPlaybackState.audioData);
    NSAssert (setupErr == noErr, @"Couldn't read audio data");
    NSLog (@"read %u bytes from file", (unsigned int) bytesRead);

    AudioStreamBasicDescription fileASBD;
    UInt32 asbdSize = sizeof (fileASBD);
    setupErr = AudioFileGetProperty(songFile,
                                    kAudioFilePropertyDataFormat,
                                    &asbdSize,
                                    &fileASBD);
    NSAssert (setupErr == noErr, @"Couldn't get file asbd");

    // FIX: close the file once its data and format have been read; the
    // original leaked the AudioFileID.
    AudioFileClose(songFile);

    // Output file that captures the processed audio.
    // FIX: check the result (it was ignored) and pass NULL — not nil — for
    // the C AudioChannelLayout pointer.
    setupErr = ExtAudioFileCreateWithURL(outputFileURL,
                                         kAudioFileCAFType,
                                         &fileASBD,
                                         NULL,
                                         kAudioFileFlags_EraseFile,
                                         &musicPlaybackState.extAudioFile);
    NSAssert (setupErr == noErr, @"Couldn't create output file");

    // Describe the delay effect unit. (The ivar is historically named
    // "mixerUnit" elsewhere in this class; the name is kept for compatibility.)
    AudioComponentDescription effectDesc;
    effectDesc.componentType = kAudioUnitType_Effect;
    effectDesc.componentSubType = kAudioUnitSubType_Delay;
    effectDesc.componentManufacturer = kAudioUnitManufacturer_Apple;
    effectDesc.componentFlags = 0;
    effectDesc.componentFlagsMask = 0;

    // Instantiate the delay unit from the audio component manager.
    AudioComponent effectComponent = AudioComponentFindNext(NULL, &effectDesc);
    setupErr = AudioComponentInstanceNew(effectComponent, &mixerUnit);
    NSAssert (setupErr == noErr, @"Couldn't get mixer unit instance");

    // FIX (the likely reason "the effect is not applied"): the delay unit's
    // stream format was never configured. iOS effect units do not accept
    // 16-bit integer PCM; they process in their own canonical AU sample
    // format. Read the unit's native format, fix up the sample rate, and
    // apply it to both scopes so the unit actually initializes in the chain.
    // NOTE(review): MusicPlayerCallback must supply samples in THIS format,
    // not myASBD — verify the callback implementation.
    AudioStreamBasicDescription effectASBD;
    UInt32 effectASBDSize = sizeof (effectASBD);
    setupErr = AudioUnitGetProperty(mixerUnit,
                                    kAudioUnitProperty_StreamFormat,
                                    kAudioUnitScope_Input,
                                    0,
                                    &effectASBD,
                                    &effectASBDSize);
    NSAssert (setupErr == noErr, @"Couldn't get delay unit stream format");
    effectASBD.mSampleRate = hardwareSampleRate;
    setupErr = AudioUnitSetProperty(mixerUnit,
                                    kAudioUnitProperty_StreamFormat,
                                    kAudioUnitScope_Input,
                                    0,
                                    &effectASBD,
                                    sizeof (effectASBD));
    NSAssert (setupErr == noErr, @"Couldn't set delay unit input format");
    setupErr = AudioUnitSetProperty(mixerUnit,
                                    kAudioUnitProperty_StreamFormat,
                                    kAudioUnitScope_Output,
                                    0,
                                    &effectASBD,
                                    sizeof (effectASBD));
    NSAssert (setupErr == noErr, @"Couldn't set delay unit output format");

    // State consumed by the render callbacks.
    effectState.rioUnit = remoteIOUnit;
    effectState.sineFrequency = 23;
    effectState.sinePhase = 0;
    effectState.asbd = myASBD;

    // Feed the delay unit's single input (element 0) from the music player
    // callback. (The original comment claimed "bus 1"; a delay unit has
    // exactly one input element, 0.)
    AURenderCallbackStruct musicPlayerCallbackStruct;
    musicPlayerCallbackStruct.inputProc = MusicPlayerCallback;
    musicPlayerCallbackStruct.inputProcRefCon = &musicPlaybackState;

    setupErr =
    AudioUnitSetProperty(mixerUnit,
                         kAudioUnitProperty_SetRenderCallback,
                         kAudioUnitScope_Global,
                         0,
                         &musicPlayerCallbackStruct,
                         sizeof (musicPlayerCallbackStruct));
    NSAssert (setupErr == noErr, @"Couldn't set delay unit render callback");

    // Directly connect delay output 0 -> RIO input bus 0.
    AudioUnitConnection connection;
    connection.sourceAudioUnit = mixerUnit;
    connection.sourceOutputNumber = 0;
    connection.destInputNumber = outputElement;

    setupErr =
    AudioUnitSetProperty(remoteIOUnit,
                         kAudioUnitProperty_MakeConnection,
                         kAudioUnitScope_Input,
                         outputElement,
                         &connection,
                         sizeof (connection));
    NSAssert (setupErr == noErr, @"Couldn't set mixer-to-RIO connection");

    setupErr = AudioUnitInitialize(mixerUnit);
    NSAssert (setupErr == noErr, @"Couldn't initialize mixer unit");

    setupErr = AudioUnitInitialize(remoteIOUnit);
    NSAssert (setupErr == noErr, @"Couldn't initialize RIO unit");

    // FIX: check the start result instead of discarding it.
    setupErr = AudioOutputUnitStart (remoteIOUnit);
    NSAssert (setupErr == noErr, @"Couldn't start RIO unit");
}

當你初始化好音頻單元之後,就可以通過 `AudioUnitRender` 向它提供 `AudioBufferList`,從而對聲音應用效果。

首先,確保音頻單元接受的格式有聲音。 您可以通過獲取kAudioUnitProperty_StreamFormat屬性來獲取此格式。

如果您的音頻文件格式與音頻單元格式不同,則可以使用 ExtAudioFile「即時」轉換音頻。 要實現這一點,您必須將 `kExtAudioFileProperty_ClientDataFormat` 屬性設置為您從 `kAudioUnitProperty_StreamFormat` 獲取的格式。 這樣,當您讀取音頻文件時,就會獲得所需格式的音頻。

此外,請確保Audio Unit的kAudioUnitProperty_ShouldAllocateBuffer屬性設置為1

要調用 `AudioUnitRender`,你必須准備好有效的 `AudioTimeStamp`、`AudioUnitRenderActionFlags`(可設置為 0)和 `AudioBufferList`。 您不需要為緩衝區分配內存,只需提供緩衝區數量和大小即可。

AudioBufferList *buffer = malloc(sizeof(AudioBufferList) + sizeof(AudioBuffer));
buffer->mNumberBuffers = 2; // at least 2 buffers
buffer->mBuffers[0].mDataByteSize = ...; // size of one buffer
buffer->mBuffers[1].mDataByteSize = ...; 

AudioUnitRenderActionFlags flags = 0;

AudioTimeStamp timeStamp;
memset(&timeStamp, 0, sizeof(AudioTimeStamp));
timeStamp.mFlags = kAudioTimeStampSampleTimeValid;

UInt32 frames = ...; // number of frames in buffer
AudioUnit unit = ...; // your Delay unit

現在您可以調用AudioUnitRender

AudioUnitRender(unit, &flags, &timeStamp, 0, frames, buffer);

音頻單元將要求回調填充緩沖區並對聲音應用效果,之后您將獲得具有有效音頻的緩沖區。 在這種情況下,您需要將kAudioUnitProperty_SetRenderCallback屬性設置為有效的回調。

暫無
暫無

聲明:本站的技術帖子網頁,遵循CC BY-SA 4.0協議,如果您需要轉載,請注明本站網址或者原文地址。任何問題請咨詢:yoyou2525@163.com.

 
粵ICP備18138465號  © 2020-2024 STACKOOM.COM