Thursday, June 6, 2013

AVAudioSession AVAudioSessionCategoryAudioProcessing example in Objective-C (iOS).


AVAudioSession AVAudioSessionCategoryAudioProcessing

NSString *const AVAudioSessionCategoryAudioProcessing;

For using an audio hardware codec or signal processor while not playing or recording audio. Use this category, for example, when performing offline audio format conversion.
This category disables playback (audio output) and disables recording (audio input).
Audio processing does not normally continue when your app is in the background. However, when your app moves to the background, you can request additional time to complete processing, as sketched below. For more information, see the iOS App Programming Guide.
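
A minimal sketch of requesting that extra background time with UIKit's background-task API (the processingTask variable and the dispatch queue choice are illustrative assumptions, not part of this article's examples):

// A minimal sketch (assumes a UIKit app): ask for extra background time
// before starting offline processing.
__block UIBackgroundTaskIdentifier processingTask;
processingTask = [[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:^{
    // The system is reclaiming the time; clean up and end the task.
    [[UIApplication sharedApplication] endBackgroundTask:processingTask];
    processingTask = UIBackgroundTaskInvalid;
}];

dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    // ... perform the offline conversion here ...
    [[UIApplication sharedApplication] endBackgroundTask:processingTask];
    processingTask = UIBackgroundTaskInvalid;
});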

AVAudioSession AVAudioSessionCategoryAudioProcessing example.
// Player setup
NSError *error = nil;

// Note: -setDelegate: on AVAudioSession is deprecated as of iOS 6;
// AVAudioSessionInterruptionNotification is the modern replacement.
[[AVAudioSession sharedInstance] setDelegate:self];
[[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryAudioProcessing error:&error];

// Activate the audio session.
[[AVAudioSession sharedInstance] setActive:YES error:&error];

// stringURL is assumed to hold the path to a local audio file.
NSURL *fileURL = [NSURL fileURLWithPath:stringURL];
AVAudioPlayer *player = [[AVAudioPlayer alloc] initWithContentsOfURL:fileURL error:&error];

if (player == nil) {
    NSLog(@"AudioPlayer did not load properly: %@", [error description]);
} else {
    NSLog(@"player is ok");
}

Example of temporarily switching to AVAudioSessionCategoryAudioProcessing and restoring the original category.
AVAudioSession *session = [AVAudioSession sharedInstance];
NSError *error = nil;
NSString *originalSessionCategory = [session category];
[session setCategory:AVAudioSessionCategoryAudioProcessing error:&error];
...
...
// Restore the original category; the BOOL return value is the reliable
// indicator of failure, so check it rather than the error pointer alone.
if (![session setCategory:originalSessionCategory error:&error]) {
    NSLog(@"%@", [error localizedDescription]);
}

AVAudioSession AVAudioSessionCategoryAudioProcessing example.
// Requires the AudioToolbox framework: #import <AudioToolbox/AudioToolbox.h>
- (void)scaleAudioFileAmplitude:(NSURL *)theURL withAmpScale:(float)ampScale
{
    OSStatus err = noErr;

    ExtAudioFileRef audiofile;
    err = ExtAudioFileOpenURL((__bridge CFURLRef)theURL, &audiofile);
    assert(audiofile);

    // get some info about the file's format.
    AudioStreamBasicDescription fileFormat;
    UInt32 size = sizeof(fileFormat);
    err = ExtAudioFileGetProperty(audiofile, kExtAudioFileProperty_FileDataFormat, &size, &fileFormat);

    // we'll need to know what type of file it is later when we write
    AudioFileID aFile;
    size = sizeof(aFile);
    err = ExtAudioFileGetProperty(audiofile, kExtAudioFileProperty_AudioFile, &size, &aFile);
    AudioFileTypeID fileType;
    size = sizeof(fileType);
    err = AudioFileGetProperty(aFile, kAudioFilePropertyFileFormat, &size, &fileType);

    // tell the ExtAudioFile API what format we want samples back in
    AudioStreamBasicDescription clientFormat;
    bzero(&clientFormat, sizeof(clientFormat));
    clientFormat.mChannelsPerFrame = fileFormat.mChannelsPerFrame;
    clientFormat.mBytesPerFrame = 4;
    clientFormat.mBytesPerPacket = clientFormat.mBytesPerFrame;
    clientFormat.mFramesPerPacket = 1;
    clientFormat.mBitsPerChannel = 32;
    clientFormat.mFormatID = kAudioFormatLinearPCM;
    clientFormat.mSampleRate = fileFormat.mSampleRate;
    clientFormat.mFormatFlags = kLinearPCMFormatFlagIsFloat | kAudioFormatFlagIsNonInterleaved;
    err = ExtAudioFileSetProperty(audiofile, kExtAudioFileProperty_ClientDataFormat, sizeof(clientFormat), &clientFormat);

    // find out how many frames we need to read
    SInt64 numFrames = 0;
    size = sizeof(numFrames);
    err = ExtAudioFileGetProperty(audiofile, kExtAudioFileProperty_FileLengthFrames, &size, &numFrames);

    // create the buffers for reading in data
    AudioBufferList *bufferList = malloc(sizeof(AudioBufferList) + sizeof(AudioBuffer) * (clientFormat.mChannelsPerFrame - 1));
    bufferList->mNumberBuffers = clientFormat.mChannelsPerFrame;
    //printf("bufferList->mNumberBuffers = %lu \n\n", bufferList->mNumberBuffers);

    for (int ii=0; ii < bufferList->mNumberBuffers; ++ii) {
        bufferList->mBuffers[ii].mDataByteSize = (UInt32)(sizeof(float) * numFrames);
        bufferList->mBuffers[ii].mNumberChannels = 1;
        bufferList->mBuffers[ii].mData = malloc(bufferList->mBuffers[ii].mDataByteSize);
    }

    // read in the data
    UInt32 rFrames = (UInt32)numFrames;
    err = ExtAudioFileRead(audiofile, &rFrames, bufferList);

    // close the file
    err = ExtAudioFileDispose(audiofile);

    // process the audio
    for (int ii=0; ii < bufferList->mNumberBuffers; ++ii) {
        float *fBuf = (float *)bufferList->mBuffers[ii].mData;
        for (int jj=0; jj < rFrames; ++jj) {
            *fBuf = *fBuf * ampScale;
            fBuf++;
        }
    }

    // open the file for writing
    err = ExtAudioFileCreateWithURL((__bridge CFURLRef)theURL, fileType, &fileFormat, NULL, kAudioFileFlags_EraseFile, &audiofile);

    NSError *error = nil;

/*************************** You Need This Now ****************************/
    AVAudioSession *session = [AVAudioSession sharedInstance];
    [session setCategory:AVAudioSessionCategoryAudioProcessing error:&error];
/************************* End You Need This Now **************************/

    // tell the ExtAudioFile API what format we'll be sending samples in
    err = ExtAudioFileSetProperty(audiofile, kExtAudioFileProperty_ClientDataFormat, sizeof(clientFormat), &clientFormat);

    // Only report a failure; the original logged unconditionally, even on noErr.
    if (err != noErr) {
        error = [NSError errorWithDomain:NSOSStatusErrorDomain
                                    code:err
                                userInfo:nil];
        NSLog(@"Error: %@", [error description]);
    }

    // write the data
    err = ExtAudioFileWrite(audiofile, rFrames, bufferList);

    // close the file
    ExtAudioFileDispose(audiofile);

    // destroy the buffers
    for (int ii=0; ii < bufferList->mNumberBuffers; ++ii) {
        free(bufferList->mBuffers[ii].mData);
    }
    free(bufferList);
    bufferList = NULL;

}
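
A hypothetical call site for the method above (the file name and the Documents-directory lookup are illustrative):

// Hypothetical usage: halve the amplitude of a file in the app's Documents directory.
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *path = [[paths objectAtIndex:0] stringByAppendingPathComponent:@"recording.caf"];
[self scaleAudioFileAmplitude:[NSURL fileURLWithPath:path] withAmpScale:0.5f];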

End of AVAudioSession AVAudioSessionCategoryAudioProcessing example article.