August 16, 2013

iOS: Force audio output to speakers while headphones are plugged in

After much searching through Apple's documentation and the scarce examples of what I wanted to do, I came up with the following code. A client wanted to play audio through the iPhone/iPad speakers while a microphone was plugged in. While this solution can't do both at the same time, it lets you switch back and forth between playing sounds through the speakers and recording through a plugged-in microphone or headset, without unplugging anything. It also defaults to the internal microphone and speakers if nothing is plugged in. Note that calling the setup method initially forces audio output through the speakers, even if headphones are plugged in. A rough usage sketch follows the implementation below. Hopefully this code helps someone facing similar issues.

AudioRouter.h
@interface AudioRouter : NSObject

+ (void) initAudioSessionRouting;
+ (void) switchToDefaultHardware;
+ (void) forceOutputToBuiltInSpeakers;

@end
AudioRouter.m
#import "AudioRouter.h"
#import <AudioToolbox/AudioToolbox.h>
#import <AVFoundation/AVFoundation.h>

@implementation AudioRouter

#define IS_DEBUGGING NO
#define IS_DEBUGGING_EXTRA_INFO NO

// forward declaration so the route-change callback can be registered in initAudioSessionRouting before it is defined below
void onAudioRouteChange (void* clientData, AudioSessionPropertyID inID, UInt32 dataSize, const void* inData);

+ (void) initAudioSessionRouting {
    
    // Called once to route all audio through speakers, even if something's plugged into the headphone jack
    static BOOL audioSessionSetup = NO;
    if (audioSessionSetup == NO) {
        
        // set category to accept properties assigned below
        NSError *sessionError = nil;
        [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayAndRecord withOptions:AVAudioSessionCategoryOptionDefaultToSpeaker error: &sessionError];
        
        // Doubly force audio to come out of speaker
        UInt32 audioRouteOverride = kAudioSessionOverrideAudioRoute_Speaker;
        AudioSessionSetProperty (kAudioSessionProperty_OverrideAudioRoute, sizeof(audioRouteOverride), &audioRouteOverride);
        
        // fix issue with audio interrupting video recording - allow audio to mix on top of other media
        UInt32 doSetProperty = 1;
        AudioSessionSetProperty (kAudioSessionProperty_OverrideCategoryMixWithOthers, sizeof(doSetProperty), &doSetProperty);
        
        // set active (no delegate needed - route changes are handled by the property listeners below)
        [[AVAudioSession sharedInstance] setActive:YES error:&sessionError];
        
        // add listener for audio input changes
        AudioSessionAddPropertyListener (kAudioSessionProperty_AudioRouteChange, onAudioRouteChange, nil );
        AudioSessionAddPropertyListener (kAudioSessionProperty_AudioInputAvailable, onAudioRouteChange, nil );
        
    }
    
    // Force audio to come out of speaker
    [[AVAudioSession sharedInstance] overrideOutputAudioPort:AVAudioSessionPortOverrideSpeaker error:nil];
    
    // set flag
    audioSessionSetup = YES;
}

+ (void) switchToDefaultHardware {
    // Remove forcing to built-in speaker
    UInt32 audioRouteOverride = kAudioSessionOverrideAudioRoute_None;
    AudioSessionSetProperty (kAudioSessionProperty_OverrideAudioRoute, sizeof(audioRouteOverride), &audioRouteOverride);
}

+ (void) forceOutputToBuiltInSpeakers {
    // Re-force audio to come out of speaker
    UInt32 audioRouteOverride = kAudioSessionOverrideAudioRoute_Speaker;
    AudioSessionSetProperty (kAudioSessionProperty_OverrideAudioRoute, sizeof(audioRouteOverride), &audioRouteOverride);
}

void onAudioRouteChange (void* clientData, AudioSessionPropertyID inID, UInt32 dataSize, const void* inData) {
    
    if( IS_DEBUGGING == YES ) {
        NSLog(@"==== Audio Harware Status ====");
        NSLog(@"Current Input:  %@", [AudioRouter getAudioSessionInput]);
        NSLog(@"Current Output: %@", [AudioRouter getAudioSessionOutput]);
        NSLog(@"Current hardware route: %@", [AudioRouter getAudioSessionRoute]);
        NSLog(@"==============================");
    }
    
    // extended info only applies to route-change notifications (inData is not a dictionary for kAudioSessionProperty_AudioInputAvailable)
    if( IS_DEBUGGING_EXTRA_INFO == YES && inID == kAudioSessionProperty_AudioRouteChange ) {
        NSLog(@"==== Audio Hardware Status (EXTENDED) ====");
        CFDictionaryRef dict = (CFDictionaryRef)inData;
        CFNumberRef reason = CFDictionaryGetValue(dict, kAudioSession_RouteChangeKey_Reason);
        CFDictionaryRef oldRoute = CFDictionaryGetValue(dict, kAudioSession_AudioRouteChangeKey_PreviousRouteDescription);
        CFDictionaryRef newRoute = CFDictionaryGetValue(dict, kAudioSession_AudioRouteChangeKey_CurrentRouteDescription);
        NSLog(@"Audio route change reason: %@", reason);
        NSLog(@"Audio old route: %@", oldRoute);
        NSLog(@"Audio new route: %@", newRoute);
        NSLog(@"=========================================");
    }
}

+ (NSString*) getAudioSessionInput {
    UInt32 routeSize;
    AudioSessionGetPropertySize(kAudioSessionProperty_AudioRouteDescription, &routeSize);
    CFDictionaryRef desc; // this is the dictionary to contain the route description
    
    // make the call to get the audio route description and populate the desc dictionary
    AudioSessionGetProperty (kAudioSessionProperty_AudioRouteDescription, &routeSize, &desc);
    
    // the dictionary contains 2 keys, for inputs and outputs. Get the input array
    CFArrayRef inputs = CFDictionaryGetValue(desc, kAudioSession_AudioRouteKey_Inputs);
    
    // guard against an empty array (e.g. no input route available)
    if (inputs == NULL || CFArrayGetCount(inputs) == 0) return @"None";
    
    // the input array contains 1 element - a dictionary
    CFDictionaryRef diction = CFArrayGetValueAtIndex(inputs, 0);
    
    // get the input type description from the dictionary
    CFStringRef input = CFDictionaryGetValue(diction, kAudioSession_AudioRouteKey_Type);
    return [NSString stringWithFormat:@"%@", input];
}

+ (NSString*) getAudioSessionOutput {
    UInt32 routeSize;
    AudioSessionGetPropertySize(kAudioSessionProperty_AudioRouteDescription, &routeSize);
    CFDictionaryRef desc; // this is the dictionary to contain the route description
    
    // make the call to get the audio route description and populate the desc dictionary
    AudioSessionGetProperty (kAudioSessionProperty_AudioRouteDescription, &routeSize, &desc);
    
    // the dictionary contains 2 keys, for inputs and outputs. Get the output array
    CFArrayRef outputs = CFDictionaryGetValue(desc, kAudioSession_AudioRouteKey_Outputs);
    
    // guard against an empty array (e.g. no output route available)
    if (outputs == NULL || CFArrayGetCount(outputs) == 0) return @"None";
    
    // the output array contains 1 element - a dictionary
    CFDictionaryRef diction = CFArrayGetValueAtIndex(outputs, 0);
    
    // get the output type description from the dictionary
    CFStringRef output = CFDictionaryGetValue(diction, kAudioSession_AudioRouteKey_Type);
    return [NSString stringWithFormat:@"%@", output];
}

+ (NSString*) getAudioSessionRoute {
    /*
     returns the current session route:
     * ReceiverAndMicrophone
     * HeadsetInOut
     * Headset
     * HeadphonesAndMicrophone
     * Headphone
     * SpeakerAndMicrophone
     * Speaker
     * HeadsetBT
     * LineInOut
     * Lineout
     * Default
    */
    
    UInt32 rSize = sizeof (CFStringRef);
    CFStringRef route;
    AudioSessionGetProperty (kAudioSessionProperty_AudioRoute, &rSize, &route);
    
    if (route == NULL) {
        NSLog(@"Silent switch is currently on");
        return @"None";
    }
    return [NSString stringWithFormat:@"%@", route];
}

@end
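
For context, here is a rough sketch of how I call the class from elsewhere in an app. The app-delegate and recording methods shown here are hypothetical stand-ins, and the actual AVAudioRecorder / AVAudioPlayer work is omitted:

#import "AudioRouter.h"

// At launch: route all audio through the built-in speakers, even if headphones are plugged in.
- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
    [AudioRouter initAudioSessionRouting];
    return YES;
}

// Hypothetical method on a recording screen: before capturing through a plugged-in
// headset microphone, restore the default hardware route.
- (void)beginHeadsetRecording {
    [AudioRouter switchToDefaultHardware];
    // ... start AVAudioRecorder / Audio Queue capture here ...
}

// Hypothetical method: once recording is done, push playback back out of the built-in speakers.
- (void)playBackThroughSpeakers {
    // ... stop capture ...
    [AudioRouter forceOutputToBuiltInSpeakers];
    // ... start AVAudioPlayer playback here ...
}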