//
// OutputCoreAudio.m
// Cog
//
// Created by Vincent Spader on 8/2/05.
// Copyright 2005 Vincent Spader. All rights reserved.
//

#import "OutputCoreAudio.h"
|
2007-02-24 20:36:27 +00:00
|
|
|
#import "OutputNode.h"
|
2005-09-07 22:33:16 +00:00
|
|
|
|
2013-10-11 12:03:55 +00:00
|
|
|
#import "Logging.h"
|
|
|
|
|
2005-09-07 22:33:16 +00:00
|
|
|
@implementation OutputCoreAudio

- (id)initWithController:(OutputNode *)c
{
	self = [super init];
	if (self)
	{
		outputController = c;
		outputUnit = NULL;

		[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.outputDevice" options:0 context:NULL];
	}

	return self;
}

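// Render callback installed on the output AudioUnit: called on the audio thread whenever the
// unit needs more data, and fills the supplied buffer with audio pulled from the output controller.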
static OSStatus Sound_Renderer(void *inRefCon, AudioUnitRenderActionFlags *ioActionFlags, const AudioTimeStamp *inTimeStamp, UInt32 inBusNumber, UInt32 inNumberFrames, AudioBufferList *ioData)
{
	OutputCoreAudio *output = (OutputCoreAudio *)inRefCon;
	OSStatus err = noErr;
	void *readPointer = ioData->mBuffers[0].mData;

	int amountToRead, amountRead;

	if ([output->outputController shouldContinue] == NO)
	{
		AudioOutputUnitStop(output->outputUnit);
//		[output stop];

		return err;
	}

	amountToRead = inNumberFrames * (output->deviceFormat.mBytesPerPacket);
	amountRead = [output->outputController readData:(readPointer) amount:amountToRead];

	if ((amountRead < amountToRead) && [output->outputController endOfStream] == NO) // Try reading once more, to bridge track changes
	{
		int amountRead2; // Separate variable since readData's return type isn't known here; once it is, this can become a simple +=
		amountRead2 = [output->outputController readData:(readPointer + amountRead) amount:amountToRead - amountRead];
		amountRead += amountRead2;
	}

	ioData->mBuffers[0].mDataByteSize = amountRead;
	ioData->mBuffers[0].mNumberChannels = output->deviceFormat.mChannelsPerFrame;
	ioData->mNumberBuffers = 1;

	return err;
}

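// KVO callback for the outputDevice user default registered in init; switches playback to the newly selected device.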
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
	if ([keyPath isEqualToString:@"values.outputDevice"]) {
		NSDictionary *device = [[[NSUserDefaultsController sharedUserDefaultsController] defaults] objectForKey:@"outputDevice"];
		NSNumber *deviceID = [device objectForKey:@"deviceID"];

		[self setOutputDevice:[deviceID longValue]];
	}
}

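// Routes output to the given device by setting kAudioOutputUnitProperty_CurrentDevice on the output unit.
// Passing -1 falls back to the system default output device.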
- (BOOL)setOutputDevice:(AudioDeviceID)outputDevice
{
	// Set the output device
	AudioDeviceID deviceID = outputDevice; //XXX use default if null
	OSStatus err;

	if (outputDevice == -1) {
		DLog(@"DEVICE IS -1");
		UInt32 size = sizeof(AudioDeviceID);
		AudioObjectPropertyAddress theAddress = {
			.mSelector = kAudioHardwarePropertyDefaultOutputDevice,
			.mScope = kAudioObjectPropertyScopeGlobal,
			.mElement = kAudioObjectPropertyElementMaster
		};
		err = AudioObjectGetPropertyData(kAudioObjectSystemObject, &theAddress, 0, NULL, &size, &deviceID);

		if (err != noErr) {
			ALog(@"There is no default output device");

			return NO;
		}
	}

	DLog(@"DEVICE: %i", deviceID);
	outputDeviceID = deviceID;

	err = AudioUnitSetProperty(outputUnit,
	                           kAudioOutputUnitProperty_CurrentDevice,
	                           kAudioUnitScope_Output,
	                           0,
	                           &deviceID,
	                           sizeof(AudioDeviceID));

	if (err != noErr) {
		ALog(@"Could not set the output device, error code %d", err);

		return NO;
	}

	return YES;
}

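// Creates the default output AudioUnit, selects the configured (or default) device, builds a channel map
// matching the device's preferred layout, matches the stream format, and installs the render callback.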
- (BOOL)setup
{
	if (outputUnit)
		[self stop];

	AudioObjectPropertyAddress propertyAddress = {
		.mElement = kAudioObjectPropertyElementMaster
	};
	UInt32 dataSize;

	ComponentDescription desc;
	OSStatus err;

	desc.componentType = kAudioUnitType_Output;
	desc.componentSubType = kAudioUnitSubType_DefaultOutput;
	desc.componentManufacturer = kAudioUnitManufacturer_Apple;
	desc.componentFlags = 0;
	desc.componentFlagsMask = 0;

	Component comp = FindNextComponent(NULL, &desc); // Finds a component that matches the description
	if (comp == NULL)
		return NO;

	err = OpenAComponent(comp, &outputUnit); // Gains access to the services provided by the component
	if (err)
		return NO;

	// Initialize AudioUnit
	err = AudioUnitInitialize(outputUnit);
	if (err != noErr)
		return NO;

	// Set up the output device before mucking with settings
	NSDictionary *device = [[[NSUserDefaultsController sharedUserDefaultsController] defaults] objectForKey:@"outputDevice"];
	if (device) {
		BOOL ok = [self setOutputDevice:[[device objectForKey:@"deviceID"] longValue]];
		if (!ok) {
			// The saved device is gone or unusable; fall back to the default device and clear the stale preference
			[self setOutputDevice: -1];

			[[[NSUserDefaultsController sharedUserDefaultsController] defaults] removeObjectForKey:@"outputDevice"];
		}
	}
	else {
		[self setOutputDevice: -1];
	}

	UInt32 size = sizeof(AudioStreamBasicDescription);
	Boolean outWritable;
	// Get the size of the stream format property and whether it is writable
	AudioUnitGetPropertyInfo(outputUnit,
	                         kAudioUnitProperty_StreamFormat,
	                         kAudioUnitScope_Output,
	                         0,
	                         &size,
	                         &outWritable);
	// Get the current stream format of the output
	err = AudioUnitGetProperty(outputUnit,
	                           kAudioUnitProperty_StreamFormat,
	                           kAudioUnitScope_Output,
	                           0,
	                           &deviceFormat,
	                           &size);

	if (err != noErr)
		return NO;

	// Change the output format...

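	// Build a channel map for the output unit: one entry per device output channel, where each entry
	// names the source channel to feed it, and -1 leaves that device channel silent.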
	// The default channel map is silence
	SInt32 deviceChannelMap [deviceFormat.mChannelsPerFrame];
	for(UInt32 i = 0; i < deviceFormat.mChannelsPerFrame; ++i)
		deviceChannelMap[i] = -1;

	// Determine the device's preferred stereo channels for output mapping
	if(1 == deviceFormat.mChannelsPerFrame || 2 == deviceFormat.mChannelsPerFrame) {
		propertyAddress.mSelector = kAudioDevicePropertyPreferredChannelsForStereo;
		propertyAddress.mScope = kAudioDevicePropertyScopeOutput;

		UInt32 preferredStereoChannels [2] = { 1, 2 };
		if(AudioObjectHasProperty(outputDeviceID, &propertyAddress)) {
			dataSize = sizeof(preferredStereoChannels);

			err = AudioObjectGetPropertyData(outputDeviceID, &propertyAddress, 0, nil, &dataSize, &preferredStereoChannels);
		}

		AudioChannelLayout stereoLayout;
		stereoLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;

		const AudioChannelLayout *specifier [1] = { &stereoLayout };

		SInt32 stereoChannelMap [2] = { 1, 2 };
		dataSize = sizeof(stereoChannelMap);
		err = AudioFormatGetProperty(kAudioFormatProperty_ChannelMap, sizeof(specifier), specifier, &dataSize, stereoChannelMap);

		if(noErr == err) {
			deviceChannelMap[preferredStereoChannels[0] - 1] = stereoChannelMap[0];
			deviceChannelMap[preferredStereoChannels[1] - 1] = stereoChannelMap[1];
		}
		else {
			// Just use a channel map that makes sense
			deviceChannelMap[preferredStereoChannels[0] - 1] = 0;
			deviceChannelMap[preferredStereoChannels[1] - 1] = 1;
		}
	}
	// Determine the device's preferred multichannel layout
	else {
		propertyAddress.mSelector = kAudioDevicePropertyPreferredChannelLayout;
		propertyAddress.mScope = kAudioDevicePropertyScopeOutput;

		if(AudioObjectHasProperty(outputDeviceID, &propertyAddress)) {
			err = AudioObjectGetPropertyDataSize(outputDeviceID, &propertyAddress, 0, nil, &dataSize);

			AudioChannelLayout *preferredChannelLayout = (AudioChannelLayout *)(malloc(dataSize));

			err = AudioObjectGetPropertyData(outputDeviceID, &propertyAddress, 0, nil, &dataSize, preferredChannelLayout);

			const AudioChannelLayout *specifier [1] = { preferredChannelLayout };

			// Not all channel layouts can be mapped, so handle failure with a generic mapping
			dataSize = (UInt32)sizeof(deviceChannelMap);
			err = AudioFormatGetProperty(kAudioFormatProperty_ChannelMap, sizeof(specifier), specifier, &dataSize, deviceChannelMap);

			if(noErr != err) {
				// Just use a channel map that makes sense
				for(UInt32 i = 0; i < deviceFormat.mChannelsPerFrame; ++i)
					deviceChannelMap[i] = i;
			}

			free(preferredChannelLayout), preferredChannelLayout = nil;
		}
		else {
			// Just use a channel map that makes sense
			for(UInt32 i = 0; i < deviceFormat.mChannelsPerFrame; ++i)
				deviceChannelMap[i] = i;
		}
	}

	// Some third-party devices return incorrect format flags here, and we don't want non-interleaved data anyway
	deviceFormat.mFormatFlags &= ~kLinearPCMFormatFlagIsNonInterleaved;
//	deviceFormat.mFormatFlags &= ~kLinearPCMFormatFlagIsFloat;
//	deviceFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger;
	deviceFormat.mBytesPerFrame = deviceFormat.mChannelsPerFrame * (deviceFormat.mBitsPerChannel / 8);
	deviceFormat.mBytesPerPacket = deviceFormat.mBytesPerFrame * deviceFormat.mFramesPerPacket;

	err = AudioUnitSetProperty(outputUnit,
	                           kAudioUnitProperty_StreamFormat,
	                           kAudioUnitScope_Output,
	                           0,
	                           &deviceFormat,
	                           size);

	// Set the stream format of the unit's input scope to match the adjusted output format
	err = AudioUnitSetProperty(outputUnit,
	                           kAudioUnitProperty_StreamFormat,
	                           kAudioUnitScope_Input,
	                           0,
	                           &deviceFormat,
	                           size);

	// Apply the channel map built above
	size = sizeof(deviceChannelMap);
	err = AudioUnitSetProperty(outputUnit,
	                           kAudioOutputUnitProperty_ChannelMap,
	                           kAudioUnitScope_Output,
	                           0,
	                           deviceChannelMap,
	                           size);

	// Set up the render callback
	renderCallback.inputProc = Sound_Renderer;
	renderCallback.inputProcRefCon = self;

	AudioUnitSetProperty(outputUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &renderCallback, sizeof(AURenderCallbackStruct));

	[outputController setFormat:&deviceFormat];

	return (err == noErr);
}

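// The volume v appears to be on a 0 to 100 scale; kHALOutputParam_Volume takes 0 to 1, hence the 0.01 factor.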
- (void)setVolume:(double)v
{
	AudioUnitSetParameter(outputUnit,
	                      kHALOutputParam_Volume,
	                      kAudioUnitScope_Global,
	                      0,
	                      v * 0.01f,
	                      0);
}

- (void)start
{
	AudioOutputUnitStart(outputUnit);
}

- (void)stop
{
	if (outputUnit)
	{
		AudioOutputUnitStop(outputUnit);
		AudioUnitUninitialize(outputUnit);
		CloseComponent(outputUnit);
		outputUnit = NULL;
	}
}

- (void)dealloc
{
	[self stop];

	[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.outputDevice"];

	[super dealloc];
}

- (void)pause
{
	AudioOutputUnitStop(outputUnit);
}

- (void)resume
{
	AudioOutputUnitStart(outputUnit);
}

@end