cog/Audio/Output/OutputCoreAudio.m

//
// OutputCoreAudio.m
// Cog
//
// Created by Vincent Spader on 8/2/05.
// Copyright 2005 Vincent Spader. All rights reserved.
//
#import "OutputCoreAudio.h"
#import "OutputNode.h"
#import "Logging.h"
extern void scale_by_volume(float * buffer, size_t count, float volume);
@implementation OutputCoreAudio
- (id)initWithController:(OutputNode *)c
{
    self = [super init];
    if (self)
    {
        outputController = c;
        _au = nil;
        _bufferSize = 0;
        volume = 1.0;
        outputDeviceID = -1;
        listenerapplied = NO;
        running = NO;
        started = NO;

        atomic_init(&bytesRendered, 0);

        writeSemaphore = [[Semaphore alloc] init];
        readSemaphore = [[Semaphore alloc] init];

        [[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.outputDevice" options:0 context:NULL];
    }

    return self;
}
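
// CoreAudio property listener: invoked when the system default output device
// changes while we are following the default device; re-resolves the default
// by passing -1 to -setOutputDeviceByID:.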
static OSStatus
default_device_changed(AudioObjectID inObjectID, UInt32 inNumberAddresses, const AudioObjectPropertyAddress *inAddresses, void *inUserData)
{
    OutputCoreAudio *this = (__bridge OutputCoreAudio *) inUserData;
    return [this setOutputDeviceByID:-1];
}
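
// KVO notification for the user's output device preference; re-applies the
// device selection whenever values.outputDevice changes.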
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    if ([keyPath isEqualToString:@"values.outputDevice"]) {
        NSDictionary *device = [[[NSUserDefaultsController sharedUserDefaultsController] defaults] objectForKey:@"outputDevice"];

        [self setOutputDeviceWithDeviceDict:device];
    }
}
- (void)signalEndOfStream
{
    [outputController resetAmountPlayed];
    [outputController endOfInputPlayed];
}
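
// Feeder thread: repeatedly pulls decoded audio from the output node into the
// ring buffer in CHUNK_SIZE pieces, starts the hardware once data (or end of
// input) is available, and defers end-of-stream notifications until the render
// callback has actually played the corresponding bytes.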
- (void)threadEntry:(id)arg
{
    running = YES;
    started = NO;
    size_t eventCount = 0;
    atomic_store(&bytesRendered, 0);
    NSMutableArray *delayedEvents = [[NSMutableArray alloc] init];
    BOOL delayedEventsPopped = YES;

    while (!stopping) {
        if (++eventCount == 128) {
            [self updateDeviceFormat];
            eventCount = 0;
        }
        if ([outputController shouldReset]) {
            [[outputController buffer] empty];
            [outputController setShouldReset:NO];
        }
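        // End-of-stream events are queued with the byte position at which they
        // become audible; signal them only once the render callback has consumed
        // that many bytes, so track transitions line up with actual playback.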
        while ([delayedEvents count]) {
            size_t localBytesRendered = atomic_load_explicit(&bytesRendered, memory_order_relaxed);
            if (localBytesRendered >= [[delayedEvents objectAtIndex:0] longValue]) {
                if ([outputController chainQueueHasTracks])
                    delayedEventsPopped = YES;
                [self signalEndOfStream];
                [delayedEvents removeObjectAtIndex:0];
            }
            else break;
        }

        if (stopping)
            break;

        void *writePtr;
        int toWrite = [[outputController buffer] lengthAvailableToWriteReturningPointer:&writePtr];
        int bytesRead = 0;
        if (toWrite > CHUNK_SIZE)
            toWrite = CHUNK_SIZE;
        if (toWrite)
            bytesRead = [outputController readData:writePtr amount:toWrite];
        if (bytesRead) {
            [[outputController buffer] didWriteLength:bytesRead];
            [readSemaphore signal];
            continue;
        }
        else if ([outputController shouldContinue] == NO)
            break;
        else if (!toWrite) {
            if (!started) {
                started = YES;
                if (!paused) {
                    NSError *err;
                    [_au startHardwareAndReturnError:&err];
                }
            }
        }
        else {
            // End of input possibly reached
            if (delayedEventsPopped && [outputController endOfStream] == YES)
            {
                long bytesBuffered = [[outputController buffer] bufferedLength];
                bytesBuffered += atomic_load_explicit(&bytesRendered, memory_order_relaxed);
                if ([outputController chainQueueHasTracks])
                {
                    if (bytesBuffered < CHUNK_SIZE)
                        bytesBuffered = 0;
                    else
                        bytesBuffered -= CHUNK_SIZE;
                }
                [delayedEvents addObject:[NSNumber numberWithLong:bytesBuffered]];
                delayedEventsPopped = NO;
                if (!started) {
                    started = YES;
                    if (!paused) {
                        NSError *err;
                        [_au startHardwareAndReturnError:&err];
                    }
                }
            }
        }

        [readSemaphore signal];
        [writeSemaphore timedWait:5000];
    }

    stopped = YES;
    [self stop];
}
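
// Selects the CoreAudio output device for the audio unit. A deviceID of -1
// means "follow the system default": the current default is resolved here, and
// a property listener is installed so future default changes re-route output.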
- (OSStatus)setOutputDeviceByID:(AudioDeviceID)deviceID
{
    OSStatus err = noErr;
    BOOL defaultDevice = NO;
    AudioObjectPropertyAddress theAddress = {
        .mSelector = kAudioHardwarePropertyDefaultOutputDevice,
        .mScope = kAudioObjectPropertyScopeGlobal,
        .mElement = kAudioObjectPropertyElementMaster
    };

    if (deviceID == -1) {
        defaultDevice = YES;
        UInt32 size = sizeof(AudioDeviceID);
        err = AudioObjectGetPropertyData(kAudioObjectSystemObject, &theAddress, 0, NULL, &size, &deviceID);

        if (err != noErr) {
            DLog(@"THERE'S NO DEFAULT OUTPUT DEVICE");

            return err;
        }
    }

    if (_au) {
        AudioObjectPropertyAddress defaultDeviceAddress = theAddress;

        if (listenerapplied && !defaultDevice) {
            AudioObjectRemovePropertyListener(kAudioObjectSystemObject, &defaultDeviceAddress, default_device_changed, (__bridge void * _Nullable)(self));
            listenerapplied = NO;
        }

        if (outputDeviceID != deviceID) {
            DLog(@"Device: %i\n", deviceID);
            outputDeviceID = deviceID;

            NSError *nserr;
            [_au setDeviceID:outputDeviceID error:&nserr];
            if (nserr != nil) {
                return (OSErr)[nserr code];
            }
        }

        if (!listenerapplied && defaultDevice) {
            AudioObjectAddPropertyListener(kAudioObjectSystemObject, &defaultDeviceAddress, default_device_changed, (__bridge void * _Nullable)(self));
            listenerapplied = YES;
        }
    }
    else {
        err = noErr;
    }

    if (err != noErr) {
        DLog(@"No output device with ID %d could be found.", deviceID);

        return err;
    }

    return err;
}
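
// Applies a device preference dictionary of the form
// @{ @"deviceID": @(id), @"name": @"Device Name" }. If the stored ID no longer
// resolves (device unplugged or renumbered), falls back to matching the saved
// device name against the currently available outputs.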
- (BOOL)setOutputDeviceWithDeviceDict:(NSDictionary *)deviceDict
{
    NSNumber *deviceIDNum = [deviceDict objectForKey:@"deviceID"];
    AudioDeviceID outputDeviceID = [deviceIDNum unsignedIntValue] ?: -1;

    __block OSStatus err = [self setOutputDeviceByID:outputDeviceID];

    if (err != noErr) {
        // Try matching by name.
        NSString *userDeviceName = deviceDict[@"name"];

        [self enumerateAudioOutputsUsingBlock:
         ^(NSString *deviceName, AudioDeviceID deviceID, AudioDeviceID systemDefaultID, BOOL *stop) {
            if ([deviceName isEqualToString:userDeviceName]) {
                err = [self setOutputDeviceByID:deviceID];

#if 0
                // Disabled: would cause a loop by re-triggering
                // -observeValueForKeyPath:ofObject:change:context: above.
                // Update `outputDevice`, in case the ID has changed.
                NSDictionary *deviceInfo = @{
                    @"name": deviceName,
                    @"deviceID": @(deviceID),
                };
                [[NSUserDefaults standardUserDefaults] setObject:deviceInfo forKey:@"outputDevice"];
#endif

                DLog(@"Found output device: \"%@\" (%d).", deviceName, deviceID);

                *stop = YES;
            }
        }];
    }

    if (err != noErr) {
        ALog(@"No output device could be found, your random error code is %d. Have a nice day!", err);
        return NO;
    }

    return YES;
}
// The following is largely copied from -awakeFromNib in "OutputsArrayController.m".
// TODO: Share the code. (How to do this across xcodeproj?)
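// Calls `block` once per output-capable device (devices with at least one
// output stream), passing its name, AudioDeviceID, and the current system
// default output device ID.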
- (void)enumerateAudioOutputsUsingBlock:(void (NS_NOESCAPE ^ _Nonnull)(NSString *deviceName, AudioDeviceID deviceID, AudioDeviceID systemDefaultID, BOOL *stop))block
{
    UInt32 propsize;
    AudioObjectPropertyAddress theAddress = {
        .mSelector = kAudioHardwarePropertyDevices,
        .mScope = kAudioObjectPropertyScopeGlobal,
        .mElement = kAudioObjectPropertyElementMaster
    };

    __Verify_noErr(AudioObjectGetPropertyDataSize(kAudioObjectSystemObject, &theAddress, 0, NULL, &propsize));
    UInt32 nDevices = propsize / (UInt32)sizeof(AudioDeviceID);
    AudioDeviceID *devids = malloc(propsize);
    __Verify_noErr(AudioObjectGetPropertyData(kAudioObjectSystemObject, &theAddress, 0, NULL, &propsize, devids));

    theAddress.mSelector = kAudioHardwarePropertyDefaultOutputDevice;
    AudioDeviceID systemDefault;
    propsize = sizeof(systemDefault);
    __Verify_noErr(AudioObjectGetPropertyData(kAudioObjectSystemObject, &theAddress, 0, NULL, &propsize, &systemDefault));

    theAddress.mScope = kAudioDevicePropertyScopeOutput;

    for (UInt32 i = 0; i < nDevices; ++i) {
        CFStringRef name = NULL;
        propsize = sizeof(name);
        theAddress.mSelector = kAudioDevicePropertyDeviceNameCFString;
        __Verify_noErr(AudioObjectGetPropertyData(devids[i], &theAddress, 0, NULL, &propsize, &name));

        propsize = 0;
        theAddress.mSelector = kAudioDevicePropertyStreamConfiguration;
        __Verify_noErr(AudioObjectGetPropertyDataSize(devids[i], &theAddress, 0, NULL, &propsize));

        if (propsize < sizeof(UInt32)) {
            CFRelease(name);
            continue;
        }

        AudioBufferList * bufferList = (AudioBufferList *) malloc(propsize);
        __Verify_noErr(AudioObjectGetPropertyData(devids[i], &theAddress, 0, NULL, &propsize, bufferList));
        UInt32 bufferCount = bufferList->mNumberBuffers;
        free(bufferList);

        if (!bufferCount) {
            CFRelease(name);
            continue;
        }

        BOOL stop = NO;
        block([NSString stringWithString:(__bridge NSString *)name],
              devids[i],
              systemDefault,
              &stop);

        CFRelease(name);

        if (stop) {
            break;
        }
    }

    free(devids);
}
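
// Synchronizes the render format with the hardware output bus: forces
// interleaved samples, caps the stream at 8 channels, derives the matching
// channel layout tag, and pushes the resulting format upstream to the
// output node.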
- (BOOL)updateDeviceFormat
{
    AVAudioFormat *format = _au.outputBusses[0].format;

    if (!_deviceFormat || ![_deviceFormat isEqual:format])
    {
        NSError *err;
        AVAudioFormat *renderFormat;

        _deviceFormat = format;
        deviceFormat = *(format.streamDescription);

        /// Seems some 3rd party devices return incorrect stuff...or I just don't like noninterleaved data.
        deviceFormat.mFormatFlags &= ~kLinearPCMFormatFlagIsNonInterleaved;
        // deviceFormat.mFormatFlags &= ~kLinearPCMFormatFlagIsFloat;
        // deviceFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger;
        // We don't want more than 8 channels
        if (deviceFormat.mChannelsPerFrame > 8) {
            deviceFormat.mChannelsPerFrame = 8;
        }
        deviceFormat.mBytesPerFrame = deviceFormat.mChannelsPerFrame * (deviceFormat.mBitsPerChannel / 8);
        deviceFormat.mBytesPerPacket = deviceFormat.mBytesPerFrame * deviceFormat.mFramesPerPacket;

        /* Set the channel layout for the audio unit's render format */
        AudioChannelLayoutTag tag = 0;
        switch (deviceFormat.mChannelsPerFrame) {
            case 1:
                tag = kAudioChannelLayoutTag_Mono;
                break;
            case 2:
                tag = kAudioChannelLayoutTag_Stereo;
                break;
            case 3:
                tag = kAudioChannelLayoutTag_DVD_4;
                break;
            case 4:
                tag = kAudioChannelLayoutTag_Quadraphonic;
                break;
            case 5:
                tag = kAudioChannelLayoutTag_MPEG_5_0_A;
                break;
            case 6:
                tag = kAudioChannelLayoutTag_MPEG_5_1_A;
                break;
            case 7:
                tag = kAudioChannelLayoutTag_MPEG_6_1_A;
                break;
            case 8:
                tag = kAudioChannelLayoutTag_MPEG_7_1_A;
                break;
        }

        renderFormat = [[AVAudioFormat alloc] initWithStreamDescription:&deviceFormat channelLayout:[[AVAudioChannelLayout alloc] initWithLayoutTag:tag]];
        [_au.inputBusses[0] setFormat:renderFormat error:&err];
        if (err != nil)
            return NO;

        [outputController setFormat:&deviceFormat];
    }

    return YES;
}
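
// Creates the HAL output AUAudioUnit, applies the user's saved output device
// (falling back to the system default if it cannot be restored), negotiates
// the render format, and installs the pull-model output provider block.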
- (BOOL)setup
{
    if (_au)
        [self stop];

    running = NO;
    stopping = NO;
    stopped = NO;
    paused = NO;
    outputDeviceID = -1;

    AudioComponentDescription desc;
    NSError *err;

    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_HALOutput;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;

    _au = [[AUAudioUnit alloc] initWithComponentDescription:desc error:&err];
    if (err != nil)
        return NO;

    // Set up the output device before mucking with settings
    NSDictionary *device = [[[NSUserDefaultsController sharedUserDefaultsController] defaults] objectForKey:@"outputDevice"];
    if (device) {
        BOOL ok = [self setOutputDeviceWithDeviceDict:device];
        if (!ok) {
            //Ruh roh.
            [self setOutputDeviceWithDeviceDict:nil];
            [[[NSUserDefaultsController sharedUserDefaultsController] defaults] removeObjectForKey:@"outputDevice"];
        }
    }
    else {
        [self setOutputDeviceWithDeviceDict:nil];
    }

    _deviceFormat = nil;
    [self updateDeviceFormat];

    __block Semaphore * writeSemaphore = self->writeSemaphore;
    __block Semaphore * readSemaphore = self->readSemaphore;
    __block OutputNode * outputController = self->outputController;
    __block float * volume = &self->volume;
    __block atomic_long * bytesRendered = &self->bytesRendered;
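
    // Pull-model render callback: CoreAudio calls this on the realtime thread
    // whenever it needs audio. It copies from the ring buffer, wakes the feeder
    // thread, applies the volume scale, and pads with silence on underrun.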
    _au.outputProvider = ^AUAudioUnitStatus(AudioUnitRenderActionFlags * actionFlags, const AudioTimeStamp * timestamp, AUAudioFrameCount frameCount, NSInteger inputBusNumber, AudioBufferList * inputData)
    {
        void *readPointer = inputData->mBuffers[0].mData;

        int amountToRead, amountRead = 0;

        amountToRead = inputData->mBuffers[0].mDataByteSize;

        if (self->stopping == YES || [outputController shouldContinue] == NO)
        {
            // Chain is dead; output silence and flag the output for stopping
            memset(readPointer, 0, amountToRead);
            atomic_fetch_add(bytesRendered, amountToRead);
            self->stopping = YES;
            return 0;
        }

        if ([[outputController buffer] isEmpty] && ![outputController chainQueueHasTracks])
        {
            // Hit end of last track, pad with silence until queue event stops us
            memset(readPointer, 0, amountToRead);
            atomic_fetch_add(bytesRendered, amountToRead);
            return 0;
        }

        void * readPtr;
        int toRead = [[outputController buffer] lengthAvailableToReadReturningPointer:&readPtr];

        if (toRead > amountToRead)
            toRead = amountToRead;

        if (toRead) {
            memcpy(readPointer, readPtr, toRead);
            amountRead = toRead;
            [[outputController buffer] didReadLength:toRead];
            [outputController incrementAmountPlayed:amountRead];
            atomic_fetch_add(bytesRendered, amountRead);
            [writeSemaphore signal];
        }

        // Try repeatedly! Buffer wraps can cause a slight data shortage, as can
        // unexpected track changes.
        while ((amountRead < amountToRead) && [outputController shouldContinue] == YES)
        {
            int amountRead2; // bytes available, then bytes copied, in this pass
            amountRead2 = [[outputController buffer] lengthAvailableToReadReturningPointer:&readPtr];
            if (amountRead2 > (amountToRead - amountRead))
                amountRead2 = amountToRead - amountRead;
            if (amountRead2) {
                atomic_fetch_add(bytesRendered, amountRead2);
                memcpy(readPointer + amountRead, readPtr, amountRead2);
                [[outputController buffer] didReadLength:amountRead2];
                [outputController incrementAmountPlayed:amountRead2];
                amountRead += amountRead2;
                [writeSemaphore signal];
            }
            else {
                [readSemaphore timedWait:500];
            }
        }

        int framesRead = amountRead / sizeof(float);
        scale_by_volume((float*)readPointer, framesRead, *volume);

        if (amountRead < amountToRead)
        {
            // Either underrun, or no data at all. Caller output tends to just
            // buffer loop if it doesn't get anything, so always produce a full
            // buffer, and silence anything we couldn't supply.
            memset(readPointer + amountRead, 0, amountToRead - amountRead);
        }

        return 0;
    };

    [_au allocateRenderResourcesAndReturnError:&err];

    return (err == nil);
}
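
// Volume arrives as a percentage (0-100) and is stored as a 0.0-1.0 scale
// factor applied by the render callback.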
- (void)setVolume:(double)v
{
    volume = v * 0.01f;
}
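// Note: -threadEntry: runs the feeder loop and does not return until playback
// stops, so -start is presumably invoked from a dedicated playback thread.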
- (void)start
{
    [self threadEntry:nil];
}
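
// Tears down playback: signals the feeder thread and render callback to stop,
// removes the default-device listener, stops and releases the audio unit, and
// waits for the feeder thread to acknowledge shutdown.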
- (void)stop
{
    stopping = YES;
    paused = NO;
    [writeSemaphore signal];
    [readSemaphore signal];
    if (listenerapplied) {
        AudioObjectPropertyAddress theAddress = {
            .mSelector = kAudioHardwarePropertyDefaultOutputDevice,
            .mScope = kAudioObjectPropertyScopeGlobal,
            .mElement = kAudioObjectPropertyElementMaster
        };
        AudioObjectRemovePropertyListener(kAudioObjectSystemObject, &theAddress, default_device_changed, (__bridge void * _Nullable)(self));
        listenerapplied = NO;
    }
    if (_au) {
        [_au stopHardware];
        _au = nil;
    }
    if (running)
        while (!stopped)
        {
            stopping = YES;
            [readSemaphore signal];
            [writeSemaphore timedWait:5000];
        }
}
- (void)dealloc
{
    [self stop];
    [[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.outputDevice"];
}
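// Pause stops the output hardware without tearing down the audio unit; resume
// restarts it. The feeder thread keeps running and simply waits once the ring
// buffer is full.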
- (void)pause
{
    paused = YES;
    [_au stopHardware];
}
- (void)resume
{
    NSError *err;
    [_au startHardwareAndReturnError:&err];
    paused = NO;
}
@end