Implement graphic equalizer
parent c640481c25
commit 71b2f7a4f2

@@ -97,6 +97,9 @@
- (IBAction)toggleMiniMode:(id)sender;
- (IBAction)toggleToolbarStyle:(id)sender;

@property NSWindow * mainWindow;
@property NSWindow * miniWindow;

@property BOOL miniMode;

@property (nonatomic) BOOL floatingMiniWindow;

@@ -29,6 +29,9 @@ void* kAppControllerContext = &kAppControllerContext;
BOOL _isFullToolbarStyle;
}

@synthesize mainWindow;
@synthesize miniWindow;

+ (void)initialize
{
// Register transformers
@@ -552,7 +555,7 @@ void* kAppControllerContext = &kAppControllerContext;

- (void)clickSpam
{
[playbackController spam];
[playbackController spam:nil];
}

- (void)clickSeek:(NSTimeInterval)position

@@ -6,6 +6,14 @@
#import "CogAudio/Status.h"
#import "TrackingSlider.h"
#import "AudioScrobbler.h"
#import "AppController.h"

#import <AudioToolbox/AudioToolbox.h>
#import <AudioUnit/AudioUnit.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreAudio/CoreAudioTypes.h>

#import "AUPlayerView.h"

#define DEFAULT_VOLUME_DOWN 5
#define DEFAULT_VOLUME_UP DEFAULT_VOLUME_DOWN
@@ -23,6 +31,8 @@ extern NSDictionary * makeRGInfo(PlaylistEntry *pe);

@interface PlaybackController : NSObject
{
IBOutlet AppController *appController;

IBOutlet PlaylistController *playlistController;
IBOutlet PlaylistView *playlistView;
IBOutlet PlaylistLoader *playlistLoader;
@@ -43,6 +53,11 @@ extern NSDictionary * makeRGInfo(PlaylistEntry *pe);

// progress bar display
double progressBarStatus;

BOOL _eqWasOpen;
BOOL _eqStubbed;
AudioUnit _eq;
AUPluginUI *_equi;
}

@property CogStatus playbackStatus;
@@ -73,7 +88,9 @@ extern NSDictionary * makeRGInfo(PlaylistEntry *pe);
- (void)seekBackward:(double)amount;
- (IBAction)fade:(id)sender;

- (IBAction)spam;
- (IBAction)spam:(id)sender;

- (IBAction)showEq:(id)sender;

- (void)sendMetaData;

@@ -43,6 +43,8 @@ NSString *CogPlaybackDidStopNotficiation = @"CogPlaybackDidStopNotficiation";

seekable = NO;
fading = NO;
_eqWasOpen = NO;
_equi = nil;

progressBarStatus = -1;

@@ -280,7 +282,7 @@ NSDictionary * makeRGInfo(PlaylistEntry *pe)
[[playlistController currentEntry] setCurrentPosition:time];
}

- (IBAction)spam
- (IBAction)spam:(id)sender
{
NSPasteboard *pboard = [NSPasteboard generalPasteboard];

@@ -550,6 +552,162 @@ NSDictionary * makeRGInfo(PlaylistEntry *pe)
[[NSUserDefaults standardUserDefaults] setDouble:[audioPlayer volume] forKey:@"volume"];
}

- (void)showStubEq
{
// Show a stopped equalizer as a stub
OSStatus err;
AudioComponentDescription desc;

desc.componentType = kAudioUnitType_Effect;
desc.componentSubType = kAudioUnitSubType_GraphicEQ;
desc.componentManufacturer = kAudioUnitManufacturer_Apple;
desc.componentFlags = 0;
desc.componentFlagsMask = 0;

AudioComponent comp = NULL;

desc.componentType = kAudioUnitType_Effect;
desc.componentSubType = kAudioUnitSubType_GraphicEQ;

comp = AudioComponentFindNext(comp, &desc);
if (!comp)
return;

err = AudioComponentInstanceNew(comp, &_eq);
if (err)
return;

AudioUnitInitialize(_eq);

_eqStubbed = YES;
}

- (void)hideStubEq
{
AudioUnitUninitialize(_eq);
AudioComponentInstanceDispose(_eq);
_eq = NULL;
_eqStubbed = NO;
}

- (IBAction)showEq:(id)sender
{
if (_eq)
{
if (_equi && [_equi isOpen])
[_equi bringToFront];
else
_equi = [[AUPluginUI alloc] initWithSampler:_eq bringToFront:YES orWindowNumber:0];
}
else
{
[self showStubEq];
_eqWasOpen = YES;
[self audioPlayer:nil displayEqualizer:_eq];
[_equi bringToFront];
}
}

- (void)audioPlayer:(AudioPlayer *)player displayEqualizer:(AudioUnit)eq
{
if (_equi)
{
_eqWasOpen = [_equi isOpen];
_equi = nil;
}

if (_eq && _eq != eq) {
OSStatus err;
CFPropertyListRef classData;
UInt32 size;

size = sizeof(classData);
err = AudioUnitGetProperty(_eq, kAudioUnitProperty_ClassInfo, kAudioUnitScope_Global, 0, &classData, &size);
if (err == noErr)
{
CFPreferencesSetAppValue(CFSTR("GraphEQ_Preset"), classData, kCFPreferencesCurrentApplication);
CFRelease(classData);
}

CFPreferencesAppSynchronize(kCFPreferencesCurrentApplication);

if (_eqStubbed)
{
[self hideStubEq];
}
}

_eq = eq;

{
OSStatus err;
ComponentDescription cd;
CFPropertyListRef classData;
CFDictionaryRef dict;
CFNumberRef cfnum;

classData = CFPreferencesCopyAppValue(CFSTR("GraphEQ_Preset"), kCFPreferencesCurrentApplication);
if (classData)
{
dict = (CFDictionaryRef) classData;

cfnum = (CFNumberRef) (CFDictionaryGetValue(dict, CFSTR("type")));
CFNumberGetValue(cfnum, kCFNumberSInt32Type, &cd.componentType);
cfnum = (CFNumberRef) (CFDictionaryGetValue(dict, CFSTR("subtype")));
CFNumberGetValue(cfnum, kCFNumberSInt32Type, &cd.componentSubType);
cfnum = (CFNumberRef) (CFDictionaryGetValue(dict, CFSTR("manufacturer")));
CFNumberGetValue(cfnum, kCFNumberSInt32Type, &cd.componentManufacturer);

if ((cd.componentType == kAudioUnitType_Effect ) &&
(cd.componentSubType == kAudioUnitSubType_GraphicEQ) &&
(cd.componentManufacturer == kAudioUnitManufacturer_Apple ))
err = AudioUnitSetProperty(eq, kAudioUnitProperty_ClassInfo, kAudioUnitScope_Global, 0, &classData, sizeof(classData));

CFRelease(classData);
}
}

if (_eqWasOpen)
{
NSWindow * window = appController.miniMode ? appController.miniWindow : appController.mainWindow;
_equi = [[AUPluginUI alloc] initWithSampler:_eq bringToFront:NO orWindowNumber:window.windowNumber];
_eqWasOpen = NO;
}
}

- (void)audioPlayer:(AudioPlayer *)player removeEqualizer:(AudioUnit)eq
{
if (eq == _eq)
{
OSStatus err;
CFPropertyListRef classData;
UInt32 size;

size = sizeof(classData);
err = AudioUnitGetProperty(eq, kAudioUnitProperty_ClassInfo, kAudioUnitScope_Global, 0, &classData, &size);
if (err == noErr)
{
CFPreferencesSetAppValue(CFSTR("GraphEQ_Preset"), classData, kCFPreferencesCurrentApplication);
CFRelease(classData);
}

CFPreferencesAppSynchronize(kCFPreferencesCurrentApplication);

if (_equi)
{
_eqWasOpen = [_equi isOpen];
}

_equi = nil;
_eq = nil;

if (_eqWasOpen)
{
[self showEq:nil];
}
}
}

- (void)audioPlayer:(AudioPlayer *)player willEndStream:(id)userInfo
{
PlaylistEntry *curEntry = (PlaylistEntry *)userInfo;

@@ -10,6 +10,12 @@

#import <CogAudio/Semaphore.h>

#import <CoreAudio/CoreAudio.h>
#import <AudioToolbox/AudioToolbox.h>
#import <AudioUnit/AudioUnit.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreAudio/CoreAudioTypes.h>

#import <stdatomic.h>

@class BufferChain;
@@ -95,6 +101,7 @@
//- (BufferChain *)bufferChain;
- (void)launchOutputThread;
- (void)endOfInputPlayed;
- (void)sendDelegateMethod:(SEL)selector withVoid:(void*)obj waitUntilDone:(BOOL)wait;
- (void)sendDelegateMethod:(SEL)selector withObject:(id)obj waitUntilDone:(BOOL)wait;
- (void)sendDelegateMethod:(SEL)selector withObject:(id)obj withObject:(id)obj2 waitUntilDone:(BOOL)wait;

@@ -105,5 +112,8 @@
- (void)audioPlayer:(AudioPlayer *)player willEndStream:(id)userInfo; //You must use setNextStream in this method
- (void)audioPlayer:(AudioPlayer *)player didBeginStream:(id)userInfo;
- (void)audioPlayer:(AudioPlayer *)player didChangeStatus:(id)status userInfo:(id)userInfo;
- (void)audioPlayer:(AudioPlayer *)player displayEqualizer:(AudioUnit)eq;
- (void)audioPlayer:(AudioPlayer *)player removeEqualizer:(AudioUnit)eq;

@end

@@ -66,7 +66,7 @@
- (void)play:(NSURL *)url withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi startPaused:(BOOL)paused andSeekTo:(double)time
{
[self waitUntilCallbacksExit];

output = nil;
output = [[OutputNode alloc] initWithController:self previous:nil];
[output setup];
[output setVolume: volume];
@@ -264,6 +264,16 @@
[self sendDelegateMethod:@selector(audioPlayer:didBeginStream:) withObject:userInfo waitUntilDone:YES];
}

- (void)beginEqualizer:(AudioUnit)eq
{
[self sendDelegateMethod:@selector(audioPlayer:displayEqualizer:) withVoid:eq waitUntilDone:YES];
}

- (void)endEqualizer:(AudioUnit)eq
{
[self sendDelegateMethod:@selector(audioPlayer:removeEqualizer:) withVoid:eq waitUntilDone:YES];
}

- (void)addChainToQueue:(BufferChain *)newChain
{
[newChain setUserInfo: nextStreamUserInfo];
@@ -458,6 +468,18 @@
return NO;
}

- (void)sendDelegateMethod:(SEL)selector withVoid:(void*)obj waitUntilDone:(BOOL)wait
{
NSInvocation *invocation = [NSInvocation invocationWithMethodSignature:[delegate methodSignatureForSelector:selector]];
[invocation setTarget:delegate];
[invocation setSelector:selector];
[invocation setArgument:(void*)&self atIndex:2];
[invocation setArgument:&obj atIndex:3];
[invocation retainArguments];

[invocation performSelectorOnMainThread:@selector(invoke) withObject:nil waitUntilDone:wait];
}

- (void)sendDelegateMethod:(SEL)selector withObject:(id)obj waitUntilDone:(BOOL)wait
{
NSInvocation *invocation = [NSInvocation invocationWithMethodSignature:[delegate methodSignatureForSelector:selector]];

@@ -1291,21 +1291,6 @@ static float db_to_scale(float db)
break;
}

for (;;)
{
void * ptr;
BufferChain * bufferChain = controller;
AudioPlayer * audioPlayer = [bufferChain controller];
VirtualRingBuffer * buffer = [[audioPlayer output] buffer];
dataRead = [buffer lengthAvailableToReadReturningPointer:&ptr];
if (dataRead) {
[refillNode writeData:(float*)ptr floatCount:dataRead / sizeof(float)];
[buffer didReadLength:dataRead];
}
else
break;
}

[self setupWithInputFormat:previousOutputFormat outputFormat:outputFormat];
}
else

@@ -26,6 +26,9 @@
BOOL started;
}

- (void)beginEqualizer:(AudioUnit)eq;
- (void)endEqualizer:(AudioUnit)eq;

- (double)amountPlayed;

- (void)incrementAmountPlayed:(long)count;

@@ -156,4 +156,14 @@
{
return paused;
}

- (void)beginEqualizer:(AudioUnit)eq
{
[controller beginEqualizer:eq];
}

- (void)endEqualizer:(AudioUnit)eq
{
[controller endEqualizer:eq];
}
@end

@@ -13,6 +13,7 @@
#import <AudioToolbox/AudioToolbox.h>
#import <AudioUnit/AudioUnit.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreAudio/CoreAudioTypes.h>

#import <stdatomic.h>

@@ -45,6 +46,8 @@

AUAudioUnit *_au;
size_t _bufferSize;

AudioUnit _eq;
}

- (id)initWithController:(OutputNode *)c;

@@ -15,6 +15,123 @@ extern void scale_by_volume(float * buffer, size_t count, float volume);

@implementation OutputCoreAudio

static void fillBuffers(AudioBufferList *ioData, float * inbuffer, size_t count, size_t offset)
{
const size_t channels = ioData->mNumberBuffers;
for (int i = 0; i < channels; ++i)
{
size_t maxCount = (ioData->mBuffers[i].mDataByteSize / sizeof(float)) - offset;
float * output = ((float *)ioData->mBuffers[i].mData) + offset;
float * input = inbuffer + i;
for (size_t j = offset, k = (count > maxCount) ? maxCount : count; j < k; ++j)
{
*output = *input;
output++;
input += channels;
}
ioData->mBuffers[i].mNumberChannels = 1;
}
}

static void clearBuffers(AudioBufferList *ioData, size_t count, size_t offset)
{
for (int i = 0; i < ioData->mNumberBuffers; ++i)
{
memset(ioData->mBuffers[i].mData + offset * sizeof(float), 0, count * sizeof(float));
ioData->mBuffers[i].mNumberChannels = 1;
}
}

static void scaleBuffersByVolume(AudioBufferList *ioData, float volume)
{
if (volume != 1.0)
{
for (int i = 0; i < ioData->mNumberBuffers; ++i)
{
scale_by_volume((float*)ioData->mBuffers[i].mData, ioData->mBuffers[i].mDataByteSize / sizeof(float), volume);
}
}
}

static OSStatus renderCallback( void *inRefCon, AudioUnitRenderActionFlags *ioActionFlags, const AudioTimeStamp *inTimeStamp, UInt32 inBusNumber, UInt32 inNumberFrames, AudioBufferList *ioData )
{
OutputCoreAudio * _self = (__bridge OutputCoreAudio *) inRefCon;

const int channels = _self->deviceFormat.mChannelsPerFrame;
const int bytesPerPacket = channels * sizeof(float);

int amountToRead, amountRead = 0;

amountToRead = inNumberFrames * bytesPerPacket;

if (_self->stopping == YES || [_self->outputController shouldContinue] == NO)
{
// Chain is dead, fill out the serial number pointer forever with silence
clearBuffers(ioData, amountToRead / bytesPerPacket, 0);
atomic_fetch_add(&_self->bytesRendered, amountToRead);
_self->stopping = YES;
return 0;
}

if ([[_self->outputController buffer] isEmpty] && ![_self->outputController chainQueueHasTracks])
{
// Hit end of last track, pad with silence until queue event stops us
clearBuffers(ioData, amountToRead / bytesPerPacket, 0);
atomic_fetch_add(&_self->bytesRendered, amountToRead);
return 0;
}

void * readPtr;
int toRead = [[_self->outputController buffer] lengthAvailableToReadReturningPointer:&readPtr];

if (toRead > amountToRead)
toRead = amountToRead;

if (toRead) {
fillBuffers(ioData, (float*)readPtr, toRead / bytesPerPacket, 0);
amountRead = toRead;
[[_self->outputController buffer] didReadLength:toRead];
[_self->outputController incrementAmountPlayed:amountRead];
atomic_fetch_add(&_self->bytesRendered, amountRead);
[_self->writeSemaphore signal];
}

// Try repeatedly! Buffer wraps can cause a slight data shortage, as can
// unexpected track changes.
while ((amountRead < amountToRead) && [_self->outputController shouldContinue] == YES)
{
int amountRead2; //Use this since return type of readdata isnt known...may want to fix then can do a simple += to readdata
amountRead2 = [[_self->outputController buffer] lengthAvailableToReadReturningPointer:&readPtr];
if (amountRead2 > (amountToRead - amountRead))
amountRead2 = amountToRead - amountRead;
if (amountRead2) {
atomic_fetch_add(&_self->bytesRendered, amountRead2);
fillBuffers(ioData, (float*)readPtr, amountRead2 / bytesPerPacket, amountRead / bytesPerPacket);
[[_self->outputController buffer] didReadLength:amountRead2];

[_self->outputController incrementAmountPlayed:amountRead2];

amountRead += amountRead2;
[_self->writeSemaphore signal];
}
else {
[_self->readSemaphore timedWait:500];
}
}

scaleBuffersByVolume(ioData, _self->volume);

if (amountRead < amountToRead)
{
// Either underrun, or no data at all. Caller output tends to just
// buffer loop if it doesn't get anything, so always produce a full
// buffer, and silence anything we couldn't supply.
clearBuffers(ioData, amountToRead - amountRead, amountRead / bytesPerPacket);
}

return 0;
};

- (id)initWithController:(OutputNode *)c
{
self = [super init];
@@ -22,6 +139,7 @@ extern void scale_by_volume(float * buffer, size_t count, float volume);
{
outputController = c;
_au = nil;
_eq = NULL;
_bufferSize = 0;
volume = 1.0;
outputDeviceID = -1;
@@ -374,6 +492,20 @@ default_device_changed(AudioObjectID inObjectID, UInt32 inNumberAddresses, const
return NO;

[outputController setFormat:&deviceFormat];

AudioStreamBasicDescription asbd = deviceFormat;

asbd.mFormatFlags &= ~kAudioFormatFlagIsPacked;

AudioUnitSetProperty (_eq, kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Input, 0, &asbd, sizeof (asbd));

AudioUnitSetProperty (_eq, kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Output, 0, &asbd, sizeof (asbd));
AudioUnitReset (_eq, kAudioUnitScope_Input, 0);
AudioUnitReset (_eq, kAudioUnitScope_Output, 0);

AudioUnitReset (_eq, kAudioUnitScope_Global, 0);
}

return YES;
@@ -419,92 +551,93 @@ default_device_changed(AudioObjectID inObjectID, UInt32 inNumberAddresses, const
}

_deviceFormat = nil;

AudioComponent comp = NULL;

desc.componentType = kAudioUnitType_Effect;
desc.componentSubType = kAudioUnitSubType_GraphicEQ;

comp = AudioComponentFindNext(comp, &desc);
if (!comp)
return NO;

OSStatus _err = AudioComponentInstanceNew(comp, &_eq);
if (err)
return NO;

[self updateDeviceFormat];

__block Semaphore * writeSemaphore = self->writeSemaphore;
__block Semaphore * readSemaphore = self->readSemaphore;
__block OutputNode * outputController = self->outputController;
__block float * volume = &self->volume;
__block atomic_long * bytesRendered = &self->bytesRendered;

_au.outputProvider = ^AUAudioUnitStatus(AudioUnitRenderActionFlags * actionFlags, const AudioTimeStamp * timestamp, AUAudioFrameCount frameCount, NSInteger inputBusNumber, AudioBufferList * inputData)
__block AudioUnit eq = _eq;
__block AudioStreamBasicDescription *format = &deviceFormat;

_au.outputProvider = ^AUAudioUnitStatus(AudioUnitRenderActionFlags * _Nonnull actionFlags, const AudioTimeStamp * _Nonnull timestamp, AUAudioFrameCount frameCount, NSInteger inputBusNumber, AudioBufferList * _Nonnull inputData)
{
void *readPointer = inputData->mBuffers[0].mData;
// This expects multiple buffers, so:
int i;
const int channels = format->mChannelsPerFrame;
const int channelsminusone = channels - 1;
float buffers[frameCount * format->mChannelsPerFrame];
uint8_t bufferlistbuffer[sizeof(AudioBufferList) + sizeof(AudioBuffer) * channelsminusone];
AudioBufferList * ioData = (AudioBufferList *)(bufferlistbuffer);

int amountToRead, amountRead = 0;

amountToRead = inputData->mBuffers[0].mDataByteSize;
ioData->mNumberBuffers = channels;

if (self->stopping == YES || [outputController shouldContinue] == NO)
{
// Chain is dead, fill out the serial number pointer forever with silence
memset(readPointer, 0, amountToRead);
atomic_fetch_add(bytesRendered, amountToRead);
self->stopping = YES;
return 0;
memset(buffers, 0, sizeof(buffers));

for (i = 0; i < channels; ++i) {
ioData->mBuffers[i].mNumberChannels = 1;
ioData->mBuffers[i].mData = buffers + frameCount * i;
ioData->mBuffers[i].mDataByteSize = frameCount * sizeof(float);
}

if ([[outputController buffer] isEmpty] && ![outputController chainQueueHasTracks])
{
// Hit end of last track, pad with silence until queue event stops us
memset(readPointer, 0, amountToRead);
atomic_fetch_add(bytesRendered, amountToRead);
return 0;
}
OSStatus ret = AudioUnitRender(eq, actionFlags, timestamp, (UInt32) inputBusNumber, frameCount, ioData);

void * readPtr;
int toRead = [[outputController buffer] lengthAvailableToReadReturningPointer:&readPtr];
if (ret)
return ret;

if (toRead > amountToRead)
toRead = amountToRead;

if (toRead) {
memcpy(readPointer, readPtr, toRead);
amountRead = toRead;
[[outputController buffer] didReadLength:toRead];
[outputController incrementAmountPlayed:amountRead];
atomic_fetch_add(bytesRendered, amountRead);
[writeSemaphore signal];
}

// Try repeatedly! Buffer wraps can cause a slight data shortage, as can
// unexpected track changes.
while ((amountRead < amountToRead) && [outputController shouldContinue] == YES)
{
int amountRead2; //Use this since return type of readdata isnt known...may want to fix then can do a simple += to readdata
amountRead2 = [[outputController buffer] lengthAvailableToReadReturningPointer:&readPtr];
if (amountRead2 > (amountToRead - amountRead))
amountRead2 = amountToRead - amountRead;
if (amountRead2) {
atomic_fetch_add(bytesRendered, amountRead2);

memcpy(readPointer + amountRead, readPtr, amountRead2);
[[outputController buffer] didReadLength:amountRead2];

[outputController incrementAmountPlayed:amountRead2];

amountRead += amountRead2;
[writeSemaphore signal];
}
else {
[readSemaphore timedWait:500];
for (i = 0; i < channels; ++i) {
float * outBuffer = ((float*)inputData->mBuffers[0].mData) + i;
float * inBuffer = ((float*)ioData->mBuffers[i].mData);
int frameCount = ioData->mBuffers[i].mDataByteSize / sizeof(float);
for (int j = 0; j < frameCount; ++j) {
*outBuffer = *inBuffer;
inBuffer++;
outBuffer += channels;
}
}

int framesRead = amountRead / sizeof(float);
scale_by_volume((float*)readPointer, framesRead, *volume);

if (amountRead < amountToRead)
{
// Either underrun, or no data at all. Caller output tends to just
// buffer loop if it doesn't get anything, so always produce a full
// buffer, and silence anything we couldn't supply.
memset(readPointer + amountRead, 0, amountToRead - amountRead);
}
inputData->mBuffers[0].mNumberChannels = channels;

return 0;
};

UInt32 value;
UInt32 size = sizeof(value);

value = CHUNK_SIZE;
AudioUnitSetProperty (_eq, kAudioUnitProperty_MaximumFramesPerSlice,
kAudioUnitScope_Global, 0, &value, size);

value = 127;
AudioUnitSetProperty (_eq, kAudioUnitProperty_RenderQuality,
kAudioUnitScope_Global, 0, &value, size);

AURenderCallbackStruct callbackStruct;
callbackStruct.inputProcRefCon = (__bridge void *)self;
callbackStruct.inputProc = renderCallback;
AudioUnitSetProperty (_eq, kAudioUnitProperty_SetRenderCallback,
kAudioUnitScope_Input, 0, &callbackStruct, sizeof(callbackStruct));

AudioUnitReset (_eq, kAudioUnitScope_Input, 0);
AudioUnitReset (_eq, kAudioUnitScope_Output, 0);

AudioUnitReset (_eq, kAudioUnitScope_Global, 0);

_err = AudioUnitInitialize(_eq);
if (_err)
return NO;

[outputController beginEqualizer:_eq];

[_au allocateRenderResourcesAndReturnError:&err];

@@ -547,6 +680,13 @@ default_device_changed(AudioObjectID inObjectID, UInt32 inNumberAddresses, const
[readSemaphore signal];
[writeSemaphore timedWait:5000];
}
if (_eq)
{
[outputController endEqualizer:_eq];
AudioUnitUninitialize(_eq);
AudioComponentInstanceDispose(_eq);
_eq = NULL;
}
}

- (void)dealloc

@@ -994,6 +994,12 @@
<action selector="toggleSideView:" target="2172" id="2419"/>
</connections>
</menuItem>
<menuItem title="Show Equalizer" id="nBU-pH-J3I">
<modifierMask key="keyEquivalentModifierMask"/>
<connections>
<action selector="showEq:" target="705" id="8VC-ml-1Zz"/>
</connections>
</menuItem>
<menuItem isSeparatorItem="YES" id="1854">
<modifierMask key="keyEquivalentModifierMask" command="YES"/>
</menuItem>
@@ -1653,6 +1659,7 @@ Gw
</menu>
<customObject id="705" userLabel="PlaybackController" customClass="PlaybackController">
<connections>
<outlet property="appController" destination="226" id="TnP-DA-nJl"/>
<outlet property="playlistController" destination="218" id="706"/>
<outlet property="playlistLoader" destination="1319" id="ghZ-65-60L"/>
<outlet property="playlistView" destination="207" id="717"/>

@@ -168,6 +168,7 @@
83D0380F24A40DFB004CF90F /* CogAssets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 83D0380E24A40DF2004CF90F /* CogAssets.xcassets */; };
83E5E54C18087CA5001F3284 /* miniModeOffTemplate.pdf in Resources */ = {isa = PBXBuildFile; fileRef = 83E5E54A18087CA5001F3284 /* miniModeOffTemplate.pdf */; };
83E5E54D18087CA5001F3284 /* miniModeOnTemplate.pdf in Resources */ = {isa = PBXBuildFile; fileRef = 83E5E54B18087CA5001F3284 /* miniModeOnTemplate.pdf */; };
83E88FD227945204003D6FE5 /* AUPlayerView.m in Sources */ = {isa = PBXBuildFile; fileRef = 83E88FD027945204003D6FE5 /* AUPlayerView.m */; };
83F9D8071A884C54007ABEC2 /* SilenceDecoder.bundle in CopyFiles */ = {isa = PBXBuildFile; fileRef = 83F9D7F61A884B46007ABEC2 /* SilenceDecoder.bundle */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; };
8D11072B0486CEB800E47090 /* InfoPlist.strings in Resources */ = {isa = PBXBuildFile; fileRef = 089C165CFE840E0CC02AAC07 /* InfoPlist.strings */; };
8D11072D0486CEB800E47090 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 29B97316FDCFA39411CA2CEA /* main.m */; settings = {ATTRIBUTES = (); }; };
@@ -1001,6 +1002,8 @@
83E5E54A18087CA5001F3284 /* miniModeOffTemplate.pdf */ = {isa = PBXFileReference; lastKnownFileType = image.pdf; name = miniModeOffTemplate.pdf; path = Images/miniModeOffTemplate.pdf; sourceTree = "<group>"; };
83E5E54B18087CA5001F3284 /* miniModeOnTemplate.pdf */ = {isa = PBXFileReference; lastKnownFileType = image.pdf; name = miniModeOnTemplate.pdf; path = Images/miniModeOnTemplate.pdf; sourceTree = "<group>"; };
83E5EFAC1FFEF78100659F0F /* OpenMPT.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; name = OpenMPT.xcodeproj; path = Plugins/OpenMPT/OpenMPT.xcodeproj; sourceTree = "<group>"; };
83E88FD027945204003D6FE5 /* AUPlayerView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = AUPlayerView.m; path = Window/AUPlayerView.m; sourceTree = "<group>"; };
83E88FD127945204003D6FE5 /* AUPlayerView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = AUPlayerView.h; path = Window/AUPlayerView.h; sourceTree = "<group>"; };
83F9D7F11A884B44007ABEC2 /* SilenceDecoder.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; name = SilenceDecoder.xcodeproj; path = Plugins/SilenceDecoder/SilenceDecoder.xcodeproj; sourceTree = "<group>"; };
8D1107310486CEB800E47090 /* Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist; path = Info.plist; sourceTree = "<group>"; };
8E07AB760AAC930B00A4B32F /* PreferencesController.h */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.h; name = PreferencesController.h; path = Preferences/PreferencesController.h; sourceTree = "<group>"; };
@@ -1366,6 +1369,8 @@
17E0D5D20F520E75005B6FED /* Window */ = {
isa = PBXGroup;
children = (
83E88FD127945204003D6FE5 /* AUPlayerView.h */,
83E88FD027945204003D6FE5 /* AUPlayerView.m */,
83BC5AB120E4C87100631CD4 /* DualWindow.h */,
83BC5AB020E4C87100631CD4 /* DualWindow.m */,
17E0D5E10F520F02005B6FED /* MainWindow.h */,
@@ -2449,6 +2454,7 @@
179D031F0E0CB2500064A77A /* ContainerNode.m in Sources */,
839DA7CF274A2D4C001B18E5 /* NSDictionary+Merge.m in Sources */,
179D03200E0CB2500064A77A /* DirectoryNode.m in Sources */,
83E88FD227945204003D6FE5 /* AUPlayerView.m in Sources */,
179D03210E0CB2500064A77A /* FileIconCell.m in Sources */,
179D03220E0CB2500064A77A /* FileNode.m in Sources */,
179D03230E0CB2500064A77A /* FileTreeDataSource.m in Sources */,

@@ -0,0 +1,51 @@
//
// AUPlayerView.h
// Output
//
// Created by Christopher Snowhill on 1/29/16.
// Copyright © 2016-2022 Christopher Snowhill. All rights reserved.
//

#ifndef __AUPlayerView_h__
#define __AUPlayerView_h__

#import <AppKit/AppKit.h>
#import <AudioUnit/AudioUnitCarbonView.h>
#import <AudioUnit/AudioUnit.h>

@interface AUPluginUI : NSObject
{
AudioUnit au;
int prefheight;
int prefwidth;

BOOL windowOpen;

BOOL resizable;
int min_width;
int min_height;
int req_width;
int req_height;
int alo_width;
int alo_height;

/* Cocoa */

NSWindow* cocoa_window;
NSView* au_view;
NSRect last_au_frame;
}

- (id) initWithSampler:(AudioUnit)_au bringToFront:(BOOL)front orWindowNumber:(NSInteger)window;
- (void) dealloc;

- (BOOL) isOpen;
- (BOOL) isForeground;

- (void) bringToFront;

- (NSInteger) windowNumber;

@end

#endif

@@ -0,0 +1,233 @@
//
// AUPlayerView.m
// Output
//
// Created by Christopher Snowhill on 1/29/16.
// Copyright © 2016-2022 Christopher Snowhill. All rights reserved.
//

#import <Foundation/Foundation.h>
#import <AudioUnit/AUCocoaUIView.h>
#import <CoreAudioKit/AUGenericView.h>

#import "AUPlayerView.h"

@interface AUPluginUI (Private)
- (BOOL)test_cocoa_view_support;
- (int)create_cocoa_view;
- (BOOL)plugin_class_valid:(Class)pluginClass;
@end

@implementation AUPluginUI

- (id) initWithSampler:(AudioUnit)_au bringToFront:(BOOL)front orWindowNumber:(NSInteger)window
{
self = [super init];
if (self)
{
au = _au;
resizable = NO;
min_width = 0;
min_height = 0;
req_width = 0;
req_height = 0;
alo_width = 0;
alo_height = 0;

windowOpen = NO;

cocoa_window = nil;
au_view = nil;

if ([self test_cocoa_view_support])
{
[self create_cocoa_view];
}

if (au_view)
{
cocoa_window = [[NSWindow alloc] initWithContentRect:NSMakeRect(0, 0, req_width, req_height)
styleMask:(NSWindowStyleMaskTitled |
NSWindowStyleMaskClosable)
backing:NSBackingStoreBuffered
defer:NO];

[cocoa_window setAutodisplay:YES];
[cocoa_window setOneShot:YES];

[cocoa_window setContentView:au_view];

if (front)
{
[cocoa_window orderFront:cocoa_window];
}
else
[cocoa_window orderWindow:NSWindowBelow relativeTo:window];

[cocoa_window setReleasedWhenClosed:NO];

[cocoa_window setFrameUsingName:@"EqualizerWindowPosition"];

windowOpen = YES;

[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(windowClosed:) name:NSWindowWillCloseNotification object:cocoa_window];
}
}

return self;
}

- (void) dealloc
{
if (windowOpen)
{
[self windowClosed:nil];
}
[cocoa_window close];
cocoa_window = nil;
au_view = nil;
[[NSNotificationCenter defaultCenter] removeObserver:self];
}

- (BOOL) isOpen
{
return windowOpen;
}

- (void) bringToFront
{
[cocoa_window orderFront:cocoa_window];
}

- (NSInteger) windowNumber
{
return cocoa_window.windowNumber;
}

- (void)windowClosed:(NSNotification*)notification
{
[cocoa_window saveFrameUsingName:@"EqualizerWindowPosition"];
[[NSNotificationCenter defaultCenter] removeObserver:self];
windowOpen = NO;
}

- (BOOL)test_cocoa_view_support
{
UInt32 dataSize = 0;
Boolean isWritable = 0;
OSStatus err = AudioUnitGetPropertyInfo(au,
kAudioUnitProperty_CocoaUI, kAudioUnitScope_Global,
0, &dataSize, &isWritable);

return dataSize > 0 && err == noErr;
}

- (BOOL) plugin_class_valid: (Class)pluginClass
{
if([pluginClass conformsToProtocol: @protocol(AUCocoaUIBase)]) {
if([pluginClass instancesRespondToSelector: @selector(interfaceVersion)] &&
[pluginClass instancesRespondToSelector: @selector(uiViewForAudioUnit:withSize:)]) {
return true;
}
}
return false;
}

- (int)create_cocoa_view
{
bool wasAbleToLoadCustomView = false;
AudioUnitCocoaViewInfo* cocoaViewInfo = NULL;
UInt32 numberOfClasses = 0;
UInt32 dataSize;
Boolean isWritable;
NSString* factoryClassName = 0;
NSURL* CocoaViewBundlePath = NULL;

OSStatus result = AudioUnitGetPropertyInfo (au,
kAudioUnitProperty_CocoaUI,
kAudioUnitScope_Global,
0,
&dataSize,
&isWritable );

numberOfClasses = (dataSize - sizeof(CFURLRef)) / sizeof(CFStringRef);

// Does view have custom Cocoa UI?

if ((result == noErr) && (numberOfClasses > 0) ) {

cocoaViewInfo = (AudioUnitCocoaViewInfo *)malloc(dataSize);

if(AudioUnitGetProperty(au,
kAudioUnitProperty_CocoaUI,
kAudioUnitScope_Global,
0,
cocoaViewInfo,
&dataSize) == noErr) {

CocoaViewBundlePath = (__bridge NSURL *)cocoaViewInfo->mCocoaAUViewBundleLocation;

// we only take the first view in this example.
factoryClassName = (__bridge NSString *)cocoaViewInfo->mCocoaAUViewClass[0];
} else {
if (cocoaViewInfo != NULL) {
free (cocoaViewInfo);
cocoaViewInfo = NULL;
}
}
}

// [A] Show custom UI if view has it

if (CocoaViewBundlePath && factoryClassName) {
NSBundle *viewBundle = [NSBundle bundleWithPath:[CocoaViewBundlePath path]];

if (viewBundle == NULL) {
return -1;
} else {
Class factoryClass = [viewBundle classNamed:factoryClassName];
if (!factoryClass) {
return -1;
}

// make sure 'factoryClass' implements the AUCocoaUIBase protocol
if (![self plugin_class_valid: factoryClass]) {
return -1;
}
// make a factory
id factory = [[factoryClass alloc] init];
if (factory == NULL) {
return -1;
}

// make a view
au_view = [factory uiViewForAudioUnit:au withSize:NSZeroSize];

// cleanup
if (cocoaViewInfo) {
UInt32 i;
for (i = 0; i < numberOfClasses; i++)
CFRelease(cocoaViewInfo->mCocoaAUViewClass[i]);

free (cocoaViewInfo);
}
wasAbleToLoadCustomView = true;
}
}

if (!wasAbleToLoadCustomView) {
// load generic Cocoa view
au_view = [[AUGenericView alloc] initWithAudioUnit:au];
[(AUGenericView *)au_view setShowsExpertParameters:1];
}

// Get the initial size of the new AU View's frame
NSRect frame = [au_view frame];
min_width = req_width = CGRectGetWidth(NSRectToCGRect(frame));
min_height = req_height = CGRectGetHeight(NSRectToCGRect(frame));
resizable = [au_view autoresizingMask];

return 0;
}

@end