Fixed input to float conversion and made it mandatory, so I could move volume scaling to the converter node

CQTexperiment
Chris Moeller 2013-10-07 03:59:04 -07:00
parent 8aa01894ee
commit 15c545b10d
5 changed files with 190 additions and 369 deletions

View File

@@ -106,7 +106,7 @@
[rgi retain];
[rgInfo release];
rgInfo = rgi;
[inputNode setRGInfo:rgi];
[converterNode setRGInfo:rgi];
}
- (NSDictionary *)rgInfo

View File

@@ -15,26 +15,36 @@
#import "Node.h"
@interface ConverterNode : Node {
NSDictionary * rgInfo;
AudioConverterRef converter;
AudioConverterRef converterDownmix;
AudioConverterRef converterFloat;
void *callbackBuffer;
void *downmixBuffer;
int downmixSize, downmixOffset;
float volumeScale;
void *floatBuffer;
int floatSize, floatOffset;
AudioStreamBasicDescription inputFormat;
AudioStreamBasicDescription downmixFormat;
AudioStreamBasicDescription floatFormat;
AudioStreamBasicDescription outputFormat;
}
- (void)registerObservers;
- (BOOL)setupWithInputFormat:(AudioStreamBasicDescription)inputFormat outputFormat:(AudioStreamBasicDescription)outputFormat;
- (void)cleanUp;
- (void)process;
- (int)convert:(void *)dest amount:(int)amount;
- (void)setRGInfo:(NSDictionary *)rgi;
- (void)setOutputFormat:(AudioStreamBasicDescription)format;
- (void)inputFormatDidChange:(AudioStreamBasicDescription)format;
- (void)refreshVolumeScaling;
@end

View File

@@ -75,6 +75,13 @@ static void downmix_to_stereo(float * buffer, int channels, int count)
}
}
static void scale_by_volume(float * buffer, int count, float volume)
{
if ( volume != 1.0 )
for (int i = 0; i < count; ++i )
buffer[i] *= volume;
}
//called from the complexfill when the audio is converted...good clean fun
static OSStatus ACInputProc(AudioConverterRef inAudioConverter, UInt32* ioNumberDataPackets, AudioBufferList* ioData, AudioStreamPacketDescription** outDataPacketDescription, void* inUserData)
{
@@ -114,7 +121,7 @@ static OSStatus ACInputProc(AudioConverterRef inAudioConverter, UInt32* ioNumber
return err;
}
static OSStatus ACDownmixProc(AudioConverterRef inAudioConverter, UInt32* ioNumberDataPackets, AudioBufferList* ioData, AudioStreamPacketDescription** outDataPacketDescription, void* inUserData)
static OSStatus ACFloatProc(AudioConverterRef inAudioConverter, UInt32* ioNumberDataPackets, AudioBufferList* ioData, AudioStreamPacketDescription** outDataPacketDescription, void* inUserData)
{
ConverterNode *converter = (ConverterNode *)inUserData;
OSStatus err = noErr;
@@ -128,17 +135,17 @@ static OSStatus ACDownmixProc(AudioConverterRef inAudioConverter, UInt32* ioNumb
return noErr;
}
amountToWrite = (*ioNumberDataPackets)*(converter->downmixFormat.mBytesPerPacket);
amountToWrite = (*ioNumberDataPackets)*(converter->floatFormat.mBytesPerPacket);
if ( amountToWrite + converter->downmixOffset > converter->downmixSize )
amountToWrite = converter->downmixSize - converter->downmixOffset;
if ( amountToWrite + converter->floatOffset > converter->floatSize )
amountToWrite = converter->floatSize - converter->floatOffset;
ioData->mBuffers[0].mData = converter->downmixBuffer + converter->downmixOffset;
ioData->mBuffers[0].mData = converter->floatBuffer + converter->floatOffset;
ioData->mBuffers[0].mDataByteSize = amountToWrite;
ioData->mBuffers[0].mNumberChannels = (converter->downmixFormat.mChannelsPerFrame);
ioData->mBuffers[0].mNumberChannels = (converter->floatFormat.mChannelsPerFrame);
ioData->mNumberBuffers = 1;
converter->downmixOffset += amountToWrite;
converter->floatOffset += amountToWrite;
return err;
}
@@ -161,90 +168,135 @@ static OSStatus ACDownmixProc(AudioConverterRef inAudioConverter, UInt32* ioNumb
OSStatus err;
int amountRead = 0;
if ( converterDownmix )
{
if (downmixOffset == downmixSize) {
ioNumberFrames = amount / outputFormat.mBytesPerFrame;
downmixBuffer = realloc( downmixBuffer, ioNumberFrames * downmixFormat.mBytesPerFrame );
ioData.mBuffers[0].mData = downmixBuffer;
ioData.mBuffers[0].mDataByteSize = ioNumberFrames * downmixFormat.mBytesPerFrame;
ioData.mBuffers[0].mNumberChannels = downmixFormat.mChannelsPerFrame;
ioData.mNumberBuffers = 1;
tryagain:
err = AudioConverterFillComplexBuffer(converter, ACInputProc, self, &ioNumberFrames, &ioData, NULL);
amountRead += ioData.mBuffers[0].mDataByteSize;
if (err == 100)
{
NSLog(@"INSIZE: %i", amountRead);
ioData.mBuffers[0].mData = downmixBuffer + amountRead;
ioNumberFrames = ( amount / outputFormat.mBytesPerFrame ) - ( amountRead / downmixFormat.mBytesPerFrame );
ioData.mBuffers[0].mDataByteSize = ioNumberFrames * downmixFormat.mBytesPerFrame;
goto tryagain;
}
else if (err != noErr)
{
NSLog(@"Error: %i", err);
}
downmix_to_stereo( (float*) downmixBuffer, downmixFormat.mChannelsPerFrame, amountRead / downmixFormat.mBytesPerFrame );
downmixSize = amountRead;
downmixOffset = 0;
}
if (floatOffset == floatSize) {
ioNumberFrames = amount / outputFormat.mBytesPerFrame;
ioData.mBuffers[0].mData = dest;
ioData.mBuffers[0].mDataByteSize = amount;
ioData.mBuffers[0].mNumberChannels = outputFormat.mChannelsPerFrame;
floatBuffer = realloc( floatBuffer, ioNumberFrames * floatFormat.mBytesPerFrame );
ioData.mBuffers[0].mData = floatBuffer;
ioData.mBuffers[0].mDataByteSize = ioNumberFrames * floatFormat.mBytesPerFrame;
ioData.mBuffers[0].mNumberChannels = floatFormat.mChannelsPerFrame;
ioData.mNumberBuffers = 1;
amountRead = 0;
tryagain2:
err = AudioConverterFillComplexBuffer(converterDownmix, ACDownmixProc, self, &ioNumberFrames, &ioData, NULL);
tryagain:
err = AudioConverterFillComplexBuffer(converterFloat, ACInputProc, self, &ioNumberFrames, &ioData, NULL);
amountRead += ioData.mBuffers[0].mDataByteSize;
if (err == 100)
{
NSLog(@"INSIZE: %i", amountRead);
ioData.mBuffers[0].mData = dest + amountRead;
ioNumberFrames = ( amount - amountRead ) / outputFormat.mBytesPerFrame;
ioData.mBuffers[0].mDataByteSize = ioNumberFrames * outputFormat.mBytesPerFrame;
goto tryagain2;
ioData.mBuffers[0].mData = floatBuffer + amountRead;
ioNumberFrames = ( amount / outputFormat.mBytesPerFrame ) - ( amountRead / floatFormat.mBytesPerFrame );
ioData.mBuffers[0].mDataByteSize = ioNumberFrames * floatFormat.mBytesPerFrame;
goto tryagain;
}
else if (err != noErr && err != kAudioConverterErr_InvalidInputSize)
{
NSLog(@"Error: %i", err);
return amountRead;
}
if ( inputFormat.mChannelsPerFrame > 2 && outputFormat.mChannelsPerFrame == 2 )
downmix_to_stereo( (float*) floatBuffer, inputFormat.mChannelsPerFrame, amountRead / floatFormat.mBytesPerFrame );
scale_by_volume( (float*) floatBuffer, amountRead / sizeof(float), volumeScale);
floatSize = amountRead;
floatOffset = 0;
}
else
ioNumberFrames = amount / outputFormat.mBytesPerFrame;
ioData.mBuffers[0].mData = dest;
ioData.mBuffers[0].mDataByteSize = amount;
ioData.mBuffers[0].mNumberChannels = outputFormat.mChannelsPerFrame;
ioData.mNumberBuffers = 1;
amountRead = 0;
tryagain2:
err = AudioConverterFillComplexBuffer(converter, ACFloatProc, self, &ioNumberFrames, &ioData, NULL);
amountRead += ioData.mBuffers[0].mDataByteSize;
if (err == 100)
{
ioNumberFrames = amount/outputFormat.mBytesPerFrame;
ioData.mBuffers[0].mData = dest;
ioData.mBuffers[0].mDataByteSize = amount;
ioData.mBuffers[0].mNumberChannels = outputFormat.mChannelsPerFrame;
ioData.mNumberBuffers = 1;
tryagain3:
err = AudioConverterFillComplexBuffer(converter, ACInputProc, self, &ioNumberFrames, &ioData, NULL);
amountRead += ioData.mBuffers[0].mDataByteSize;
if (err == 100) //It returns insz at EOS at times...so run it again to make sure all data is converted
{
NSLog(@"INSIZE: %i", amountRead);
ioData.mBuffers[0].mData = dest + amountRead;
ioNumberFrames = ( amount - amountRead ) / outputFormat.mBytesPerFrame;
ioData.mBuffers[0].mDataByteSize = ioNumberFrames * outputFormat.mBytesPerFrame;
goto tryagain3;
}
else if (err != noErr) {
NSLog(@"Error: %i", err);
}
NSLog(@"INSIZE: %i", amountRead);
ioData.mBuffers[0].mData = dest + amountRead;
ioNumberFrames = ( amount - amountRead ) / outputFormat.mBytesPerFrame;
ioData.mBuffers[0].mDataByteSize = ioNumberFrames * outputFormat.mBytesPerFrame;
goto tryagain2;
}
else if (err != noErr && err != kAudioConverterErr_InvalidInputSize)
{
NSLog(@"Error: %i", err);
}
return amountRead;
}
- (void)registerObservers
{
NSLog(@"REGISTERING OBSERVERS");
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.volumeScaling" options:0 context:nil];
}
- (void)observeValueForKeyPath:(NSString *)keyPath
ofObject:(id)object
change:(NSDictionary *)change
context:(void *)context
{
NSLog(@"SOMETHING CHANGED!");
if ([keyPath isEqual:@"values.volumeScaling"]) {
//User reset the volume scaling option
[self refreshVolumeScaling];
}
}
static float db_to_scale(float db)
{
return pow(10.0, db / 20);
}
- (void)refreshVolumeScaling
{
if (rgInfo == nil)
{
volumeScale = 1.0;
return;
}
NSString * scaling = [[NSUserDefaults standardUserDefaults] stringForKey:@"volumeScaling"];
BOOL useAlbum = [scaling hasPrefix:@"albumGain"];
BOOL useTrack = useAlbum || [scaling hasPrefix:@"trackGain"];
BOOL useVolume = useAlbum || useTrack || [scaling isEqualToString:@"volumeScale"];
BOOL usePeak = [scaling hasSuffix:@"WithPeak"];
float scale = 1.0;
float peak = 0.0;
if (useVolume) {
id pVolumeScale = [rgInfo objectForKey:@"volume"];
if (pVolumeScale != nil)
scale = [pVolumeScale floatValue];
}
if (useTrack) {
id trackGain = [rgInfo objectForKey:@"replayGainTrackGain"];
id trackPeak = [rgInfo objectForKey:@"replayGainTrackPeak"];
if (trackGain != nil)
scale = db_to_scale([trackGain floatValue]);
if (trackPeak != nil)
peak = [trackPeak floatValue];
}
if (useAlbum) {
id albumGain = [rgInfo objectForKey:@"replayGainAlbumGain"];
id albumPeak = [rgInfo objectForKey:@"replayGainAlbumPeak"];
if (albumGain != nil)
scale = db_to_scale([albumGain floatValue]);
if (albumPeak != nil)
peak = [albumPeak floatValue];
}
if (usePeak) {
if (scale * peak > 1.0)
scale = 1.0 / peak;
}
volumeScale = scale;
}
- (BOOL)setupWithInputFormat:(AudioStreamBasicDescription)inf outputFormat:(AudioStreamBasicDescription)outf
{
//Make the converter
@@ -252,44 +304,41 @@ static OSStatus ACDownmixProc(AudioConverterRef inAudioConverter, UInt32* ioNumb
inputFormat = inf;
outputFormat = outf;
[self registerObservers];
floatFormat = inputFormat;
floatFormat.mFormatFlags = kAudioFormatFlagsNativeFloatPacked;
floatFormat.mBitsPerChannel = 32;
floatFormat.mBytesPerFrame = (32/8)*floatFormat.mChannelsPerFrame;
floatFormat.mBytesPerPacket = floatFormat.mBytesPerFrame * floatFormat.mFramesPerPacket;
stat = AudioConverterNew( &inputFormat, &floatFormat, &converterFloat );
if (stat != noErr)
{
NSLog(@"Error creating converter %i", stat);
return NO;
}
stat = AudioConverterNew ( &floatFormat, &outputFormat, &converter );
if (stat != noErr)
{
NSLog(@"Error creating converter %i", stat);
return NO;
}
if (inputFormat.mChannelsPerFrame > 2 && outputFormat.mChannelsPerFrame == 2)
{
downmixFormat = inputFormat;
downmixFormat.mFormatFlags = kAudioFormatFlagsNativeFloatPacked;
downmixFormat.mBitsPerChannel = 32;
downmixFormat.mBytesPerFrame = (32/8)*downmixFormat.mChannelsPerFrame;
downmixFormat.mBytesPerPacket = downmixFormat.mBytesPerFrame * downmixFormat.mFramesPerPacket;
stat = AudioConverterNew( &inputFormat, &downmixFormat, &converter );
if (stat != noErr)
{
NSLog(@"Error creating converter %i", stat);
}
stat = AudioConverterNew ( &downmixFormat, &outputFormat, &converterDownmix );
if (stat != noErr)
{
NSLog(@"Error creating converter %i", stat);
}
SInt32 channelMap[2] = { 0, 1 };
stat = AudioConverterSetProperty(converterDownmix,kAudioConverterChannelMap,sizeof(channelMap),channelMap);
stat = AudioConverterSetProperty(converter,kAudioConverterChannelMap,sizeof(channelMap),channelMap);
if (stat != noErr)
{
NSLog(@"Error mapping channels %i", stat);
return NO;
}
}
else
{
stat = AudioConverterNew ( &inputFormat, &outputFormat, &converter);
if (stat != noErr)
{
NSLog(@"Error creating converter %i", stat);
}
}
if (inputFormat.mChannelsPerFrame == 1)
else if (inputFormat.mChannelsPerFrame == 1)
{
SInt32 channelMap[2] = { 0, 0 };
@@ -297,12 +346,15 @@ static OSStatus ACDownmixProc(AudioConverterRef inAudioConverter, UInt32* ioNumb
if (stat != noErr)
{
NSLog(@"Error mapping channels %i", stat);
return NO;
}
}
PrintStreamDesc(&inf);
PrintStreamDesc(&outf);
[self refreshVolumeScaling];
return YES;
}
@@ -327,27 +379,40 @@ static OSStatus ACDownmixProc(AudioConverterRef inAudioConverter, UInt32* ioNumb
[self setupWithInputFormat:format outputFormat:outputFormat];
}
- (void)setRGInfo:(NSDictionary *)rgi
{
NSLog(@"Setting ReplayGain info");
[rgInfo release];
[rgi retain];
rgInfo = rgi;
[self refreshVolumeScaling];
}
- (void)cleanUp
{
if (converterDownmix)
[rgInfo release];
rgInfo = nil;
if (converterFloat)
{
AudioConverterDispose(converterDownmix);
converterDownmix = NULL;
AudioConverterDispose(converterFloat);
converterFloat = NULL;
}
if (converter)
{
AudioConverterDispose(converter);
converter = NULL;
}
if (downmixBuffer)
if (floatBuffer)
{
free(downmixBuffer);
downmixBuffer = NULL;
free(floatBuffer);
floatBuffer = NULL;
}
if (callbackBuffer) {
free(callbackBuffer);
callbackBuffer = NULL;
}
floatOffset = 0;
floatSize = 0;
}
@end

View File

@@ -18,11 +18,9 @@
@interface InputNode : Node {
id<CogDecoder> decoder;
NSDictionary * rgInfo;
int bytesPerSample;
int bytesPerFrame;
int volumeScale;
BOOL floatingPoint;
BOOL swapEndian;
@@ -41,10 +39,6 @@
- (BOOL)setTrack:(NSURL *)track;
- (void)setRGInfo:(NSDictionary *)rgi;
- (id<CogDecoder>) decoder;
- (void)refreshVolumeScaling;
@end

View File

@@ -12,15 +12,6 @@
#import "CoreAudioUtils.h"
static BOOL hostIsBigEndian()
{
#ifdef __BIG_ENDIAN__
return YES;
#else
return NO;
#endif
}
@implementation InputNode
- (BOOL)openWithSource:(id<CogSource>)source
@@ -43,21 +34,8 @@ static BOOL hostIsBigEndian()
int bitsPerSample = [[properties objectForKey:@"bitsPerSample"] intValue];
int channels = [[properties objectForKey:@"channels"] intValue];
bytesPerSample = bitsPerSample / 8;
bytesPerFrame = bytesPerSample * channels;
bytesPerFrame = (bitsPerSample / 8) * channels;
if (([[properties objectForKey:@"endian"] isEqualToString:@"big"] && !hostIsBigEndian()) ||
([[properties objectForKey:@"endian"] isEqualToString:@"little"] && hostIsBigEndian())) {
swapEndian = YES;
}
else {
swapEndian = NO;
}
floatingPoint = [[properties objectForKey:@"floatingPoint"] boolValue];
[self refreshVolumeScaling];
shouldContinue = YES;
shouldSeek = NO;
@@ -74,11 +52,8 @@ static BOOL hostIsBigEndian()
int bitsPerSample = [[properties objectForKey:@"bitsPerSample"] intValue];
int channels = [[properties objectForKey:@"channels"] intValue];
bytesPerSample = bitsPerSample / 8;
bytesPerFrame = bytesPerSample * channels;
bytesPerFrame = (bitsPerSample / 8) * channels;
[self refreshVolumeScaling];
[self registerObservers];
shouldContinue = YES;
@@ -101,8 +76,6 @@ static BOOL hostIsBigEndian()
forKeyPath:@"metadata"
options:(NSKeyValueObservingOptionNew)
context:NULL];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.volumeScaling" options:0 context:nil];
}
- (void)observeValueForKeyPath:(NSString *)keyPath
@@ -116,104 +89,10 @@ static BOOL hostIsBigEndian()
//Inform something of properties change
//Disable support until it is properly implemented.
//[controller inputFormatDidChange: propertiesToASBD([decoder properties])];
[self refreshVolumeScaling];
}
else if ([keyPath isEqual:@"metadata"]) {
//Inform something of metadata change
}
else if ([keyPath isEqual:@"values.volumeScaling"]) {
//User reset the volume scaling option
[self refreshVolumeScaling];
}
}
static float db_to_scale(float db)
{
return pow(10.0, db / 20);
}
- (void)refreshVolumeScaling
{
NSDictionary *properties = [decoder properties];
if (rgInfo != nil)
properties = rgInfo;
NSString * scaling = [[NSUserDefaults standardUserDefaults] stringForKey:@"volumeScaling"];
BOOL useAlbum = [scaling hasPrefix:@"albumGain"];
BOOL useTrack = useAlbum || [scaling hasPrefix:@"trackGain"];
BOOL useVolume = useAlbum || useTrack || [scaling isEqualToString:@"volumeScale"];
BOOL usePeak = [scaling hasSuffix:@"WithPeak"];
float scale = 1.0;
float peak = 0.0;
if (useVolume) {
id pVolumeScale = [properties objectForKey:@"volume"];
if (pVolumeScale != nil)
scale = [pVolumeScale floatValue];
}
if (useTrack) {
id trackGain = [properties objectForKey:@"replayGainTrackGain"];
id trackPeak = [properties objectForKey:@"replayGainTrackPeak"];
if (trackGain != nil)
scale = db_to_scale([trackGain floatValue]);
if (trackPeak != nil)
peak = [trackPeak floatValue];
}
if (useAlbum) {
id albumGain = [properties objectForKey:@"replayGainAlbumGain"];
id albumPeak = [properties objectForKey:@"replayGainAlbumPeak"];
if (albumGain != nil)
scale = db_to_scale([albumGain floatValue]);
if (albumPeak != nil)
peak = [albumPeak floatValue];
}
if (usePeak) {
if (scale * peak > 1.0)
scale = 1.0 / peak;
}
volumeScale = scale * 4096;
}
static int16_t swap_16(uint16_t input)
{
return (input >> 8) | (input << 8);
}
static int32_t swap_24(uint32_t input)
{
int32_t temp = (input << 24) >> 8;
return temp | ((input >> 16) & 0xff) | (input & 0xff00);
}
static int32_t swap_32(uint32_t input)
{
return (input >> 24) | ((input >> 8) & 0xff00) | ((input << 8) & 0xff0000) | (input << 24);
}
static float swap_32f(float input)
{
union {
float f;
int32_t i;
} val;
val.f = input;
val.i = swap_32(val.i);
return val.f;
}
static int64_t swap_64(uint64_t input)
{
return (input >> 56) | ((input >> 40) & 0xff00) | ((input >> 24) & 0xff0000) | ((input >> 8) & 0xff000000) |
((input << 8) & 0xff00000000) | ((input << 24) & 0xff0000000000) | ((input << 40) & 0xff000000000000) | (input << 56);
}
static double swap_64f(double input)
{
union {
double f;
int64_t i;
} val;
val.f = input;
val.i = swap_64(val.i);
return val.f;
}
- (void)process
@@ -256,123 +135,6 @@ static double swap_64f(double input)
break;
}
if (volumeScale != 4096) {
int totalFrames = amountInBuffer / bytesPerSample;
switch (bytesPerSample) {
case 1:
{
uint8_t * samples = (uint8_t *)inputBuffer;
for (int i = 0; i < totalFrames; i++)
{
int32_t sample = (int8_t)samples[i] - 128;
sample = (sample * volumeScale) >> 12;
if ((unsigned)(sample + 0x80) & 0xffffff00) sample = (sample >> 31) ^ 0x7f;
samples[i] = sample + 128;
}
}
break;
case 2:
{
int16_t * samples = (int16_t *)inputBuffer;
for (int i = 0; i < totalFrames; i++)
{
int32_t sample = samples[i];
if (swapEndian) sample = swap_16(sample);
sample = (sample * volumeScale) >> 12;
if ((unsigned)(sample + 0x8000) & 0xffff0000) sample = (sample >> 31) ^ 0x7fff;
if (swapEndian) sample = swap_16(sample);
samples[i] = sample;
}
}
break;
case 3:
{
uint8_t * samples = (uint8_t *)inputBuffer;
for (int i = 0; i < totalFrames; i++)
{
int32_t sample = (samples[i * 3] << 8) | (samples[i * 3 + 1] << 16) | (samples[i * 3 + 2] << 24);
sample >>= 8;
if (swapEndian) sample = swap_24(sample);
sample = (sample * volumeScale) >> 12;
if ((unsigned)(sample + 0x800000) & 0xff000000) sample = (sample >> 31) ^ 0x7fffff;
if (swapEndian) sample = swap_24(sample);
samples[i * 3] = sample;
samples[i * 3 + 1] = sample >> 8;
samples[i * 3 + 2] = sample >> 16;
}
}
break;
case 4:
if (floatingPoint)
{
float * samples = (float *)inputBuffer;
float scale = (float)volumeScale / 4096;
for (int i = 0; i < totalFrames; i++)
{
float sample = samples[i];
if (swapEndian) sample = swap_32f(sample);
sample *= scale;
if (swapEndian) sample = swap_32f(sample);
samples[i] = sample;
}
}
else
{
int32_t * samples = (int32_t *)inputBuffer;
for (int i = 0; i < totalFrames; i++)
{
int64_t sample = samples[i];
if (swapEndian) sample = swap_32(sample);
sample = (sample * volumeScale) >> 12;
if ((unsigned)(sample + 0x80000000) & 0xffffffff00000000) sample = (sample >> 63) ^ 0x7fffffff;
if (swapEndian) sample = swap_32(sample);
samples[i] = sample;
}
}
break;
case 8:
if (floatingPoint)
{
double * samples = (double *)inputBuffer;
double scale = (double)volumeScale / 4096;
for (int i = 0; i < totalFrames; i++)
{
double sample = samples[i];
if (swapEndian) sample = swap_64f(sample);
sample *= scale;
if (swapEndian) sample = swap_64f(sample);
samples[i] = sample;
}
}
else
{
int64_t * samples = (int64_t *)inputBuffer;
for (int i = 0; i < totalFrames; i++)
{
int64_t sample = samples[i];
if (swapEndian) sample = swap_64(sample);
int64_t high_part = sample >> (32 + 12);
int64_t low_part = (sample & 0xffffffff) | (sample >> 31);
high_part *= volumeScale;
low_part = (low_part * volumeScale) >> 12;
if (((uint64_t)low_part + 0x100000000) & 0xfffffffe00000000)
high_part += low_part >> 32;
if (((uint64_t)high_part + 0x80000000) & 0xffffffff00000000)
sample = high_part >> 63;
else
sample = (high_part << 32) | (low_part & 0xffffffff);
if (swapEndian) sample = swap_64(sample);
samples[i] = sample;
}
}
break;
}
}
[self writeData:inputBuffer amount:amountInBuffer];
amountInBuffer = 0;
}
@@ -402,20 +164,10 @@ static double swap_64f(double input)
return NO;
}
- (void)setRGInfo:(NSDictionary *)i
{
[i retain];
[rgInfo release];
rgInfo = i;
[self refreshVolumeScaling];
}
- (void)dealloc
{
NSLog(@"Input Node dealloc");
[rgInfo release];
[decoder removeObserver:self forKeyPath:@"properties"];
[decoder removeObserver:self forKeyPath:@"metadata"];