[Audio Output] Fix serious memory leakage
For one thing, the example code I followed was Swift, which autoreleases these handles behind the scenes, while Objective-C requires manual reference management of them. For another, there was no autoreleasepool around the block handling the input audio chunks, which need to be released as they are pulled out and disposed of; this also contributed to the memory leakage.

Signed-off-by: Christopher Snowhill <kode54@gmail.com>
parent f2dee9e0a7
commit 8d10aa7e80
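The ownership pattern being applied is sketched below, assuming an AVSampleBufferAudioRenderer fed from a chunk callback. The feedRenderer helper and its block parameters are hypothetical names for illustration; the real code drives the class's audioRenderer with buffers from [self makeSampleBuffer]. Buffers returned under the Core Foundation Create rule are not managed by ARC, so each CMSampleBufferRef must be CFRelease()d once it has been enqueued, and an @autoreleasepool around the chunk loop drains autoreleased temporaries each time the loop completes instead of letting them accumulate on the render thread.

#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>

// Minimal sketch of the ownership pattern from this commit. feedRenderer and its
// block parameters are hypothetical; the actual code uses the class's audioRenderer
// ivar and buffers created by [self makeSampleBuffer].
static void feedRenderer(AVSampleBufferAudioRenderer *renderer,
                         CMSampleBufferRef (^nextChunk)(void),
                         BOOL (^shouldStop)(void)) {
    // Pool around the chunk loop so autoreleased temporaries are drained each
    // time this runs, rather than piling up for the lifetime of the render thread.
    @autoreleasepool {
        while(!shouldStop() && [renderer isReadyForMoreMediaData]) {
            // nextChunk() follows the Create rule, so the caller owns this buffer.
            CMSampleBufferRef bufferRef = nextChunk();
            if(!bufferRef) break;

            // The renderer retains whatever it needs from the buffer...
            [renderer enqueueSampleBuffer:bufferRef];

            // ...so the creating reference must be released here; omitting this
            // CFRelease leaked one sample buffer per chunk.
            CFRelease(bufferRef);
        }
    }
}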
@@ -347,6 +347,7 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
             if(stopping)
                 break;
 
+            @autoreleasepool {
                 while(!stopping && [audioRenderer isReadyForMoreMediaData]) {
                     CMSampleBufferRef bufferRef = [self makeSampleBuffer];
 
@@ -357,11 +358,14 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
                         trackPts = CMTimeAdd(trackPts, chunkDuration);
 
                         [audioRenderer enqueueSampleBuffer:bufferRef];
+
+                        CFRelease(bufferRef);
                     } else {
                         stopFlush = YES;
                         break;
                     }
                 }
+            }
 
             if(!paused && !started) {
                 [self resume];
@@ -640,6 +644,11 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
         layout.mChannelBitmap = streamChannelConfig;
     }
 
+    if(audioFormatDescription) {
+        CFRelease(audioFormatDescription);
+        audioFormatDescription = NULL;
+    }
+
     if(CMAudioFormatDescriptionCreate(kCFAllocatorDefault, &streamFormat, sizeof(layout), &layout, 0, NULL, NULL, &audioFormatDescription) != noErr) {
         return;
     }
@@ -674,8 +683,10 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
 
     OSStatus err = CMAudioSampleBufferCreateReadyWithPacketDescriptions(kCFAllocatorDefault, blockBuffer, audioFormatDescription, samplesRendered, outputPts, nil, &sampleBuffer);
     if(err != noErr) {
+        CFRelease(blockBuffer);
         return nil;
     }
+    CFRelease(blockBuffer);
 
     return sampleBuffer;
 }
@@ -744,6 +755,7 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
             status = AudioUnitRender(_eq, NULL, &timeStamp, 0, samplesRendered, ioData);
 
             if(status != noErr) {
+                CFRelease(blockListBuffer);
                 return 0;
             }
 
@@ -761,20 +773,27 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
                 status = CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, nil, dataByteSize, kCFAllocatorDefault, nil, 0, dataByteSize, kCMBlockBufferAssureMemoryNowFlag, &blockBuffer);
 
                 if(status != noErr || !blockBuffer) {
+                    CFRelease(blockListBuffer);
                     return 0;
                 }
 
                 status = CMBlockBufferReplaceDataBytes(samplePtr, blockBuffer, 0, dataByteSize);
 
                 if(status != noErr) {
+                    CFRelease(blockBuffer);
+                    CFRelease(blockListBuffer);
                     return 0;
                 }
 
                 status = CMBlockBufferAppendBufferReference(blockListBuffer, blockBuffer, 0, CMBlockBufferGetDataLength(blockBuffer), 0);
 
                 if(status != noErr) {
+                    CFRelease(blockBuffer);
+                    CFRelease(blockListBuffer);
                     return 0;
                 }
+
+                CFRelease(blockBuffer);
             }
 
             if(i == 0) {
@@ -809,6 +828,8 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
     stopCompleted = NO;
     commandStop = NO;
 
+    audioFormatDescription = NULL;
+
     running = NO;
     stopping = NO;
     stopped = NO;
@@ -1078,6 +1099,10 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
             usleep(5000);
         }
     }
+    if(audioFormatDescription) {
+        CFRelease(audioFormatDescription);
+        audioFormatDescription = NULL;
+    }
     if(_eq) {
         [outputController endEqualizer:_eq];
         if(eqInitialized) {