From 8d10aa7e80c81cd1ea63614addc506dd0939521a Mon Sep 17 00:00:00 2001
From: Christopher Snowhill
Date: Sat, 25 Jun 2022 06:00:11 -0700
Subject: [PATCH] [Audio Output] Fix serious memory leakage

For one thing, the example code I followed was Swift and handled
auto-releasing handles in the background, while Objective-C requires
manual handle reference management. For another, there was no
autoreleasepool around the block handling the input audio chunks, which
need to be released as they are pulled out and disposed of. This also
contributed to memory leakage.

Signed-off-by: Christopher Snowhill
---
 Audio/Output/OutputAVFoundation.m | 45 +++++++++++++++++++++++++++++++++++----------
 1 file changed, 35 insertions(+), 10 deletions(-)

diff --git a/Audio/Output/OutputAVFoundation.m b/Audio/Output/OutputAVFoundation.m
index d6b3b2621..84f122916 100644
--- a/Audio/Output/OutputAVFoundation.m
+++ b/Audio/Output/OutputAVFoundation.m
@@ -347,19 +347,23 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
 			if(stopping) break;
 
-			while(!stopping && [audioRenderer isReadyForMoreMediaData]) {
-				CMSampleBufferRef bufferRef = [self makeSampleBuffer];
+			@autoreleasepool {
+				while(!stopping && [audioRenderer isReadyForMoreMediaData]) {
+					CMSampleBufferRef bufferRef = [self makeSampleBuffer];
 
-				if(bufferRef) {
-					CMTime chunkDuration = CMSampleBufferGetDuration(bufferRef);
+					if(bufferRef) {
+						CMTime chunkDuration = CMSampleBufferGetDuration(bufferRef);
 
-					outputPts = CMTimeAdd(outputPts, chunkDuration);
-					trackPts = CMTimeAdd(trackPts, chunkDuration);
+						outputPts = CMTimeAdd(outputPts, chunkDuration);
+						trackPts = CMTimeAdd(trackPts, chunkDuration);
 
-					[audioRenderer enqueueSampleBuffer:bufferRef];
-				} else {
-					stopFlush = YES;
-					break;
+						[audioRenderer enqueueSampleBuffer:bufferRef];
+
+						CFRelease(bufferRef);
+					} else {
+						stopFlush = YES;
+						break;
+					}
 				}
 			}
@@ -640,6 +644,11 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
 		layout.mChannelBitmap = streamChannelConfig;
 	}
 
+	if(audioFormatDescription) {
+		CFRelease(audioFormatDescription);
+		audioFormatDescription = NULL;
+	}
+
 	if(CMAudioFormatDescriptionCreate(kCFAllocatorDefault, &streamFormat, sizeof(layout), &layout, 0, NULL, NULL, &audioFormatDescription) != noErr) {
 		return;
 	}
@@ -674,8 +683,10 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
 	OSStatus err = CMAudioSampleBufferCreateReadyWithPacketDescriptions(kCFAllocatorDefault, blockBuffer, audioFormatDescription, samplesRendered, outputPts, nil, &sampleBuffer);
 	if(err != noErr) {
+		CFRelease(blockBuffer);
 		return nil;
 	}
 
+	CFRelease(blockBuffer);
 	return sampleBuffer;
 }
@@ -744,6 +755,7 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
 		status = AudioUnitRender(_eq, NULL, &timeStamp, 0, samplesRendered, ioData);
 		if(status != noErr) {
+			CFRelease(blockListBuffer);
 			return 0;
 		}
 
@@ -761,20 +773,27 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
 			status = CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, nil, dataByteSize, kCFAllocatorDefault, nil, 0, dataByteSize, kCMBlockBufferAssureMemoryNowFlag, &blockBuffer);
 			if(status != noErr || !blockBuffer) {
+				CFRelease(blockListBuffer);
 				return 0;
 			}
 
 			status = CMBlockBufferReplaceDataBytes(samplePtr, blockBuffer, 0, dataByteSize);
 			if(status != noErr) {
+				CFRelease(blockBuffer);
+				CFRelease(blockListBuffer);
 				return 0;
 			}
 
 			status = CMBlockBufferAppendBufferReference(blockListBuffer, blockBuffer, 0, CMBlockBufferGetDataLength(blockBuffer), 0);
 			if(status != noErr) {
+				CFRelease(blockBuffer);
+				CFRelease(blockListBuffer);
 				return 0;
 			}
+
+			CFRelease(blockBuffer);
 		}
 
 		if(i == 0) {
@@ -809,6 +828,8 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
 	stopCompleted = NO;
 	commandStop = NO;
 
+	audioFormatDescription = NULL;
+
 	running = NO;
 	stopping = NO;
 	stopped = NO;
@@ -1078,6 +1099,10 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
 			usleep(5000);
 		}
 	}
+	if(audioFormatDescription) {
+		CFRelease(audioFormatDescription);
+		audioFormatDescription = NULL;
+	}
 	if(_eq) {
 		[outputController endEqualizer:_eq];
 		if(eqInitialized) {
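
For reference, a minimal sketch of the ownership pattern the first hunk establishes. This is not code from OutputAVFoundation.m; the function name and the block parameter below are hypothetical. The point it illustrates: a CMSampleBufferRef obtained from a create-style call carries a +1 reference the caller owns, so it must be CFRelease'd once the renderer has accepted it, and the drain loop runs inside an @autoreleasepool so any autoreleased objects created per chunk are reclaimed each pass instead of piling up on the render thread.

// Hypothetical standalone helper; names are illustrative only.
#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>

static void drainChunks(AVSampleBufferAudioRenderer *renderer,
                        CMSampleBufferRef (^nextBuffer)(void),
                        volatile BOOL *stopping, volatile BOOL *stopFlush) {
	@autoreleasepool {
		while(!*stopping && [renderer isReadyForMoreMediaData]) {
			// The buffer arrives with a +1 reference that this code owns (CF Create Rule).
			CMSampleBufferRef bufferRef = nextBuffer();

			if(bufferRef) {
				// The renderer retains whatever it still needs, so the local
				// reference can be dropped immediately after enqueueing.
				[renderer enqueueSampleBuffer:bufferRef];
				CFRelease(bufferRef);
			} else {
				*stopFlush = YES;
				break;
			}
		}
	}
}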
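The error-path hunks in the render callback follow the same Create Rule: every buffer obtained from a *Create* function must be released on every exit, including the early returns taken when a later call fails. The helper below is a hypothetical sketch of that pattern for a single buffer; the real code additionally owns blockListBuffer and releases it on those paths as well.

// Hypothetical helper, not part of the patch; shows release-on-error handling.
#import <CoreMedia/CoreMedia.h>

static CMBlockBufferRef copyChunkBlockBuffer(const void *samplePtr, size_t dataByteSize) {
	CMBlockBufferRef blockBuffer = NULL;

	OSStatus status = CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, NULL,
	                                                     dataByteSize, kCFAllocatorDefault,
	                                                     NULL, 0, dataByteSize,
	                                                     kCMBlockBufferAssureMemoryNowFlag,
	                                                     &blockBuffer);
	if(status != noErr || !blockBuffer) {
		return NULL; // nothing was created, so there is nothing to release
	}

	status = CMBlockBufferReplaceDataBytes(samplePtr, blockBuffer, 0, dataByteSize);
	if(status != noErr) {
		CFRelease(blockBuffer); // the failure path still owes this release
		return NULL;
	}

	// The "copy" prefix signals that the caller now owns the +1 reference.
	return blockBuffer;
}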