0

我做了一个示例项目:首先创建一个音频单元渲染回调,然后每 1/20 秒分配一次内存,并在视图控制器收到低内存警告时立即释放。通知只发送了一次;第二次视图控制器没有收到任何通知,并因内存压力而崩溃。这只发生在 iOS 7 上,在 iOS 7 之前一切正常。

这是代码:

#import <AudioUnit/AudioUnit.h>
#import <AudioToolbox/AudioToolbox.h>
#import <AVFoundation/AVFoundation.h>
// Global RemoteIO unit, shared with the C render callback below (the callback's
// refCon is NULL, so it reaches the unit through this global instead).
AudioUnit audioUnit;

// Private class extension.
@interface ViewController ()
    // Ever-growing buffer used to provoke memory warnings; reset to 1 byte in
    // -didReceiveMemoryWarning.
    // NOTE(review): `atomic` looks unnecessary — every access in this file
    // happens on the main thread (timer + memory-warning handler). Confirm
    // before changing.
    @property (atomic,strong) NSMutableData *data;
@end

@implementation ViewController

#pragma mark - Lifecycle

// Starts the memory-pressure demo: a timer grows `data` 20x per second, and
// (optionally) a RemoteIO audio unit runs a mic pass-through.
- (void)viewDidLoad {
    [super viewDidLoad];
    self.data = [NSMutableData dataWithLength:1];
    // NOTE(review): a scheduled repeating NSTimer retains its target and is
    // never stored or invalidated here, so this controller can never
    // deallocate while the timer fires. Acceptable only for a throwaway demo.
    [NSTimer scheduledTimerWithTimeInterval:1 / 20.0
                                     target:self
                                   selector:@selector(increaseMemory)
                                   userInfo:nil
                                    repeats:YES];
    // Comment out setupAudio to test without audio.
    [self setupAudio];
}

// Timer callback: appends ~10 MB of zeroed bytes per tick.
- (void)increaseMemory {
    [_data appendData:[NSMutableData dataWithLength:10000000]];
    // -length returns NSUInteger; the original `%i` expects int and logs
    // garbage on 64-bit. Use %lu with an explicit cast.
    NSLog(@"Allocating %lu Bytes", (unsigned long)[_data length]);
}

#pragma mark - Audio

// Render callback: pulls microphone input (bus 1) directly into the output
// buffers — a straight pass-through.
// NOTE(review): NSLog allocates and takes locks, which should never happen on
// the real-time audio thread; kept only because this is diagnostic demo code.
OSStatus renderCallback(void *userData, AudioUnitRenderActionFlags *actionFlags,
                        const AudioTimeStamp *audioTimeStamp, UInt32 busNumber,
                        UInt32 numFrames, AudioBufferList *buffers) {
    OSStatus status = AudioUnitRender(audioUnit, actionFlags, audioTimeStamp,
                                      1, numFrames, buffers);
    if (status != noErr) {
        NSLog(@"Render callback error");
    }
    // status is noErr on success, so returning it covers both paths.
    return status;
}

// Configures the audio session and a mono 32-bit-float RemoteIO pass-through
// at 44.1 kHz, then starts it. Bails silently on any hard failure.
// NOTE(review): the AudioSession C API is deprecated since iOS 7; kept as-is
// to reproduce the reported behavior. AVAudioSession is the replacement.
- (void)setupAudio {
    int sampleRate = 44100;
    int bufferSize = 512;

    if (AudioSessionInitialize(NULL, NULL, NULL, NULL) != noErr) {
        return;
    }
    if (AudioSessionSetActive(true) != noErr) {
        return;
    }
    // Request ~11.6 ms I/O buffers (512 frames at 44.1 kHz).
    Float32 bufferSizeInSec = bufferSize * 1.0f / sampleRate;
    if (AudioSessionSetProperty(kAudioSessionProperty_PreferredHardwareIOBufferDuration,
                                sizeof(Float32), &bufferSizeInSec) != noErr) {
        return;
    }
    UInt32 overrideCategory = 1;
    UInt32 audioCategory = kAudioSessionCategory_PlayAndRecord;
    if (AudioSessionSetProperty(kAudioSessionProperty_AudioCategory,
                                sizeof(UInt32), &audioCategory) != noErr) {
        return;
    }
    // Route output to the speaker instead of the receiver.
    if (AudioSessionSetProperty(kAudioSessionProperty_OverrideCategoryDefaultToSpeaker,
                                sizeof(UInt32), &overrideCategory) != noErr) {
        // Less serious error, but you may want to handle it and bail here.
    }

    // -- RemoteIO unit.
    AudioComponentDescription componentDescription;
    componentDescription.componentType = kAudioUnitType_Output;
    componentDescription.componentSubType = kAudioUnitSubType_RemoteIO;
    componentDescription.componentManufacturer = kAudioUnitManufacturer_Apple;
    componentDescription.componentFlags = 0;
    componentDescription.componentFlagsMask = 0;
    AudioComponent component = AudioComponentFindNext(NULL, &componentDescription);
    // NOTE(review): `component` is not NULL-checked before use — TODO guard.
    if (AudioComponentInstanceNew(component, &audioUnit) != noErr) {
        return;
    }

    // Enable input on bus 1 (microphone); output on bus 0 is on by default.
    UInt32 enable = 1;
    if (AudioUnitSetProperty(audioUnit, kAudioOutputUnitProperty_EnableIO,
                             kAudioUnitScope_Input, 1, &enable, sizeof(UInt32)) != noErr) {
        return;
    }

    // Install the render callback on the output bus. refCon stays NULL; the
    // callback uses the global `audioUnit` instead.
    AURenderCallbackStruct callbackStruct;
    callbackStruct.inputProc = renderCallback;
    callbackStruct.inputProcRefCon = NULL;
    if (AudioUnitSetProperty(audioUnit, kAudioUnitProperty_SetRenderCallback,
                             kAudioUnitScope_Input, 0, &callbackStruct,
                             sizeof(AURenderCallbackStruct)) != noErr) {
        return;
    }

    // Mono, packed, native-endian 32-bit float PCM.
    AudioStreamBasicDescription streamDescription;
    streamDescription.mSampleRate = sampleRate;
    streamDescription.mFormatID = kAudioFormatLinearPCM;
    streamDescription.mFormatFlags = kAudioFormatFlagIsFloat |
                                     kAudioFormatFlagsNativeEndian |
                                     kAudioFormatFlagIsPacked;
    streamDescription.mBitsPerChannel = 32;
    streamDescription.mBytesPerFrame = 4;
    streamDescription.mChannelsPerFrame = 1;
    streamDescription.mBytesPerPacket = streamDescription.mBytesPerFrame *
                                        streamDescription.mChannelsPerFrame;
    streamDescription.mFramesPerPacket = 1;
    streamDescription.mReserved = 0;
    // Apply the format to both ends the app touches: input scope of the
    // output bus (0) and output scope of the input bus (1).
    if (AudioUnitSetProperty(audioUnit, kAudioUnitProperty_StreamFormat,
                             kAudioUnitScope_Input, 0, &streamDescription,
                             sizeof(streamDescription)) != noErr) {
        return;
    }
    if (AudioUnitSetProperty(audioUnit, kAudioUnitProperty_StreamFormat,
                             kAudioUnitScope_Output, 1, &streamDescription,
                             sizeof(streamDescription)) != noErr) {
        return;
    }
    if (AudioUnitInitialize(audioUnit) != noErr) {
        return;
    }
    if (AudioOutputUnitStart(audioUnit) != noErr) {
        return;
    }
    NSLog(@"Audio Unit setup Complete");
}

#pragma mark - Memory warnings

// Releases the balloon by replacing `data` with a fresh 1-byte buffer.
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    self.data = [NSMutableData dataWithLength:1];
    NSLog(@"MEMORY WARNING EVENT -> CLEARING DATA");
}
@end
4

1 回答 1

0

我遇到了同样的问题。在 iOS 7 上,我的做法是利用第一个内存警告通知来准备更彻底的解决方案:例如终止预定的计时器,向用户弹出内存不足、音频已关闭的提示,并释放一些内存。

于 2014-06-07T06:51:14.330 回答