首页
学习
活动
专区
圈层
工具
发布
社区首页 >问答首页 >如何发现ios xcode中的内存泄漏?

如何发现ios xcode中的内存泄漏?
EN

Stack Overflow用户
提问于 2014-08-12 20:22:43
回答 1查看 692关注 0票数 0

这是我的RTSP流媒体ios应用程序,带有FFMPEG解码器,流媒体效果很好,但运行时内存不断增加。请帮帮我,是不是内存泄露了?我怎样才能追踪泄漏的情况?

这是我的视频流类: RTSPPlayer.m

代码语言:javascript
复制
#import "RTSPPlayer.h"
#import "Utilities.h"
#import "AudioStreamer.h"

// Class extension: private audio-controller property (retain == strong under ARC).
@interface RTSPPlayer ()
@property (nonatomic, retain) AudioStreamer *audioController;
@end

// Forward declarations for private helpers implemented below.
// NOTE(review): this is a named category used purely for declarations; the
// modern idiom would be to fold these into the class extension above.
@interface RTSPPlayer (private)
-(void)convertFrameToRGB;
-(UIImage *)imageFromAVPicture:(AVPicture)pict width:(int)width height:(int)height;
-(void)setupScaler;
@end

@implementation RTSPPlayer

// Explicit synthesis (pre-auto-synthesis style). audioController is backed by
// the _audioController ivar; the remaining properties keep their declared
// names as ivar names.
@synthesize audioController = _audioController;
@synthesize audioPacketQueue,audioPacketQueueSize;
@synthesize _audioStream,_audioCodecContext;
@synthesize emptyAudioBuffer;

@synthesize outputWidth, outputHeight;

// Custom setter: updates the target output width and rebuilds the scaler,
// but only when the value actually changes (recreating sws context is costly).
- (void)setOutputWidth:(int)newValue
{
    if (outputWidth == newValue) return;
    outputWidth = newValue;
    [self setupScaler];
}

// Custom setter: updates the target output height and rebuilds the scaler,
// but only when the value actually changes.
- (void)setOutputHeight:(int)newValue
{
    if (outputHeight == newValue) return;
    outputHeight = newValue;
    [self setupScaler];
}

// Returns the most recently decoded frame as a UIImage, or nil when no frame
// has been decoded yet (first data plane still empty).
- (UIImage *)currentImage
{
    if (pFrame->data[0] == NULL) {
        return nil;
    }
    [self convertFrameToRGB];
    UIImage *frameImage = [self imageFromAVPicture:picture
                                             width:outputWidth
                                            height:outputHeight];
    return frameImage;
}

// Total duration in seconds. The container reports duration in AV_TIME_BASE
// units, so divide it out.
- (double)duration
{
    const double containerDuration = (double)pFormatCtx->duration;
    return containerDuration / AV_TIME_BASE;
}

// Presentation time (seconds) of the last packet read, converted from the
// video stream's time base.
- (double)currentTime
{
    AVRational tb = pFormatCtx->streams[videoStream]->time_base;
    double seconds = packet.pts * (double)tb.num / tb.den;
    return seconds;
}

// Native width of the decoded video, as reported by the codec context.
- (int)sourceWidth
{
    const int nativeWidth = pCodecCtx->width;
    return nativeWidth;
}

// Native height of the decoded video, as reported by the codec context.
- (int)sourceHeight
{
    const int nativeHeight = pCodecCtx->height;
    return nativeHeight;
}

/// Opens the given path / RTSP URL and prepares the video (and, when present,
/// audio) decoder.
///
/// @param moviePath Path or URL understood by FFmpeg (e.g. an rtsp:// URL).
/// @param usesTcp   When YES, forces RTSP interleaved TCP transport.
/// @return The initialized player, or nil on any setup failure.
- (id)initWithVideo:(NSString *)moviePath usesTcp:(BOOL)usesTcp
{
    if (!(self=[super init])) return nil;

    AVCodec         *pCodec;
    int              openResult;

    // Register all formats and codecs
    avcodec_register_all();
    av_register_all();
    avformat_network_init();

    // Set the RTSP Options
    AVDictionary *opts = 0;
    if (usesTcp)
        av_dict_set(&opts, "rtsp_transport", "tcp", 0);

    openResult = avformat_open_input(&pFormatCtx, [moviePath UTF8String], NULL, &opts);

    // LEAK FIX: avformat_open_input consumes the options it recognizes and
    // returns the unconsumed ones in `opts`; the dictionary must always be
    // freed by the caller. The original code never freed it.
    av_dict_free(&opts);

    if (openResult != 0) {
        av_log(NULL, AV_LOG_ERROR, "Couldn't open file\n");
        goto initError;
    }

    // Retrieve stream information
    if (avformat_find_stream_info(pFormatCtx,NULL) < 0) {
        av_log(NULL, AV_LOG_ERROR, "Couldn't find stream information\n");
        goto initError;
    }

    // Find the first video and audio streams.
    videoStream=-1;
    audioStream=-1;

    for (int i=0; i<pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO) {
            NSLog(@"found video stream");
            videoStream=i;
        }

        if (pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_AUDIO) {
            audioStream=i;
            NSLog(@"found audio stream");
        }
    }

    // BUG FIX: the original only bailed when *both* streams were missing, but
    // everything below (and currentImage / currentTime / seekTime) indexes
    // streams[videoStream]; with videoStream == -1 that reads out of bounds.
    // This player cannot operate without a video stream, so fail cleanly.
    if (videoStream == -1) {
        goto initError;
    }

    // Get a pointer to the codec context for the video stream
    pCodecCtx = pFormatCtx->streams[videoStream]->codec;

    // Find the decoder for the video stream
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    if (pCodec == NULL) {
        av_log(NULL, AV_LOG_ERROR, "Unsupported codec!\n");
        goto initError;
    }

    // Open codec
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        av_log(NULL, AV_LOG_ERROR, "Cannot open video decoder\n");
        goto initError;
    }

    if (audioStream > -1 ) {
        NSLog(@"set up audiodecoder");
        [self setupAudioDecoder];
    }

    // Allocate video frame
    pFrame = avcodec_alloc_frame();

    outputWidth = pCodecCtx->width;
    self.outputHeight = pCodecCtx->height;

    return self;

initError:
    // Under ARC, returning nil releases self and runs -dealloc, which closes
    // pFormatCtx / pCodecCtx if they were set — no explicit cleanup needed.
    return nil;
}


// (Re)creates the RGB output picture and the software scaler for the current
// outputWidth/outputHeight. Safe to call repeatedly: FFmpeg's free functions
// tolerate NULL / zeroed arguments, so the first call simply skips teardown.
- (void)setupScaler
{
    // Drop any previously allocated output picture and scaler context.
    avpicture_free(&picture);
    sws_freeContext(img_convert_ctx);

    // Allocate the packed RGB picture the decoder output is scaled into.
    avpicture_alloc(&picture, PIX_FMT_RGB24, outputWidth, outputHeight);

    // Scaler from the codec's native size/pixel format to RGB24 at the
    // requested output size.
    img_convert_ctx = sws_getContext(pCodecCtx->width,
                                     pCodecCtx->height,
                                     pCodecCtx->pix_fmt,
                                     outputWidth,
                                     outputHeight,
                                     PIX_FMT_RGB24,
                                     SWS_FAST_BILINEAR,
                                     NULL, NULL, NULL);
}

// Seeks the video stream to the given position (seconds) and flushes the
// decoder so stale reference frames are not used after the jump.
- (void)seekTime:(double)seconds
{
    AVRational tb = pFormatCtx->streams[videoStream]->time_base;
    int64_t target = (int64_t)((double)tb.den / tb.num * seconds);
    avformat_seek_file(pFormatCtx, videoStream, target, target, target, AVSEEK_FLAG_FRAME);
    avcodec_flush_buffers(pCodecCtx);
}

// Tears down all FFmpeg state. ARC handles the Objective-C objects; this
// method only needs to release the C-side resources.
- (void)dealloc
{
    // Free scaler
    sws_freeContext(img_convert_ctx);

    // Free RGB picture
    avpicture_free(&picture);

    // Free the packet that was allocated by av_read_frame
    av_free_packet(&packet);

    // Free the YUV frame
    av_free(pFrame);

    // LEAK FIX: _audioBuffer is av_malloc'd in setupAudioDecoder but the
    // original dealloc never freed it.
    if (_audioBuffer) {
        av_free(_audioBuffer);
        _audioBuffer = NULL;
    }

    // LEAK FIX: the audio codec context is opened in setupAudioDecoder but
    // was never closed. Close both codecs before closing the input (the
    // contexts are owned by pFormatCtx's streams).
    if (_audioCodecContext) avcodec_close(_audioCodecContext);
    if (pCodecCtx) avcodec_close(pCodecCtx);

    // Close the video file
    if (pFormatCtx) avformat_close_input(&pFormatCtx);

    // NOTE(review): any AVPackets still sitting in audioPacketQueue keep their
    // payload buffers alive — confirm the queue is drained/freed before
    // release, or free each queued packet here.
    [_audioController _stopAudio];
    _audioController = nil;

    audioPacketQueue = nil;

    audioPacketQueueLock = nil;

    // ARC: no [super dealloc] and no explicit -release calls.
}

/// Reads packets until one complete video frame has been decoded, queueing
/// any audio packets encountered along the way.
/// @return YES when a full video frame was decoded, NO at end of stream.
- (BOOL)stepFrame
{
    int frameFinished = 0;

    while (!frameFinished && av_read_frame(pFormatCtx, &packet) >= 0) {
        // Is this a packet from the video stream?
        if (packet.stream_index == videoStream) {
            // Decode video frame
            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
        }

        if (packet.stream_index == audioStream) {
            // Queue a shallow struct copy of the packet; its payload buffer
            // stays owned by the queue entry, so it must NOT be freed here.
            // NOTE(review): the payload is never freed by the consumer either
            // (see -readPacket) — confirm against AudioStreamer.
            [audioPacketQueueLock lock];

            audioPacketQueueSize += packet.size;
            [audioPacketQueue addObject:[NSMutableData dataWithBytes:&packet length:sizeof(packet)]];

            [audioPacketQueueLock unlock];

            if (!primed) {
                primed = YES;
                [_audioController _startAudio];
            }

            if (emptyAudioBuffer) {
                [_audioController enqueueBuffer:emptyAudioBuffer];
            }
        } else {
            // LEAK FIX: every packet returned by av_read_frame owns allocated
            // data and must be released with av_free_packet. The original code
            // never freed any packet — the primary cause of the unbounded
            // memory growth this class exhibited. Video and other-stream
            // packets are fully consumed at this point, so free them here.
            av_free_packet(&packet);
        }
    }

    return frameFinished != 0;
}

// Converts/scales the decoded YUV frame (pFrame) into the RGB output picture
// using the scaler built in -setupScaler.
- (void)convertFrameToRGB
{
    sws_scale(img_convert_ctx,
              pFrame->data, pFrame->linesize,
              0, pCodecCtx->height,
              picture.data, picture.linesize);
}

// Wraps an RGB24 AVPicture in a UIImage without copying the pixel data.
// NOTE(review): the CFData is created no-copy with kCFAllocatorNull (it does
// not own the bytes), so the image is only safe while `picture`'s buffer is
// alive and unmodified — confirm callers consume it before the next
// convertFrameToRGB / setupScaler.
- (UIImage *)imageFromAVPicture:(AVPicture)pict width:(int)width height:(int)height
{
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault;
    // No-copy CFData over the first (packed RGB) plane; size = stride * rows.
    CFDataRef data = CFDataCreateWithBytesNoCopy(kCFAllocatorDefault, pict.data[0], pict.linesize[0]*height,kCFAllocatorNull);
    CGDataProviderRef provider = CGDataProviderCreateWithCFData(data);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    // 8 bits per component, 24 bits per pixel, bytesPerRow = linesize[0].
    CGImageRef cgImage = CGImageCreate(width, 
                                       height, 
                                       8, 
                                       24, 
                                       pict.linesize[0], 
                                       colorSpace, 
                                       bitmapInfo, 
                                       provider, 
                                       NULL, 
                                       NO, 
                                       kCGRenderingIntentDefault);
    CGColorSpaceRelease(colorSpace);
    UIImage *image = [UIImage imageWithCGImage:cgImage];

    // Balance every Create above (Core Foundation ownership rule).
    CGImageRelease(cgImage);
    CGDataProviderRelease(provider);
    CFRelease(data);

    return image;
}

/// Opens the decoder for the previously discovered audio stream and
/// (re)creates the audio packet queue, its lock, and the audio controller.
/// Logs and returns early (leaving audio disabled) if no decoder is found
/// or it fails to open.
- (void)setupAudioDecoder
{
    if (audioStream >= 0) {
        _audioBufferSize = AVCODEC_MAX_AUDIO_FRAME_SIZE;
        _audioBuffer = av_malloc(_audioBufferSize);
        _inBuffer = NO;

        // Codec context and stream are owned by pFormatCtx; we just borrow them.
        _audioCodecContext = pFormatCtx->streams[audioStream]->codec;
        _audioStream = pFormatCtx->streams[audioStream];

        AVCodec *codec = avcodec_find_decoder(_audioCodecContext->codec_id);
        if (codec == NULL) {
            NSLog(@"Not found audio codec.");
            return;
        }

        if (avcodec_open2(_audioCodecContext, codec, NULL) < 0) {
            NSLog(@"Could not open audio codec.");
            return;
        }

        // Replace any previous queue and lock; ARC releases the old objects
        // on reassignment.
        audioPacketQueue = [[NSMutableArray alloc] init];
        audioPacketQueueLock = [[NSLock alloc] init];

        // Stop and replace any previous audio controller.
        if (_audioController) {
            [_audioController _stopAudio];
            _audioController = nil;
        }
        _audioController = [[AudioStreamer alloc] initWithStreamer:self];
    } else {
        // BUG FIX: the original branch executed
        //     pFormatCtx->streams[audioStream]->discard = AVDISCARD_ALL;
        // but this branch only runs when audioStream < 0, so it indexed
        // streams[-1] — out-of-bounds / undefined behavior. There is no
        // stream to discard here; just normalize the sentinel.
        audioStream = -1;
    }
}

// Called by the audio consumer when it has finished draining the current
// packet; clears the in-use flag so -readPacket will fetch the next one.
- (void)nextPacket
{
    _inBuffer = NO;
}

/// Returns the next queued audio packet for the audio consumer, or the
/// current packet while it is still being drained (_inBuffer / size > 0).
/// When the queue is empty, returns the (exhausted) current packet so the
/// caller sees size == 0 instead of crashing.
- (AVPacket*)readPacket
{
    if (_currentPacket.size > 0 || _inBuffer) return &_currentPacket;

    [audioPacketQueueLock lock];

    // BUG FIX: the original fetched objectAtIndex:0 outside the lock and
    // without an emptiness check — it raced the producer (-stepFrame) and
    // threw NSRangeException whenever the queue was empty.
    NSMutableData *packetData = [audioPacketQueue firstObject];
    if (packetData == nil) {
        [audioPacketQueueLock unlock];
        return &_currentPacket;
    }

    _packet = [packetData mutableBytes];

    if (_packet->dts != AV_NOPTS_VALUE) {
        _packet->dts += av_rescale_q(0, AV_TIME_BASE_Q, _audioStream->time_base);
    }

    if (_packet->pts != AV_NOPTS_VALUE) {
        _packet->pts += av_rescale_q(0, AV_TIME_BASE_Q, _audioStream->time_base);
    }

    audioPacketQueueSize -= _packet->size;

    // BUG FIX: copy the struct BEFORE removing the NSMutableData from the
    // queue. The original removed it first and then dereferenced _packet,
    // which points into the removed object's bytes — a use-after-free once
    // ARC released it.
    _currentPacket = *(_packet);

    [audioPacketQueue removeObjectAtIndex:0];
    [audioPacketQueueLock unlock];

    // NOTE(review): _currentPacket's payload buffer is never av_free_packet'd
    // anywhere, so each audio packet's data leaks — confirm where the
    // consumer finishes with it and free it there.
    return &_currentPacket;
}

// Stops audio playback and clears the primed flag so the next audio packet
// seen by -stepFrame will restart the audio controller.
- (void)closeAudio
{
    [_audioController _stopAudio];
    primed=NO;
}

@end
EN

回答 1

Stack Overflow用户

发布于 2014-08-12 20:26:09

作为格式化和图像的答案。

使用 Instruments 检查内存泄漏,以及那些虽被保留、并非泄漏但实际上已不再使用的内存(即仍有指针指向、却再也用不到的内存)。请在 Instruments 的 Allocations(分配)工具中使用 Mark Generation(堆快照)功能。

有关使用堆快照(Heapshot)查找内存持续增长问题的方法,请参阅:bbum 的博客

基本思路是:在 Instruments 中运行 Allocations(分配)工具,先做一次堆快照,然后运行一轮你的代码逻辑,再做一次堆快照,如此重复 3 到 4 次。这样就能看出在每次迭代期间分配了但未被释放的内存。

要弄清楚结果,请查看各个分配。

如果您需要查看对象在何处被 retain、release 和 autorelease,请使用 Instruments:

在 Instruments 中运行,在 Allocations 工具中开启 "Record Reference Counts" 选项(对于 Xcode 5 及更低版本,必须先停止录制才能设置该选项)。让应用程序运行一段时间后停止录制,逐层展开分配记录,您就能看到所有 retain、release 和 autorelease 发生的位置。

票数 1
EN
页面原文内容由Stack Overflow提供。腾讯云小微IT领域专用引擎提供翻译支持
原文链接:

https://stackoverflow.com/questions/25264208

复制
相关文章

相似问题

领券
问题归档专栏文章快讯文章归档关键词归档开发者手册归档开发者手册 Section 归档