How to fix a leaking CVPixelBuffer

Please tell me where this code leaks…

// Here I make a video from the images in the documents directory

    - (void)testCompressionSession:(NSString *)path
    {
        if ([[NSFileManager defaultManager] fileExistsAtPath:path]) {
            [[NSFileManager defaultManager] removeItemAtPath:path error:nil];
        }

        NSArray *array = [dictInfo objectForKey:@"sortedKeys"];
        NSString *betaCompressionDirectory = path;
        NSError *error = nil;

        unlink([betaCompressionDirectory UTF8String]);
        NSLog(@"array = %@", array);

        NSData *imgDataTmp = [NSData dataWithContentsOfFile:[projectPath stringByAppendingPathComponent:[array objectAtIndex:0]]];
        NSLog(@"link : %@", [projectPath stringByAppendingPathComponent:[array objectAtIndex:0]]);
        CGSize size = CGSizeMake([UIImage imageWithData:imgDataTmp].size.width,
                                 [UIImage imageWithData:imgDataTmp].size.height);

        //---- initialize compression engine
        NSLog(@"size : w : %f, h : %f", size.width, size.height);

        AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:betaCompressionDirectory]
                                                               fileType:AVFileTypeQuickTimeMovie
                                                                  error:&error];
        NSParameterAssert(videoWriter);
        if (error)
            NSLog(@"error = %@", [error localizedDescription]);

        NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                       AVVideoCodecH264, AVVideoCodecKey,
                                       [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                       [NSNumber numberWithInt:size.height], AVVideoHeightKey, nil];
        AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                             outputSettings:videoSettings];

        NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
                                                               [NSNumber numberWithInt:kCVPixelFormatType_32ARGB],
                                                               kCVPixelBufferPixelFormatTypeKey, nil];
        AVAssetWriterInputPixelBufferAdaptor *adaptor =
            [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                             sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];

        NSParameterAssert(writerInput);
        NSParameterAssert([videoWriter canAddInput:writerInput]);

        if ([videoWriter canAddInput:writerInput])
            NSLog(@"I can add this input");
        else
            NSLog(@"I can't add this input");

        [videoWriter addInput:writerInput];
        [videoWriter startWriting];
        [videoWriter startSessionAtSourceTime:kCMTimeZero];

        dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);

        [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
            //BOOL isEffect = NO;
            int i = 0;
            float totalTime = 0.0f;
            float nextTime = 0;
            if ([writerInput isReadyForMoreMediaData]) {
                while (1) {
                    if (i <= [array count] && i > 0) {
                        nextTime = [[dictInfo objectForKey:[array objectAtIndex:i-1]] floatValue];
                    }
                    totalTime += i == 0 ? 0 : nextTime;
                    CMTime presentTime = CMTimeMake(totalTime, 1);
                    printf("presentTime : %f ", CMTimeGetSeconds(presentTime));

                    if (i >= [array count]) {
                        // last frame: append it, then finish the writer
                        NSData *imgData = [NSData dataWithContentsOfFile:[projectPath stringByAppendingPathComponent:[array objectAtIndex:i-1]]];
                        UIImage *tmpImg = [UIImage imageWithData:imgData];
                        tmpImg = [self imageWithImage:tmpImg scaledToSize:size];
                        while (!writerInput.readyForMoreMediaData) {
                            sleep(0.01);
                        }
                        CVPixelBufferRef buffer = NULL;
                        buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[tmpImg CGImage] size:size];
                        [adaptor appendPixelBuffer:buffer
                              withPresentationTime:CMTimeMake(totalTime-nextTime+(nextTime/2.0), 1)];
                        NSLog(@"%f", totalTime-nextTime+(nextTime/2.0));

                        [writerInput markAsFinished];
                        [videoWriter finishWriting];
                        //CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
                        [videoWriter release];
                        break;
                    } else {
                        NSData *imgData = [NSData dataWithContentsOfFile:[projectPath stringByAppendingPathComponent:[array objectAtIndex:i]]];
                        UIImage *tmpImg = [UIImage imageWithData:imgData];
                        //tmpImg = [self imageWithImage:tmpImg scaledToSize:size];
                        //UIImageWriteToSavedPhotosAlbum(tmpImg, nil, nil, nil);
                        while (!adaptor.assetWriterInput.readyForMoreMediaData && !writerInput.readyForMoreMediaData) {
                            sleep(0.01);
                        }
                        CVPixelBufferRef buffer = NULL;
                        buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[tmpImg CGImage] size:size];
                        if (buffer) {
                            if (![adaptor appendPixelBuffer:buffer withPresentationTime:presentTime])
                                NSLog(@"FAIL");
                            else
                                NSLog(@"Success:%d", i);
                            CVPixelBufferRelease(buffer);
                        }
                    }
                    i++;
                }
            }
        }];
    }

// Here I make a CVPixelBufferRef from a CGImageRef

    - (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
    {
        NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                                 [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                                 [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
        CVPixelBufferRef pxbuffer = NULL;

        CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height,
                                              kCVPixelFormatType_32ARGB, (CFDictionaryRef)options, &pxbuffer);
        NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

        CVPixelBufferLockBaseAddress(pxbuffer, 0);
        void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
        NSParameterAssert(pxdata != NULL);

        CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8,
                                                     4 * size.width, rgbColorSpace,
                                                     kCGImageAlphaPremultipliedFirst);
        NSParameterAssert(context);
        CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);

        CGColorSpaceRelease(rgbColorSpace);
        CGContextRelease(context);
        CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

        return pxbuffer;
    }
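Note that CVPixelBufferCreate follows the Core Foundation Create Rule: the buffer this method returns carries a +1 retain count, and it is the caller's job to balance it. A minimal sketch of the expected calling pattern (tmpImg, adaptor, and presentTime stand in for the objects in the writer block above):

    CVPixelBufferRef buffer = [self pixelBufferFromCGImage:[tmpImg CGImage] size:size];
    if (buffer) {
        [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
        CVPixelBufferRelease(buffer);  // balances the +1 from CVPixelBufferCreate
    }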

The leak log is:

    CVObject         CFRetain  00:37.957.985  2  0x1ecae0  0   CoreVideo  CVPixelBufferPool::createPixelBuffer(__CFAllocator const *, __CFDictionary const *, int *)
    Malloc 96 Bytes  Malloc    00:40.015.872  1  0x1f0750  96  CoreVideo  CVBuffer::init()
    CVPixelBuffer    Malloc    00:40.969.716  1  0x1f2570  96  CoreVideo  CVObject::alloc(unsigned long, __CFAllocator const *, unsigned long, unsigned long)

Look here:

    CVPixelBufferRef buffer = NULL;
    CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &buffer);
    CVPixelBufferLockBaseAddress(buffer, 0);
    buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[tmpImg CGImage] size:size];

First you create a pixel buffer and store its address in the buffer variable, then the same variable is overwritten by the result of pixelBufferFromCGImage, so the buffer it previously pointed to can never be released.
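To fix that version without restructuring it, the pool buffer would have to be unlocked and released before the variable is reassigned; a sketch under the same surrounding code:

    CVPixelBufferRef buffer = NULL;
    CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &buffer);
    CVPixelBufferLockBaseAddress(buffer, 0);
    // ... either draw into the pool buffer here, or, if pixelBufferFromCGImage:
    // is kept, balance the pool allocation before overwriting the variable:
    CVPixelBufferUnlockBaseAddress(buffer, 0);
    CVPixelBufferRelease(buffer);  // without this, the pool buffer leaks
    buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[tmpImg CGImage] size:size];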

EDIT

You just removed the code my answer referred to, so it no longer applies.

Now for this part:

    CVPixelBufferRef buffer = NULL;
    buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[tmpImg CGImage] size:size];
    [adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(totalTime-nextTime+(nextTime/2.0), 1)];
    NSLog(@"%f", totalTime-nextTime+(nextTime/2.0));
    ...

You have a commented-out CVPixelBufferPoolRelease(adaptor.pixelBufferPool). That is fine, because in this version you no longer use a pixel buffer pool, but what is missing here is a call to CVPixelBufferRelease(buffer).
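Concretely, the last-frame branch just needs the same release that the else branch already performs; a sketch of that branch with only the missing call added (everything else as in the question):

    CVPixelBufferRef buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[tmpImg CGImage] size:size];
    if (buffer) {
        [adaptor appendPixelBuffer:buffer
              withPresentationTime:CMTimeMake(totalTime - nextTime + (nextTime / 2.0), 1)];
        CVPixelBufferRelease(buffer);  // the missing call: balances the +1 from pixelBufferFromCGImage
    }
    [writerInput markAsFinished];
    [videoWriter finishWriting];
    [videoWriter release];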