How can I fix this?

Please tell me where the CVPixelBuffer is leaking in this code.

Here I make a video from images in the documents directory:

- (void) testCompressionSession:(NSString *)path 
{ 
if ([[NSFileManager defaultManager] fileExistsAtPath:path]) { 
    [[NSFileManager defaultManager] removeItemAtPath:path error:nil]; 
} 
NSArray *array = [dictInfo objectForKey:@"sortedKeys"]; 

NSString *betaCompressionDirectory = path; 
NSError *error = nil; 

unlink([betaCompressionDirectory UTF8String]); 

NSLog(@"array = %@",array); 
NSData *imgDataTmp = [NSData dataWithContentsOfFile:[projectPath stringByAppendingPathComponent:[array objectAtIndex:0]]]; 
NSLog(@"link : %@",[projectPath stringByAppendingPathComponent:[array objectAtIndex:0]]); 
CGSize size = CGSizeMake([UIImage imageWithData:imgDataTmp].size.width, [UIImage imageWithData:imgDataTmp].size.height); 
//----initialize compression engine 
NSLog(@"size : w : %f, h : %f",size.width,size.height); 
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:betaCompressionDirectory] 
                 fileType:AVFileTypeQuickTimeMovie 
                  error:&error]; 
NSParameterAssert(videoWriter); 
if(error) 
    NSLog(@"error = %@", [error localizedDescription]); 

NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey, 
           [NSNumber numberWithInt:size.width], AVVideoWidthKey, 
           [NSNumber numberWithInt:size.height], AVVideoHeightKey, nil]; 
AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings]; 

NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys: 
                 [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil]; 

AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput 
                               sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary]; 
NSParameterAssert(writerInput); 
NSParameterAssert([videoWriter canAddInput:writerInput]); 

if ([videoWriter canAddInput:writerInput]) 
    NSLog(@"I can add this input"); 
else 
    NSLog(@"i can't add this input"); 

[videoWriter addInput:writerInput]; 

[videoWriter startWriting]; 
[videoWriter startSessionAtSourceTime:kCMTimeZero]; 

dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL); 

[writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{ 
    //BOOL isEffect = NO; 
    int i = 0; 
    float totalTime = 0.0f; 
    float nextTime = 0; 
    if ([writerInput isReadyForMoreMediaData]) { 
     while (1) 
     { 
      if (i <= [array count] && i > 0) { 
       nextTime = [[dictInfo objectForKey:[array objectAtIndex:i-1]] floatValue]; 
      } 
      totalTime += i == 0 ? 0 : nextTime; 
      CMTime presentTime=CMTimeMake(totalTime, 1); 
      printf("presentTime : %f ",CMTimeGetSeconds(presentTime)); 
      if (i >= [array count]) 
      { 
       NSData *imgData = [NSData dataWithContentsOfFile:[projectPath stringByAppendingPathComponent:[array objectAtIndex:i-1]]]; 
       UIImage* tmpImg = [UIImage imageWithData:imgData]; 
       tmpImg = [self imageWithImage:tmpImg scaledToSize:size]; 
       while (!writerInput.readyForMoreMediaData) 
       { 
        sleep(0.01); 
       } 
       CVPixelBufferRef buffer = NULL; 
       buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[tmpImg CGImage] size:size]; 
       [adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(totalTime-nextTime+(nextTime/2.0), 1)]; 
       NSLog(@"%f",totalTime-nextTime+(nextTime/2.0)); 
       [writerInput markAsFinished]; 
       [videoWriter finishWriting]; 
       //CVPixelBufferPoolRelease(adaptor.pixelBufferPool); 
       [videoWriter release]; 
       break; 
      } else { 
       NSData *imgData = [NSData dataWithContentsOfFile:[projectPath stringByAppendingPathComponent:[array objectAtIndex:i]]]; 
       UIImage* tmpImg = [UIImage imageWithData:imgData]; 
       //tmpImg = [self imageWithImage:tmpImg scaledToSize:size]; 
       //UIImageWriteToSavedPhotosAlbum(tmpImg, nil, nil, nil); 
       while (!adaptor.assetWriterInput.readyForMoreMediaData && !writerInput.readyForMoreMediaData) 
       { 
        sleep(0.01); 
       } 
       CVPixelBufferRef buffer = NULL; 
       buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[tmpImg CGImage] size:size]; 
       if (buffer) 
       { 
        if(![adaptor appendPixelBuffer:buffer withPresentationTime:presentTime]) 
         NSLog(@"FAIL"); 
        else 
         NSLog(@"Success:%d",i); 
        CVPixelBufferRelease(buffer); 
       } 
      } 
      i++; 
     } 
    } 
}]; 
}

And here is where I make a CVPixelBufferRef from a CGImageRef:

- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size 
{ 
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys: 
         [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey, 
         [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil]; 
CVPixelBufferRef pxbuffer = NULL; 

CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef) options, &pxbuffer); 

NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL); 

CVPixelBufferLockBaseAddress(pxbuffer, 0); 
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer); 
NSParameterAssert(pxdata != NULL); 

CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB(); 
CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8, 4*size.width, rgbColorSpace, kCGImageAlphaPremultipliedFirst); 
NSParameterAssert(context); 

CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image); 
CGColorSpaceRelease(rgbColorSpace); 
CGContextRelease(context); 

CVPixelBufferUnlockBaseAddress(pxbuffer, 0); 
return pxbuffer; 
}
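
Note that CVPixelBufferCreate follows the Core Foundation Create rule, so the buffer this helper returns carries a +1 retain count that the caller owns. A minimal usage sketch (the cgImage variable here is hypothetical):

CVPixelBufferRef buffer = [self pixelBufferFromCGImage:cgImage size:size];
if (buffer)
{
    // ... hand the buffer to the adaptor or consume it some other way ...
    CVPixelBufferRelease(buffer); // balances the +1 from CVPixelBufferCreate
}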

This is the leak log:

CVObject      CFRetain  00:37.957.985  2  0x1ecae0  0   CoreVideo  CVPixelBufferPool::createPixelBuffer(__CFAllocator const *, __CFDictionary const *, int *)
Malloc 96 Bytes  Malloc  00:40.015.872  1  0x1f0750  96  CoreVideo  CVBuffer::init()
CVPixelBuffer    Malloc  00:40.969.716  1  0x1f2570  96  CoreVideo  CVObject::alloc(unsigned long, __CFAllocator const *, unsigned long, unsigned long)

Answer


Look here:

CVPixelBufferRef buffer = NULL; 
CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &buffer); 
CVPixelBufferLockBaseAddress(buffer, 0); 
buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[tmpImg CGImage] size:size]; 

First a pixel buffer is created and its address is stored in the buffer variable; then the same variable is overwritten with the buffer returned by pixelBufferFromCGImage, so the former contents can never be released. Just remove the code that creates and locks the unused buffer.
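
A minimal sketch of that fix, reusing the names from the question's code: keep only the buffer that pixelBufferFromCGImage returns, and release it after appending.

CVPixelBufferRef buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[tmpImg CGImage] size:size];
if (buffer)
{
    [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
    CVPixelBufferRelease(buffer); // releases the +1 reference created inside the helper
}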

Edit: The question has since been changed, so my answer above no longer applies. This version doesn't use the pixel buffer pool, which is fine, but I think you are missing a call to CVPixelBufferRelease here:

CVPixelBufferRef buffer = NULL; 
buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[tmpImg CGImage] size:size]; 
[adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(totalTime-nextTime+(nextTime/2.0), 1)]; 
NSLog(@"%f",totalTime-nextTime+(nextTime/2.0)); 
... 

You have commented out CVPixelBufferPoolRelease(adaptor.pixelBufferPool), which is fine since you no longer use a pool. But the buffer itself still has to be released: CVPixelBufferRelease(buffer).
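
Put concretely, here is a sketch of the corrected last-frame branch, assuming everything else stays as in the question:

CVPixelBufferRef buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[tmpImg CGImage] size:size];
[adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(totalTime-nextTime+(nextTime/2.0), 1)];
CVPixelBufferRelease(buffer); // the release that is missing in the question's code
[writerInput markAsFinished];
[videoWriter finishWriting];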


Sorry, but this doesn't help me... –


Just edited: I've pointed out another location that looks like a pixel buffer leak. –


Thank you. ... I have another question: when I add more image buffers, the memory in use grows very large and the application crashes. –
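
One plausible cause for that, not covered in the thread above: every pass through the while loop creates autoreleased NSData and UIImage objects, and under manual reference counting none of them are freed until the dispatch block returns. A minimal sketch of draining them per frame:

while (1)
{
    // Hypothetical addition: a per-frame autorelease pool, so the temporary
    // NSData/UIImage objects are freed each iteration instead of piling up.
    NSAutoreleasePool *framePool = [[NSAutoreleasePool alloc] init];
    // ... load the image, create the pixel buffer, append it ...
    [framePool drain];
}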