
I'm capturing video and converting each frame to a CGImage for processing. It runs for about 10 seconds, then I get a memory warning and the app crashes (the debugger usually reports that the data formatters were temporarily unavailable). Can anyone help me figure out what's going wrong?

- (void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {

// CONVERT CMSAMPLEBUFFER INTO A CGIMAGE
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

CVPixelBufferLockBaseAddress(imageBuffer,0);


uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);


CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
CGImageRef cgimage = CGBitmapContextCreateImage(newContext);
UIImage *sourceImage = [UIImage imageWithCGImage:cgimage scale:1.0f orientation:UIImageOrientationLeftMirrored];
CGImageRelease(cgimage);
CGContextRelease(newContext);
CGColorSpaceRelease(colorSpace);

CVPixelBufferUnlockBaseAddress(imageBuffer,0);


// ***
// Adding code after this point to do image transformation usually causes it to crash
UIImage *rot = [self scaleAndRotateImage:sourceImage];
self.detectImage = rot;



}

The code that transforms the image...

- (UIImage*)scaleAndRotateImage:(UIImage *)image{
    int kMaxResolution = 320; // Or whatever

    CGImageRef imgRef = image.CGImage;

    CGFloat width = CGImageGetWidth(imgRef);
    CGFloat height = CGImageGetHeight(imgRef);

    CGAffineTransform transform = CGAffineTransformIdentity;
    CGRect bounds = CGRectMake(0, 0, width, height);
    if (width > kMaxResolution || height > kMaxResolution) {
        CGFloat ratio = width/height;
        if (ratio > 1) {
            bounds.size.width = kMaxResolution;
            bounds.size.height = bounds.size.width / ratio;
        }
        else {
            bounds.size.height = kMaxResolution;
            bounds.size.width = bounds.size.height * ratio;
        }
    }

    CGFloat scaleRatio = bounds.size.width / width;
    CGSize imageSize = CGSizeMake(CGImageGetWidth(imgRef), CGImageGetHeight(imgRef));
    CGFloat boundHeight;
    UIImageOrientation orient = image.imageOrientation;
    switch(orient) {

        case UIImageOrientationUp: //EXIF = 1
            transform = CGAffineTransformIdentity;
            break;

        case UIImageOrientationUpMirrored: //EXIF = 2
            transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0);
            transform = CGAffineTransformScale(transform, -1.0, 1.0);
            break;

        case UIImageOrientationDown: //EXIF = 3
            transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height);
            transform = CGAffineTransformRotate(transform, M_PI);
            break;

        case UIImageOrientationDownMirrored: //EXIF = 4
            transform = CGAffineTransformMakeTranslation(0.0, imageSize.height);
            transform = CGAffineTransformScale(transform, 1.0, -1.0);
            break;

        case UIImageOrientationLeftMirrored: //EXIF = 5
            boundHeight = bounds.size.height;
            bounds.size.height = bounds.size.width;
            bounds.size.width = boundHeight;
            transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.height);
            transform = CGAffineTransformScale(transform, -1.0, 1.0);
            transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
            break;

        case UIImageOrientationLeft: //EXIF = 6
            boundHeight = bounds.size.height;
            bounds.size.height = bounds.size.width;
            bounds.size.width = boundHeight;
            transform = CGAffineTransformMakeTranslation(0.0, imageSize.width);
            transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
            break;

        case UIImageOrientationRightMirrored: //EXIF = 7
            boundHeight = bounds.size.height;
            bounds.size.height = bounds.size.width;
            bounds.size.width = boundHeight;
            transform = CGAffineTransformMakeScale(-1.0, 1.0);
            transform = CGAffineTransformRotate(transform, M_PI / 2.0);
            break;

        case UIImageOrientationRight: //EXIF = 8
            boundHeight = bounds.size.height;
            bounds.size.height = bounds.size.width;
            bounds.size.width = boundHeight;
            transform = CGAffineTransformMakeTranslation(imageSize.height, 0.0);
            transform = CGAffineTransformRotate(transform, M_PI / 2.0);
            break;

        default:
            [NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"];

    }

    UIGraphicsBeginImageContext(bounds.size);

    CGContextRef context = UIGraphicsGetCurrentContext();

    if (orient == UIImageOrientationRight || orient == UIImageOrientationLeft) {
        CGContextScaleCTM(context, -scaleRatio, scaleRatio);
        CGContextTranslateCTM(context, -height, 0);
    }
    else {
        CGContextScaleCTM(context, scaleRatio, -scaleRatio);
        CGContextTranslateCTM(context, 0, -height);
    }

    CGContextConcatCTM(context, transform);

    CGContextDrawImage(UIGraphicsGetCurrentContext(), CGRectMake(0, 0, width, height), imgRef);
    UIImage *imageCopy = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    //[self setRotatedImage:imageCopy];
    return imageCopy;
}

This last part is just background, so you can see how the video output is set up...

AVCaptureVideoDataOutput *videoOut = [[AVCaptureVideoDataOutput alloc] init];
[videoOut setAlwaysDiscardsLateVideoFrames:YES];
[videoOut setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]]; // BGRA is necessary for manual preview
dispatch_queue_t my_queue = dispatch_queue_create("com.example.subsystem.taskXYZ", NULL);
[videoOut setSampleBufferDelegate:self queue:my_queue];
if ([self.captureSession canAddOutput:videoOut]) [self.captureSession addOutput:videoOut];
else NSLog(@"Couldn't add video output");
[videoOut release];

2 Answers


I had a similar problem. It turned out the queue was filling up with unprocessed frames because my delegate object wasn't handling them fast enough.

My solution was to do the following (once per processed frame):

proctr++;                          // count the frames we've processed
if ((proctr % 20) == 0) {          // every 20th frame...
  deferImageProcessing = true;
  dispatch_sync(queue, ^{          // ...block until the queue has drained
    [self queueFlushed];
  });
}

- (void)queueFlushed {
  deferImageProcessing = false;    // safe to resume processing
}

Then, in the actual image-processing code:

- (void)captureOutput:(AVCaptureOutput *)captureOutput
     didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
     fromConnection:(AVCaptureConnection *)connection
{ 
  if (deferImageProcessing)
    return;
  // do whatever else I'm doing...
}
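
For context, the snippets above refer to proctr, deferImageProcessing, and queue without showing their declarations. Here is a minimal sketch of what they might look like; only those three names come from the answer, everything else is an assumption. Note that queue here is presumably the queue the downstream image work is dispatched onto, since a dispatch_sync onto the serial queue the callback is already running on would deadlock.

#import <AVFoundation/AVFoundation.h>

// Hypothetical controller holding the state the answer's snippets rely on.
@interface CaptureController : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate> {
    int proctr;                  // number of frames processed so far
    BOOL deferImageProcessing;   // YES while waiting for the work queue to drain
    dispatch_queue_t queue;      // queue the downstream image work runs on
}
@end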

Basically, I periodically pause the image processing until the queue has emptied.
I hope this helps.

answered 2010-09-30T01:12:00.813

Your video output setup never releases the dispatch queue it creates. You can release it with:

dispatch_release(queue);

But since I wouldn't expect that setup code to run very often, the leak is probably coming from somewhere else. I've gone through your code a few times and couldn't spot any other culprit...

Have you tried hunting for the leak with the Leaks instrument?
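
For completeness, a rough sketch of how the create/release could be balanced under manual reference counting, assuming the queue is kept in an instance variable (the name my_queue comes from the question; putting the release in dealloc is just one option):

// In the setup code, keep the queue in an ivar instead of a local:
my_queue = dispatch_queue_create("com.example.subsystem.taskXYZ", NULL);
[videoOut setSampleBufferDelegate:self queue:my_queue];

// Later, when the controller is torn down:
- (void)dealloc {
    if (my_queue) {
        dispatch_release(my_queue);   // balances dispatch_queue_create (pre-ARC)
        my_queue = NULL;
    }
    [super dealloc];
}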

answered 2010-08-04T08:23:46.140