
Please find my code below.

-(void) writeImagesAsMovie:(NSArray *)array toPath:(NSString*)path numPhoto:(NSInteger)totPics {
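    // NOTE: "assets" used below is assumed to be an instance variable holding the ALAsset objects;
    // the "array" parameter is not referenced in the body as posted.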

    ALAsset *asset = [assets objectAtIndex:0];

    ALAssetRepresentation *assetRepresentation = [asset defaultRepresentation];    
    UIImage *getImage = [UIImage imageWithCGImage:[assetRepresentation fullScreenImage] scale:[assetRepresentation scale] orientation:(UIImageOrientation)[assetRepresentation orientation]];
    UIImage *first = [getImage imageByScalingAndCroppingForSize:CGSizeMake(720.0, 960.0)];

    CGSize frameSize = CGSizeMake(first.size.width,first.size.height);

    NSLog(@"frameSize = %@",NSStringFromCGSize(frameSize));
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                                  [NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];

    if(error) {
        NSLog(@"error creating AssetWriter: %@",[error description]);
    }
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:frameSize.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:frameSize.height], AVVideoHeightKey,
                                   AVVideoScalingModeResizeAspect,AVVideoScalingModeKey,
                                   nil];


    AVAssetWriterInput* writerInput = [AVAssetWriterInput
                                        assetWriterInputWithMediaType:AVMediaTypeVideo
                                        outputSettings:videoSettings];


    CGAffineTransform transform = CGAffineTransformIdentity;
    UIImageOrientation orient = first.imageOrientation;
    CGSize imageSize = first.size;

    switch(orient) {

        case UIImageOrientationUp: //EXIF = 1
            transform = CGAffineTransformIdentity;
            break;

        case UIImageOrientationUpMirrored: //EXIF = 2
            transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0);
            transform = CGAffineTransformScale(transform, -1.0, 1.0);
            break;

        case UIImageOrientationDown: //EXIF = 3
            transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height);
            transform = CGAffineTransformRotate(transform, M_PI);
            break;

        case UIImageOrientationDownMirrored: //EXIF = 4
            transform = CGAffineTransformMakeTranslation(0.0, imageSize.height);
            transform = CGAffineTransformScale(transform, 1.0, -1.0);
            break;

        case UIImageOrientationLeftMirrored: //EXIF = 5

            transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width);
            transform = CGAffineTransformScale(transform, -1.0, 1.0);
            transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
            break;

        case UIImageOrientationLeft: //EXIF = 6

            transform = CGAffineTransformMakeTranslation(0.0, imageSize.width);
            transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
            break;

        case UIImageOrientationRightMirrored: //EXIF = 7

            transform = CGAffineTransformMakeScale(-1.0, 1.0);
            transform = CGAffineTransformRotate(transform, M_PI / 2.0);
            break;

        case UIImageOrientationRight: //EXIF = 8

            transform = CGAffineTransformMakeTranslation(imageSize.height, 0.0);
            transform = CGAffineTransformRotate(transform, M_PI / 2.0);
            break;

        default:
            [NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"];
    }


    writerInput.transform = transform;

    NSMutableDictionary *attributes = [[NSMutableDictionary alloc] init];
    [attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
    [attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.width] forKey:(NSString*)kCVPixelBufferWidthKey];
    [attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.height] forKey:(NSString*)kCVPixelBufferHeightKey];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                     sourcePixelBufferAttributes:attributes];

    [videoWriter addInput:writerInput];

    // Setting expectsMediaDataInRealTime reportedly avoids append errors here.
    writerInput.expectsMediaDataInRealTime = YES;

    //Start a session:
    [videoWriter startWriting];

    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;
    buffer = [self pixelBufferFromCGImage:[first CGImage]];
    BOOL result = [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];

    if (result == NO)
        NSLog(@"failed to append buffer");

    if(buffer) {
        CVBufferRelease(buffer);
    }

    int fps = 2;

    for(int i=0; i<totPics; i++)
    {

        if (adaptor.assetWriterInput.readyForMoreMediaData) {


            CMTime frameTime = CMTimeMake(1, fps);
            CMTime lastTime = CMTimeMake(i, fps);


            CMTime presentTime = CMTimeAdd(lastTime, frameTime);
            NSLog(@"presentTime = %f",CMTimeGetSeconds(presentTime));


            ALAsset *asset = [assets objectAtIndex:i];
            ALAssetRepresentation *assetRepresentation = [asset defaultRepresentation];
            UIImage *imgGetFrame = [UIImage imageWithCGImage:[assetRepresentation fullScreenImage] scale:[assetRepresentation scale] orientation:(UIImageOrientation)[assetRepresentation orientation]];
            UIImage *imgFrame = [imgGetFrame imageByScalingAndCroppingForSize:CGSizeMake(720.0, 960.0)];

            buffer = [self pixelBufferFromCGImage:[imgFrame CGImage]];
            BOOL result = [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];

            if (result == NO) // fails on 3GS, but works on iPhone 4
            {
                NSLog(@"failed to append buffer");
                NSLog(@"The error is %@", [videoWriter error]);
            }

            if(buffer) {
                CVBufferRelease(buffer);
            }


        } else {
            NSLog(@"error");
        }        
    }

    //Finish the session:
    [writerInput markAsFinished];
    [videoWriter finishWritingWithCompletionHandler:^{

        NSLog(@"Complete");
    }];

    CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
}

-(CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image {
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;

    CVPixelBufferCreate(kCFAllocatorDefault, CGImageGetWidth(image),
                        CGImageGetHeight(image), kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options,
                        &pxbuffer);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, CGImageGetWidth(image),
                                                 CGImageGetHeight(image), 8, 4*CGImageGetWidth(image), rgbColorSpace,
                                                 kCGImageAlphaNoneSkipFirst);

    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));

    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}

The image scaling and cropping method:

-(UIImage*)imageByScalingAndCroppingForSize:(CGSize)targetSize {
    UIImage *sourceImage = self;
    UIImage *newImage = nil;
    CGSize imageSize = sourceImage.size;
    CGFloat width = imageSize.width;
    CGFloat height = imageSize.height;
    CGFloat targetWidth = targetSize.width;
    CGFloat targetHeight = targetSize.height;
    CGFloat scaleFactor = 0.0;
    CGFloat scaledWidth = targetWidth;
    CGFloat scaledHeight = targetHeight;
    CGPoint thumbnailPoint = CGPointMake(0.0, 0.0);

    if (CGSizeEqualToSize(imageSize, targetSize) == NO)
    {
        CGFloat widthFactor = targetWidth / width;
        CGFloat heightFactor = targetHeight / height;

        if (widthFactor > heightFactor)
        {
            scaleFactor = widthFactor; // scale to fill the width; height will be cropped
        }
        else
        {
            scaleFactor = heightFactor; // scale to fill the height; width will be cropped
        }

        scaledWidth  = width * scaleFactor;
        scaledHeight = height * scaleFactor;

        // center the image
        if (widthFactor > heightFactor)
        {
            thumbnailPoint.y = (targetHeight - scaledHeight) * 0.5;
        }
        else if (widthFactor < heightFactor)
        {
            thumbnailPoint.x = (targetWidth - scaledWidth) * 0.5;
        }
    }

    UIGraphicsBeginImageContext(targetSize); // this will crop

    CGRect thumbnailRect = CGRectZero;
    thumbnailRect.origin = thumbnailPoint;
    thumbnailRect.size.width  = scaledWidth;
    thumbnailRect.size.height = scaledHeight;

    [sourceImage drawInRect:thumbnailRect];

    newImage = UIGraphicsGetImageFromCurrentImageContext();

    if (newImage == nil) {
        NSLog(@"could not scale image");
    }

    // pop the context to get back to the default
    UIGraphicsEndImageContext();

    return newImage;
}

I pass in images from the user's photo library and append each image after cropping it to 720 x 960.

When I process 100 images I get memory warning errors, and when I check the app in Instruments it uses about 400 MB. So if anyone can see what I am doing wrong, please help me.


1 Answer


The problem is that you are using up all of the app's memory in your video processing loop. You cannot allocate hundreds of images in memory at the same time; the code will crash when run on an iOS device. Read my blog post on the subject, video_and_memory_usage_on_ios_devices, and then change your for loop so that an autorelease pool is created for each iteration of the loop, which will fix the runaway memory usage. Also note that kCVPixelFormatType_32ARGB will be very slow; you need to use kCVPixelFormatType_32BGRA instead.
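A minimal sketch of the per-iteration autorelease pool the answer describes, assuming the surrounding variables (assets, totPics, fps, adaptor, videoWriter) and the two helper methods stay exactly as in the question (rep, fullImage, and frameBuffer are just renamed locals):

for (int i = 0; i < totPics; i++)
{
    // One autorelease pool per iteration, so the images created in this
    // pass are released before the next asset is loaded.
    @autoreleasepool {

        if (adaptor.assetWriterInput.readyForMoreMediaData) {

            CMTime frameTime = CMTimeMake(1, fps);
            CMTime lastTime = CMTimeMake(i, fps);
            CMTime presentTime = CMTimeAdd(lastTime, frameTime);

            ALAsset *asset = [assets objectAtIndex:i];
            ALAssetRepresentation *rep = [asset defaultRepresentation];
            UIImage *fullImage = [UIImage imageWithCGImage:[rep fullScreenImage]
                                                     scale:[rep scale]
                                               orientation:(UIImageOrientation)[rep orientation]];
            UIImage *imgFrame = [fullImage imageByScalingAndCroppingForSize:CGSizeMake(720.0, 960.0)];

            CVPixelBufferRef frameBuffer = [self pixelBufferFromCGImage:[imgFrame CGImage]];
            BOOL result = [adaptor appendPixelBuffer:frameBuffer withPresentationTime:presentTime];

            if (result == NO) {
                NSLog(@"failed to append buffer, error: %@", [videoWriter error]);
            }

            if (frameBuffer) {
                CVBufferRelease(frameBuffer);
            }

        } else {
            NSLog(@"writer input not ready for more media data");
        }

    } // the pool drains here, releasing this iteration's autoreleased objects
}

For the pixel-format suggestion, kCVPixelFormatType_32ARGB would be replaced with kCVPixelFormatType_32BGRA both in the adaptor's sourcePixelBufferAttributes and in pixelBufferFromCGImage:; with BGRA the bitmap context is typically created with a bitmapInfo of kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipFirst so the byte order matches the buffer.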

answered 2013-08-08 21:25