I am trying to create a movie from a set of photos. It works fine at HD resolution (720×1280) and below, but when I try to create the movie from full-HD images (1080×1920) the video comes out scrambled. Here is a link showing what it looks like: http://www.youtube.com/watch?v=BfYldb8e_18. Any ideas about what I might be doing wrong?

- (void) createMovieWithOptions:(NSDictionary *) options
{
@autoreleasepool {
    NSString *path = [options valueForKey:@"path"];
    CGSize size =  [(NSValue *)[options valueForKey:@"size"] CGSizeValue];
    NSArray *imageArray = [options valueForKey:@"pictures"];
    NSInteger recordingFPS = [[options valueForKey:@"fps"] integerValue];
    BOOL success=YES;
    NSError *error = nil;

    AVAssetWriter *assetWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(assetWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithFloat:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithFloat:size.height], AVVideoHeightKey,
                                   nil];

    AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                              outputSettings:videoSettings];

    // Configure settings for the pixel buffer adaptor.
    NSDictionary* bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
                                      [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                                                                                     sourcePixelBufferAttributes:bufferAttributes];

    NSParameterAssert(videoWriterInput);
    NSParameterAssert([assetWriter canAddInput:videoWriterInput]);

    videoWriterInput.expectsMediaDataInRealTime = NO;
    [assetWriter addInput:videoWriterInput];

    //Start a session:
    [assetWriter startWriting];
    [assetWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;

    // Each UIImage is converted to a CGImage and wrapped in a pixel buffer in the loop below.

    int frameCount = 0;
    float progress = 0;
    float progressFromFrames = _progressView.progress; // only used when creating the iFlipbook movie

    for(UIImage * img in imageArray)
    {
        if([[NSThread currentThread] isCancelled])
        {
            [NSThread exit];
        }

        [condCreateMovie lock];
        if(isCreateMoviePaused)
        {
            [condCreateMovie wait];
        }

        uint64_t totalFreeSpace = [Utils getFreeDiskspace];
        if(((totalFreeSpace/1024ll)/1024ll) < 50)
        {
            success = NO;
            [condCreateMovie unlock]; // don't leave the condition locked when bailing out
            break;
        }

        NSLog(@"size:%@",NSStringFromCGSize(img.size));

        buffer = [[MovieWritter sharedMovieWritter] pixelBufferFromCGImage:[img CGImage] andSize:size];

        BOOL append_ok = NO;
        int j = 0;
        while (!append_ok && j < 60)
        {
            if(adaptor.assetWriterInput.readyForMoreMediaData)
            {
                CMTime frameTime = CMTimeMake(frameCount, (int32_t)recordingFPS);
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];

                [NSThread sleepForTimeInterval:0.1];

                if(isCreatingiFlipBookFromImported)
                    progress = (float)frameCount/(float)[imageArray count]/2.0f + progressFromFrames;
                else
                    progress = (float)frameCount/(float)[imageArray count];

                [[NSNotificationCenter defaultCenter] postNotificationName:@"movieCreationProgress" object:[NSNumber numberWithFloat:progress]];
            }
            else
            {
                [NSThread sleepForTimeInterval:0.5];
            }
            j++;
        }

        // Release the buffer after the retry loop; releasing it inside the loop
        // would leave a dangling pointer if a failed append were retried.
        CVPixelBufferRelease(buffer);

        if (!append_ok)
        {
            NSLog(@"error appending image %d times %d\n", frameCount, j);
        }
        frameCount++;

        [condCreateMovie unlock];
    }

    //Finish the session:
    [videoWriterInput markAsFinished];
    [assetWriter finishWriting];

    NSDictionary *dict = [NSDictionary dictionaryWithObjectsAndKeys:
                          [NSNumber numberWithBool:success], @"success",
                          path, @"path", nil];

    [[NSNotificationCenter defaultCenter] postNotificationName:@"movieCreationFinished" object:dict];
}
}

Edit: the code for [[MovieWritter sharedMovieWritter] pixelBufferFromCGImage:] is below:

- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image andSize:(CGSize) size
{
@autoreleasepool {
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;

    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width,
                                          size.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options,
                                          &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
                                                 size.height, 8, 4*size.width, rgbColorSpace,
                                                 kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);
    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}
}

4 Answers

Score: 3

I had the same problem and solved it with this answer: the video dimensions have to be a multiple of 16. When the width is not, the 4 * width bytes-per-row that the CGBitmapContext assumes no longer matches the row stride Core Video actually allocates for the pixel buffer (rows are padded to an alignment boundary), which produces exactly this kind of scrambling.
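As a minimal sketch of that fix, rounding both dimensions down to a multiple of 16 before configuring the writer and the pixel buffers (`movieSafeSize` is an illustrative helper, not from the original answer):

    // Round each dimension down to a multiple of 16, e.g. 1080x1920 -> 1072x1920.
    static CGSize movieSafeSize(CGSize size)
    {
        return CGSizeMake(floorf(size.width  / 16.0f) * 16.0f,
                          floorf(size.height / 16.0f) * 16.0f);
    }

An alternative that keeps the exact dimensions is to create the CGBitmapContext with CVPixelBufferGetBytesPerRow(pxbuffer) instead of the hard-coded 4 * size.width, so that the drawing stride always matches the buffer's padded stride.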

answered 2013-12-03
Score: 2

To get the pixel buffer, how about something like this:

    // you could use a CGImageRef here instead
    CFDataRef imageData= CGDataProviderCopyData(CGImageGetDataProvider(imageView.image.CGImage));
    NSLog (@"copied image data");
    cvErr = CVPixelBufferCreateWithBytes(kCFAllocatorDefault,
                                         FRAME_WIDTH,
                                         FRAME_HEIGHT,
                                         kCVPixelFormatType_32BGRA,
                                         (void*)CFDataGetBytePtr(imageData),
                                         CGImageGetBytesPerRow(imageView.image.CGImage),
                                         NULL,
                                         NULL,
                                         NULL,
                                         &pixelBuffer);
    NSLog (@"CVPixelBufferCreateWithBytes returned %d", cvErr);

    CFAbsoluteTime thisFrameWallClockTime = CFAbsoluteTimeGetCurrent();  
    CFTimeInterval elapsedTime = thisFrameWallClockTime - firstFrameWallClockTime;  
    NSLog (@"elapsedTime: %f", elapsedTime);
    CMTime presentationTime =  CMTimeMake(elapsedTime * TIME_SCALE, TIME_SCALE);

    // write the sample
    BOOL appended = [assetWriterPixelBufferAdaptor  appendPixelBuffer:pixelBuffer withPresentationTime:presentationTime];
    CVPixelBufferRelease(pixelBuffer);
    CFRelease(imageData);
    if (appended) {
        NSLog (@"appended sample at time %lf", CMTimeGetSeconds(presentationTime));
    } else {
        NSLog (@"failed to append");
        [self stopRecording];
        self.startStopButton.selected = NO;
    }
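Note that this snippet assumes a few surrounding declarations that are not shown; roughly something like the following (names and values are illustrative, inferred from the snippet, not part of the original answer):

    #define FRAME_WIDTH  1920
    #define FRAME_HEIGHT 1080
    #define TIME_SCALE   600   // a common CMTime timescale choice

    CVPixelBufferRef pixelBuffer = NULL;
    CVReturn cvErr = kCVReturnSuccess;
    CFAbsoluteTime firstFrameWallClockTime;   // captured once, when recording starts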
answered 2012-12-13
Score: 2

I have no doubt this is either a hardware limitation or a bug. File a radar.

answered 2012-12-13
Score: -1

You can also set the capture session preset, though usually high is appropriate and it is the default. The constants for defining a capture preset via the sessionPreset property are:

    /* Constants to define capture setting presets using the sessionPreset property. */
    NSString * const AVCaptureSessionPresetPhoto;
    NSString * const AVCaptureSessionPresetHigh;
    NSString * const AVCaptureSessionPresetMedium;
    NSString * const AVCaptureSessionPresetLow;
    NSString * const AVCaptureSessionPreset352x288;
    NSString * const AVCaptureSessionPreset640x480;
    NSString * const AVCaptureSessionPreset1280x720;
    NSString * const AVCaptureSessionPreset1920x1080;
    NSString * const AVCaptureSessionPresetiFrame960x540;
    NSString * const AVCaptureSessionPresetiFrame1280x720;

Set it like this:

    self.captureSession.sessionPreset = AVCaptureSessionPreset1920x1080;

or, when you define the capture session:

    [self.captureSession setSessionPreset:AVCaptureSessionPreset1920x1080];
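Before applying a preset it is worth checking that the device actually supports it; a minimal sketch, assuming self.captureSession is an already-configured AVCaptureSession:

    if ([self.captureSession canSetSessionPreset:AVCaptureSessionPreset1920x1080]) {
        [self.captureSession setSessionPreset:AVCaptureSessionPreset1920x1080];
    } else {
        // Not every device supports 1080p capture; fall back to the default.
        [self.captureSession setSessionPreset:AVCaptureSessionPresetHigh];
    }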

answered 2012-12-16