
I've been fighting on and off for a week to save OpenGL rendering (I'm using it for green screening) out to video via an AVAssetWriter.

To show what I'm doing, I've created the simple test rig below.

I asked on the Apple forums and received advice on the process, which is also described at allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/ and is used by the GPUImage library.
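
As I understand it, the core of that approach boils down to the following. This is just a condensed sketch (cache, pixelBuffer, texture, adaptor, width and height are placeholder names of mine); my actual setup code is further down:

// 1. Create a texture cache tied to the EAGLContext used for rendering
CVOpenGLESTextureCacheRef cache;
CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, context, NULL, &cache);

// 2. Get a pixel buffer from the writer adaptor's pool
CVPixelBufferRef pixelBuffer;
CVPixelBufferPoolCreatePixelBuffer(NULL, [adaptor pixelBufferPool], &pixelBuffer);

// 3. Wrap the pixel buffer in a GL texture backed by the same memory
CVOpenGLESTextureRef texture;
CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, cache, pixelBuffer,
        NULL, GL_TEXTURE_2D, GL_RGBA, width, height, GL_BGRA, GL_UNSIGNED_BYTE, 0, &texture);

// 4. Attach the texture to an FBO, so anything drawn into the FBO lands in the
//    pixel buffer, which can then be appended to the writer for each frame
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D,
        CVOpenGLESTextureGetName(texture), 0);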

As far as I can tell, I'm doing exactly the same thing - I create the FBO using GPUImage's method.

I've confirmed that the drawing itself is fine (this code also contains an on-screen drawing method, currently disabled).

The FBO is created successfully, and glCheckFramebufferStatus reports success.

There are no crashes, exceptions, or warnings; the writer is in a normal status, and every texture cache, buffer, etc. is created without errors.

However, the video output is still black.

If I set glClear to white, I get a white rectangle, but it isn't the size of the video I requested.

The triangle I render never shows up in the video.

#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>
#import "TestViewController.h"


/////////////////////////////////////////////////////////////////
// This data type is used to store information for each vertex
typedef struct
{
    GLKVector3 positionCoords;
}
        SceneVertex;

/////////////////////////////////////////////////////////////////
// Define vertex data for a triangle to use in example
static const SceneVertex vertices[] =
        {
                {{-1.0f, -1.0f, 1.0}}, // lower left corner
                {{1.0f, -1.0f, 0.5}}, // lower right corner
                {{1.0f, 1.0f, 0.0}}  // upper right corner
        };


@interface TestViewController ()

@property(nonatomic, readwrite, assign) CVOpenGLESTextureCacheRef videoTextureCache;
@property(strong, nonatomic) GLKTextureInfo *background;
@property(nonatomic, strong) AVAssetWriter *assetWriter;

@property(nonatomic) BOOL isRecording;

@property(nonatomic, strong) AVAssetWriterInput *assetWriterVideoInput;

@property(nonatomic, strong) AVAssetWriterInputPixelBufferAdaptor *assetWriterPixelBufferInput;

@property(nonatomic, assign) CFAbsoluteTime startTime;

@property(nonatomic, strong) GLKView *glkView;

@property(nonatomic, strong) GLKBaseEffect *screenGLEffect;
@property(nonatomic, strong) GLKBaseEffect *FBOGLEffect;

@property(nonatomic, strong) NSTimer *recordingTimer;

- (BOOL)isRetina;
@end


@implementation TestViewController
{
    CVOpenGLESTextureCacheRef _writerTextureCache;
    GLuint _writerRenderFrameBuffer;
    GLuint vertexBufferID;

    EAGLContext *_writerContext;
    CVOpenGLESTextureRef _writerTexture;
    CVPixelBufferRef _writerPixelBuffer;
}

- (GLKBaseEffect *)createBasicDrawingEffectInCurrentContext
{
    GLKBaseEffect *basicGLEffect = [[GLKBaseEffect alloc] init];
    basicGLEffect.useConstantColor = GL_TRUE;
    basicGLEffect.constantColor = GLKVector4Make(
            .5f, // Red
            1.0f, // Green
            .5f, // Blue
            1.0f);// Alpha

    // Set the background color stored in the current context
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f); // background color

    // Generate, bind, and initialize contents of a buffer to be
    // stored in GPU memory
    glGenBuffers(1,                // STEP 1
            &vertexBufferID);
    glBindBuffer(GL_ARRAY_BUFFER,  // STEP 2
            vertexBufferID);
    glBufferData(                  // STEP 3
            GL_ARRAY_BUFFER,  // Initialize buffer contents
            sizeof(vertices), // Number of bytes to copy
            vertices,         // Address of bytes to copy
            GL_STATIC_DRAW);  // Hint: cache in GPU memory
    return basicGLEffect;
}


/////////////////////////////////////////////////////////////////
// 
- (void)viewDidUnload
{
    [super viewDidUnload];

    // Make the view's context current
    GLKView *view = (GLKView *) self.view;
    [EAGLContext setCurrentContext:view.context];

    // Stop using the context created in -viewDidLoad
    ((GLKView *) self.view).context = nil;
    [EAGLContext setCurrentContext:nil];
}

//////////////////////////////////////////////////////////////
#pragma mark AVWriter setup
//////////////////////////////////////////////////////////////

- (NSString *)tempFilePath
{
    return [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/output2.m4v"];
}


- (void)removeTempFile
{
    NSString *path = [self tempFilePath];
    NSFileManager *fileManager = [NSFileManager defaultManager];

    BOOL exists = [fileManager fileExistsAtPath:path];
    NSLog(@">>>remove %@ Exists %d", path, exists);

    unlink([path UTF8String]);

    NSLog(@">>>AFTER REMOVE %@ Exists %d", path, [fileManager fileExistsAtPath:path]);

}

- (void)createWriter
{
    //My setup code is based heavily on the GPUImage project, https://github.com/BradLarson/GPUImage so some of these dictionary names and structure are similar to the code from that project - I recommend you check it out if you are interested in Video filtering/recording
    [self removeTempFile];

    NSError *error;
    self.assetWriter = [[AVAssetWriter alloc]
                                       initWithURL:[NSURL fileURLWithPath:[self tempFilePath]]
                                          fileType:AVFileTypeQuickTimeMovie
                                             error:&error];

    if (error)
    {
        NSLog(@"Couldn't create writer, %@", error.localizedDescription);
        return;
    }

    NSDictionary *outputSettings = @{
            AVVideoCodecKey : AVVideoCodecH264,
            AVVideoWidthKey : @640,
            AVVideoHeightKey : @480
    };

    self.assetWriterVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                    outputSettings:outputSettings];

    self.assetWriterVideoInput.expectsMediaDataInRealTime = YES;

    NSDictionary *sourcePixelBufferAttributesDictionary = @{(id) kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
                                                            (id) kCVPixelBufferWidthKey : @640,
                                                            (id) kCVPixelBufferHeightKey : @480};

    self.assetWriterPixelBufferInput = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:self.assetWriterVideoInput
                                                                                                        sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];

    // Flip the written video vertically on playback - OpenGL's origin is bottom-left, video frames are top-left
    self.assetWriterVideoInput.transform = CGAffineTransformMakeScale(1, -1);

    if ([_assetWriter canAddInput:self.assetWriterVideoInput])
    {
        [_assetWriter addInput:self.assetWriterVideoInput];
    } else
    {
        NSLog(@"can't add video writer input %@", self.assetWriterVideoInput);
    }
    /*
    _assetWriterAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:nil];
    if ([_assetWriter canAddInput:_assetWriterAudioInput]) {
        [_assetWriter addInput:_assetWriterAudioInput];
        _assetWriterAudioInput.expectsMediaDataInRealTime = YES;
    }
     */
}


- (void)writeMovieToLibraryWithPath:(NSURL *)path
{
    NSLog(@"writing %@ to library", path);
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    [library writeVideoAtPathToSavedPhotosAlbum:path
                                completionBlock:^(NSURL *assetURL, NSError *error) {
                                    if (error)
                                    {
                                        NSLog(@"Error saving to library%@", [error localizedDescription]);
                                    } else
                                    {
                                        NSLog(@"SAVED %@ to photo lib", path);
                                    }
                                }];
}


//////////////////////////////////////////////////////////////
#pragma mark touch handling
//////////////////////////////////////////////////////////////

- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
{
    [super touchesEnded:touches withEvent:event];
    if (self.isRecording)
    {
        [self finishRecording];
    } else
    {
        [self startRecording];
    }
}

//////////////////////////////////////////////////////////////
#pragma mark recording
//////////////////////////////////////////////////////////////


- (void)startRecording;
{
    NSLog(@"started recording");
#warning debugging startrecording
//    NSLog(@"bypassing usual write method");
//      if (![assetWriter startWriting]){
//        NSLog(@"writer not started %@, %d", assetWriter.error, assetWriter.status);
//    }
    self.startTime = CFAbsoluteTimeGetCurrent();

    [self createWriter];
    [self.assetWriter startWriting];
    [self.assetWriter startSessionAtSourceTime:kCMTimeZero];

    NSAssert([self.assetWriterPixelBufferInput pixelBufferPool], @"writerpixelbuffer input has no pools");

    if (!_writerContext)
    {
        _writerContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
        if (!_writerContext || ![EAGLContext setCurrentContext:_writerContext])
        {
            NSLog(@"Problem with OpenGL context.");

            return;
        }
    }
    [EAGLContext setCurrentContext:_writerContext];

    NSLog(@"Creating FBO");
    [self createDataFBOUsingGPUImagesMethod];
//    [self createDataFBO];
    self.isRecording = YES;
    NSLog(@"Recording is started");

    self.recordingTimer = [NSTimer scheduledTimerWithTimeInterval:1.0 / 30.0
                                                           target:self
                                                         selector:@selector(tick:)
                                                         userInfo:nil repeats:YES];
}

- (void)tick:(id)tick
{
    [self drawBasicGLTOFBOForWriting];
}

- (void)finishRecording;
{
    [self.recordingTimer invalidate];
    self.recordingTimer = nil;

    NSLog(@"finished recording");
    if (self.assetWriter.status == AVAssetWriterStatusCompleted || !self.isRecording)
    {
        NSLog(@"already completed ingnoring");
        return;
    }

    NSLog(@"Asset writer writing");
    self.isRecording = NO;
//    runOnMainQueueWithoutDeadlocking(^{
    NSLog(@"markng inputs as finished");
    //TODO - these cause an error
    [self.assetWriterVideoInput markAsFinished];
    __weak TestViewController *blockSelf = self;

    [self.assetWriter finishWritingWithCompletionHandler:^{
        if (self.assetWriter.error == nil)
        {
            NSLog(@"saved ok - writing to lib");
            [self writeMovieToLibraryWithPath:[NSURL fileURLWithPath:[self tempFilePath]]];
        } else
        {
            NSLog(@" did not save due to error %@", self.assetWriter.error);
        }
    }];
//    });
}


- (void)drawBasicGLTOFBOForWriting
{
    if (!self.isRecording)
    {
        return;
    }
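    // Draw on the writer's own context and FBO, not the on-screen GLKView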
    [EAGLContext setCurrentContext:_writerContext];
    if (!self.FBOGLEffect)
    {
        self.FBOGLEffect = [self createBasicDrawingEffectInCurrentContext];
    }

    glDisable(GL_DEPTH_TEST);
    glBindFramebuffer(GL_FRAMEBUFFER, _writerRenderFrameBuffer);

    glClearColor(1, 1, 1, 1);
    glClear(GL_COLOR_BUFFER_BIT);

    [self.FBOGLEffect prepareToDraw];

    // Clear Frame Buffer (erase previous drawing)
    // Enable use of positions from bound vertex buffer
    glEnableVertexAttribArray(      // STEP 4
            GLKVertexAttribPosition);

    glVertexAttribPointer(          // STEP 5
            GLKVertexAttribPosition,
            3,                   // three components per vertex
            GL_FLOAT,            // data is floating point
            GL_FALSE,            // no fixed point scaling
            sizeof(SceneVertex), // no gaps in data
            NULL);               // NULL tells GPU to start at
    // beginning of bound buffer

    // Draw triangles using the first three vertices in the
    // currently bound vertex buffer
    glDrawArrays(GL_TRIANGLES,      // STEP 6
            0,  // Start with first vertex in currently bound buffer
            3); // Use three vertices from currently bound buffer
    glFlush();


    // Convert elapsed wall-clock seconds into a CMTime with a millisecond timescale
    CFAbsoluteTime interval = (CFAbsoluteTimeGetCurrent() - self.startTime) * 1000;
    CMTime currentTime = CMTimeMake((int) interval, 1000);
    [self writeToFileWithTime:currentTime];
}

- (void)writeToFileWithTime:(CMTime)time
{
    if (!self.assetWriterVideoInput.readyForMoreMediaData)
    {
        NSLog(@"Had to drop a video frame");
        return;
    }
    if (kCVReturnSuccess == CVPixelBufferLockBaseAddress(_writerPixelBuffer,
            kCVPixelBufferLock_ReadOnly))
    {
        uint8_t *pixels = (uint8_t *) CVPixelBufferGetBaseAddress(_writerPixelBuffer);
        // process pixels how you like!
        BOOL success = [self.assetWriterPixelBufferInput appendPixelBuffer:_writerPixelBuffer
                                                      withPresentationTime:time];
        NSLog(@"wrote at %@ : %@", CMTimeCopyDescription(NULL, time), success ? @"YES" : @"NO");
        CVPixelBufferUnlockBaseAddress(_writerPixelBuffer, kCVPixelBufferLock_ReadOnly);
    }
}



//////////////////////////////////////////////////////////////
#pragma mark FBO setup
//////////////////////////////////////////////////////////////

- (void)createDataFBOUsingGPUImagesMethod;
{
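    // Work in texture unit 1 for the cache texture, mirroring GPUImage's FBO setup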
    glActiveTexture(GL_TEXTURE1);
    glGenFramebuffers(1, &_writerRenderFrameBuffer);
    glBindFramebuffer(GL_FRAMEBUFFER, _writerRenderFrameBuffer);

    CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _writerContext, NULL, &_writerTextureCache);

    if (err)
    {
        NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreate %d", err);
    }

    // Code originally sourced from http://allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/


    // Pull a pixel buffer from the writer's pool; the texture created below renders straight into it
    err = CVPixelBufferPoolCreatePixelBuffer(NULL, [self.assetWriterPixelBufferInput pixelBufferPool], &_writerPixelBuffer);

    if (err)
    {
        NSAssert(NO, @"Error at CVPixelBufferPoolCreatePixelBuffer %d", err);
    }

    err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _writerTextureCache, _writerPixelBuffer,
            NULL, // texture attributes
            GL_TEXTURE_2D,
            GL_RGBA, // opengl format
            480,
            320,
            GL_BGRA, // native iOS format
            GL_UNSIGNED_BYTE,
            0,
            &_writerTexture);

    if (err)
    {
        NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
    }


    glBindTexture(CVOpenGLESTextureGetTarget(_writerTexture), CVOpenGLESTextureGetName(_writerTexture));
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(_writerTexture), 0);


    GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);

    NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status);
}


@end