
I am generating a movie by rendering OpenGL textures. Some frames of the resulting movie look as if they were not fully rendered, because they show parts of the previous frame. If I add [NSThread sleepForTimeInterval:0.05]; the problem goes away, but I cannot rely on that call.

This is the result without [NSThread sleepForTimeInterval:0.05]:

This is the result with [NSThread sleepForTimeInterval:0.05] added:

This is the code I use:

dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^
             {                   
               //Video writer
               AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                                             [NSURL fileURLWithPath:tempVideoPath]
                                                                      fileType:AVFileTypeQuickTimeMovie
                                                                         error:&error];
               NSParameterAssert(videoWriter);

               NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                              AVVideoCodecH264, AVVideoCodecKey,
                                              [NSNumber numberWithInt:MOVsize.width], AVVideoWidthKey,
                                              [NSNumber numberWithInt:MOVsize.height], AVVideoHeightKey,
                                              nil];
               AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
                                                       assetWriterInputWithMediaType:AVMediaTypeVideo
                                                       outputSettings:videoSettings];
               videoWriterInput.expectsMediaDataInRealTime=NO;

               NSParameterAssert(videoWriterInput);
               NSParameterAssert([videoWriter canAddInput:videoWriterInput]);

               [videoWriter addInput:videoWriterInput];

               NSDictionary *pixelAttributesDict = [NSDictionary dictionaryWithObjectsAndKeys:
                                                    [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                                    [NSNumber numberWithInt:1024], kCVPixelBufferWidthKey,
                                                    [NSNumber numberWithInt:768], kCVPixelBufferHeightKey,
                                                    nil];

               AVAssetWriterInputPixelBufferAdaptor* adaptor=[AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:pixelAttributesDict];
               [videoWriter startWriting];
               [videoWriter startSessionAtSourceTime:kCMTimeZero];

               if([EAGLContext currentContext]!= glContext)
                 [EAGLContext setCurrentContext:glContext];

               [self createDataFBO:adaptor];

               for (int frame = 0; frame < samplesNumber; frame++) {

                 // Wait until the writer input can accept another buffer
                 while (!videoWriterInput.readyForMoreMediaData)
                 {
                   NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];
                   [[NSRunLoop currentRunLoop] runUntilDate:maxDate];
                 }

                 //glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer);

                 // Render the current frame with OpenGL
                 [self renderFrameAt:frame];

                 CVReturn lockResult = CVPixelBufferLockBaseAddress(renderTarget, 0);
                 BOOL test = (lockResult == kCVReturnSuccess) && [adaptor appendPixelBuffer:renderTarget withPresentationTime:CMTimeMake(frame, kFps)];
                 if (!test) {
                   NSLog(@"append failed!");
                 }

                 // If I add [NSThread sleepForTimeInterval:0.05]; here, rendering works correctly

                 CVPixelBufferUnlockBaseAddress(renderTarget, 0);

                 dispatch_async(dispatch_get_main_queue(), ^
                                {
                                  [self updateProgressBar];
                                });
               }

               [videoWriterInput markAsFinished];
               [videoWriter endSessionAtSourceTime:CMTimeMake(samplesNumber, kFps)];
               [videoWriter finishWritingWithCompletionHandler:^{

               [self destroyDataFBO];

               //End of movie generation
               }];
             });

}
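As a side note, the readyForMoreMediaData polling loop above can also be written with AVFoundation's push model, requestMediaDataWhenReadyOnQueue:usingBlock:. The following is only a rough sketch under the question's assumptions (renderFrameAt:, renderTarget, glContext, adaptor, kFps and samplesNumber are taken from the code above), not a verified drop-in replacement:

    // Sketch only: AVFoundation invokes this block whenever the input can
    // accept more data, instead of the caller polling readyForMoreMediaData.
    dispatch_queue_t writerQueue = dispatch_queue_create("movie.writer", NULL);
    __block int frame = 0;
    [videoWriterInput requestMediaDataWhenReadyOnQueue:writerQueue usingBlock:^{
      // The GL context must be current on whatever thread runs this block.
      if ([EAGLContext currentContext] != glContext)
        [EAGLContext setCurrentContext:glContext];

      while (videoWriterInput.readyForMoreMediaData && frame < samplesNumber)
      {
        [self renderFrameAt:frame];

        if (CVPixelBufferLockBaseAddress(renderTarget, 0) == kCVReturnSuccess)
        {
          [adaptor appendPixelBuffer:renderTarget withPresentationTime:CMTimeMake(frame, kFps)];
          CVPixelBufferUnlockBaseAddress(renderTarget, 0);
        }
        frame++;
      }

      if (frame >= samplesNumber)
        [videoWriterInput markAsFinished];
    }];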

This is the code I use to create the texture cache:

    - (void)createDataFBO:(AVAssetWriterInputPixelBufferAdaptor *)adaptor
    {
      glActiveTexture(GL_TEXTURE1);
      glGenFramebuffers(1, &movieFramebuffer);
      glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer);

    #if defined(__IPHONE_6_0)
      CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, [EAGLContext currentContext], NULL, &coreVideoTextureCache);
    #else
      CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)[EAGLContext currentContext], NULL, &coreVideoTextureCache);
    #endif

      if (err)
      {
        NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreate %d", err);
      }

      // Code originally sourced from http://allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/
      CVPixelBufferPoolCreatePixelBuffer (NULL, [adaptor pixelBufferPool], &renderTarget);

      size_t frameWidth = CVPixelBufferGetWidth(renderTarget);
      size_t frameHeight = CVPixelBufferGetHeight(renderTarget);

      CVOpenGLESTextureCacheCreateTextureFromImage (kCFAllocatorDefault, coreVideoTextureCache, renderTarget,
                                                    NULL, // texture attributes
                                                    GL_TEXTURE_2D,
                                                    GL_RGBA, // opengl format
                                                    frameWidth,
                                                    frameHeight,
                                                    GL_BGRA, // native iOS format
                                                    GL_UNSIGNED_BYTE,
                                                    0,
                                                    &renderTexture);

      glBindTexture(CVOpenGLESTextureGetTarget(renderTexture), CVOpenGLESTextureGetName(renderTexture));
      glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
      glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

      glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(renderTexture), 0);

      GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);

      NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status);
    }
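One detail worth noting here (my addition, not from the original post): [adaptor pixelBufferPool] returns nil until startWriting has been called on the writer, so createDataFBO: has to run after the writer is started, as it does above. A defensive version of the pixel buffer creation might look like this:

      // Assumption: the adaptor's pool is nil until -startWriting has run.
      NSParameterAssert([adaptor pixelBufferPool] != nil);
      CVReturn poolErr = CVPixelBufferPoolCreatePixelBuffer(NULL, [adaptor pixelBufferPool], &renderTarget);
      if (poolErr != kCVReturnSuccess || renderTarget == NULL)
      {
        NSAssert(NO, @"Could not create pixel buffer from pool: %d", poolErr);
      }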

1 Answer


Have you tried glFlush() after doing your rendering?

answered 2013-07-26T14:28:07.770
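For reference, a minimal sketch of where such a call would go in the question's loop. glFinish() is shown instead, because it blocks until the GPU has actually finished, which is a stronger guarantee than glFlush(), which only submits the pending commands:

    [self renderFrameAt:frame];

    // Make sure the GL commands that drew this frame have finished executing
    // before CoreVideo reads the buffer; glFlush() merely submits them, while
    // glFinish() blocks until the GPU is done.
    glFinish();

    CVReturn lockResult = CVPixelBufferLockBaseAddress(renderTarget, 0);
    BOOL test = (lockResult == kCVReturnSuccess) && [adaptor appendPixelBuffer:renderTarget withPresentationTime:CMTimeMake(frame, kFps)];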