I am building a test app from the unmodified RosyWriter sample code. When I capture video at 1920 * 1080, the preview only shows a 1620 * 1080 region of each frame, because the preview view is 960 * 640 (aspect ratio 3:2) and the sample center-crops the 16:9 frame to fill it. I want to change the code so that all of the video content is visible (at 960 wide that is a 960 * 540 area), with black bars above and below the video inside the view. As far as I can tell, the code below is what produces the current cropping behaviour.
- (CGRect)textureSamplingRectForCroppingTextureWithAspectRatio:(CGSize)textureAspectRatio toAspectRatio:(CGSize)croppingAspectRatio
{
    CGRect normalizedSamplingRect = CGRectZero;
    CGSize cropScaleAmount = CGSizeMake(croppingAspectRatio.width / textureAspectRatio.width, croppingAspectRatio.height / textureAspectRatio.height);
    CGFloat maxScale = fmax(cropScaleAmount.width, cropScaleAmount.height);
    CGSize scaledTextureSize = CGSizeMake(textureAspectRatio.width * maxScale, textureAspectRatio.height * maxScale);

    // Using the larger scale factor means the texture always covers the whole view,
    // so the dimension that overflows gets cropped (aspect-fill).
    if ( cropScaleAmount.height > cropScaleAmount.width ) {
        normalizedSamplingRect.size.width = croppingAspectRatio.width / scaledTextureSize.width;
        normalizedSamplingRect.size.height = 1.0;
    }
    else {
        normalizedSamplingRect.size.height = croppingAspectRatio.height / scaledTextureSize.height;
        normalizedSamplingRect.size.width = 1.0;
    }

    // Center crop
    normalizedSamplingRect.origin.x = (1.0 - normalizedSamplingRect.size.width)/2.0;
    normalizedSamplingRect.origin.y = (1.0 - normalizedSamplingRect.size.height)/2.0;

    return normalizedSamplingRect;
}
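As far as I understand it, the method above always picks fmax, so the sampling rect stays inside 0..1 and the frame is center-cropped to fill the view (aspect-fill). For the letterbox effect I suspect the quad has to shrink rather than the texture coordinates, because with GL_CLAMP_TO_EDGE a sampling rect outside 0..1 would just smear the edge pixels instead of producing black bars. This is a rough sketch of the helper I have in mind; the method name and the whole vertex-scaling idea are my own guess, not something from the RosyWriter sample:

// Hypothetical helper (my own sketch, not from the sample): how much to shrink
// the full-screen quad so the whole frame fits inside the view (aspect-fit).
- (CGSize)vertexScaleForFittingTextureWithAspectRatio:(CGSize)textureAspectRatio toAspectRatio:(CGSize)viewAspectRatio
{
    CGFloat textureAspect = textureAspectRatio.width / textureAspectRatio.height;
    CGFloat viewAspect = viewAspectRatio.width / viewAspectRatio.height;
    CGSize scale = CGSizeMake(1.0, 1.0);
    if ( textureAspect > viewAspect ) {
        // Frame is wider than the view: keep the full width and shrink the quad
        // vertically, which leaves black bars at the top and bottom.
        scale.height = viewAspect / textureAspect;
    }
    else {
        // Frame is taller than the view: keep the full height and shrink the quad
        // horizontally, which leaves black bars on the left and right.
        scale.width = textureAspect / viewAspect;
    }
    return scale;
}

With my numbers (1920 * 1080 frame, 960 * 640 view) that gives scale.height = 1.5 / 1.7778 = 0.84375, so the video would occupy 960 * 540 of the view, which is exactly the area I want.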
- (void)displayPixelBuffer:(CVImageBufferRef)pixelBuffer
{
    if (frameBufferHandle == 0) {
        BOOL success = [self initializeBuffers];
        if ( !success ) {
            NSLog(@"Problem initializing OpenGL buffers.");
        }
    }

    if (videoTextureCache == NULL)
        return;

    // Create a CVOpenGLESTexture from the CVImageBuffer
    size_t frameWidth = CVPixelBufferGetWidth(pixelBuffer);
    size_t frameHeight = CVPixelBufferGetHeight(pixelBuffer);
    CVOpenGLESTextureRef texture = NULL;
    CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                                videoTextureCache,
                                                                pixelBuffer,
                                                                NULL,
                                                                GL_TEXTURE_2D,
                                                                GL_RGBA,
                                                                frameWidth,
                                                                frameHeight,
                                                                GL_BGRA,
                                                                GL_UNSIGNED_BYTE,
                                                                0,
                                                                &texture);

    if (!texture || err) {
        NSLog(@"CVOpenGLESTextureCacheCreateTextureFromImage failed (error: %d)", err);
        return;
    }

    glBindTexture(CVOpenGLESTextureGetTarget(texture), CVOpenGLESTextureGetName(texture));

    // Set texture parameters
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    glBindFramebuffer(GL_FRAMEBUFFER, frameBufferHandle);

    // Set the view port to the entire view
    glViewport(0, 0, renderBufferWidth, renderBufferHeight);
    //NSLog(@"render width:%d height:%d", renderBufferWidth, renderBufferHeight);

    // Full-screen quad in normalized device coordinates.
    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f,
         1.0f, -1.0f,
        -1.0f,  1.0f,
         1.0f,  1.0f,
    };

    // The texture vertices are set up such that we flip the texture vertically.
    // This is so that our top left origin buffers match OpenGL's bottom left texture coordinate system.
    CGRect textureSamplingRect = [self textureSamplingRectForCroppingTextureWithAspectRatio:CGSizeMake(frameWidth, frameHeight) toAspectRatio:self.bounds.size];
    GLfloat textureVertices[] = {
        CGRectGetMinX(textureSamplingRect), CGRectGetMaxY(textureSamplingRect), // 0.078125, 1.0
        CGRectGetMaxX(textureSamplingRect), CGRectGetMaxY(textureSamplingRect), // 0.921875, 1.0
        CGRectGetMinX(textureSamplingRect), CGRectGetMinY(textureSamplingRect), // 0.078125, 0.0
        CGRectGetMaxX(textureSamplingRect), CGRectGetMinY(textureSamplingRect), // 0.921875, 0.0
    };

    // Draw the texture on the screen with OpenGL ES 2
    [self renderWithSquareVertices:squareVertices textureVertices:textureVertices];
    //[oglContext presentRenderbuffer:GL_RENDERBUFFER];

    glBindTexture(CVOpenGLESTextureGetTarget(texture), 0);

    // Flush the CVOpenGLESTexture cache and release the texture
    CVOpenGLESTextureCacheFlush(videoTextureCache, 0);
    CFRelease(texture);
}
To get that effect I think I need to change the textureSamplingRect / textureVertices part. The logged texture coordinates in the comments above run from 0.078125 to 0.921875 horizontally, which is 84.375% of the width, and 1920 * 0.84375 = 1620, so that matches the cropped region I actually see. But I am a beginner with OpenGL ES, so could someone explain in detail how to do this?
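In case it clarifies what I am asking, this is roughly the change to displayPixelBuffer: that I imagine (untested, and it relies on the hypothetical helper sketched above), replacing the squareVertices / textureVertices setup after the glBindFramebuffer and glViewport calls:

    // My guess at the letterbox version: clear to black so the uncovered part of the
    // view becomes the bars, sample the whole texture, and shrink only the quad.
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);

    CGSize scale = [self vertexScaleForFittingTextureWithAspectRatio:CGSizeMake(frameWidth, frameHeight) toAspectRatio:self.bounds.size];
    GLfloat squareVertices[] = {
        -scale.width, -scale.height,
         scale.width, -scale.height,
        -scale.width,  scale.height,
         scale.width,  scale.height,
    };
    // Sample the full texture (0..1), still flipped vertically like the original code.
    GLfloat textureVertices[] = {
        0.0f, 1.0f,
        1.0f, 1.0f,
        0.0f, 0.0f,
        1.0f, 0.0f,
    };
    [self renderWithSquareVertices:squareVertices textureVertices:textureVertices];

I am not sure this is correct, and I do not know whether clearing every frame is the proper way to get the black bars, so any correction or a better approach would be appreciated.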