I'm trying to define the size and position of uiElementInput (a GPUImageUIElement backed by imageSequence, a UIImageView), but I can't get it to work: in the generated video the image comes out stretched.
I've been stuck on this for a few days now, so any help would be greatly appreciated!
Here is what I have so far:
// Camera input and a passthrough filter for the live video
videoCamera.outputImageOrientation = UIInterfaceOrientationLandscapeLeft;
videoCamera.horizontallyMirrorFrontFacingCamera = NO;
videoCamera.horizontallyMirrorRearFacingCamera = NO;

filter = [[GPUImageFilter alloc] init];
[videoCamera addTarget:filter];
// Alpha blend filter that composites the UI element over the video
blendFilter = [[GPUImageAlphaBlendFilter alloc] init];
[(GPUImageAlphaBlendFilter *)blendFilter setMix:1.0];
[blendFilter forceProcessingAtSizeRespectingAspectRatio:CGSizeMake(148, 250)];

// UIImageView that feeds the GPUImageUIElement overlay
imageSequence = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, 148, 250)];
imageSequence.contentMode = UIViewContentModeScaleAspectFit;
uiElementInput = [[GPUImageUIElement alloc] initWithView:imageSequence];
// Transform filter (currently has no source attached) and target wiring
filterTrans1 = [[GPUImageTransformFilter alloc] init];
[filterTrans1 forceProcessingAtSize:CGSizeMake(148, 250)];

[filter addTarget:blendFilter];          // camera video -> blend
[filterTrans1 addTarget:blendFilter];    // transform (no input yet) -> blend
[uiElementInput addTarget:blendFilter];  // UI element -> blend
// Movie writer for the final 480x320 output
pathToMovie = [NSTemporaryDirectory() stringByAppendingPathComponent:@"moviez.m4v"];
unlink([pathToMovie UTF8String]);
movieURL = [NSURL fileURLWithPath:pathToMovie];

movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(480.0, 320.0)];
[blendFilter addTarget:movieWriter];
// Update the UI element every 5 camera frames, cycling through 20 images
__unsafe_unretained GPUImageUIElement *weakUIElementInput = uiElementInput;
[filter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){
    if (imageIndex == 20)
        imageIndex = 1;

    if (imageFrames == 5)
    {
        imageFrames = 0;
        imageSequence.frame = CGRectMake(20, 20, 148, 250);
        imageSequence.contentMode = UIViewContentModeScaleAspectFit;
        imageSequence.image = [UIImage imageNamed:[NSString stringWithFormat:@"testeImage%i", imageIndex]];
        [weakUIElementInput update];
        imageIndex++;
    }
    imageFrames++;
}];
[videoCamera startCameraCapture];
...
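
In case it helps, this is roughly the alternative routing I've been sketching (untested, and the transform values are placeholders I picked for illustration): feed uiElementInput through filterTrans1 so the overlay is scaled/positioned on the GPU before it reaches the blend, instead of letting the blend stretch it.

// Sketch only: route the UI element through the transform filter before blending.
// The affineTransform scale factors below are placeholders, not verified to give
// the desired size/placement in the 480x320 output.
filterTrans1 = [[GPUImageTransformFilter alloc] init];
filterTrans1.affineTransform = CGAffineTransformMakeScale(148.0 / 480.0, 250.0 / 320.0); // placeholder scale

[filter addTarget:blendFilter];          // camera video -> blend (first input)
[uiElementInput addTarget:filterTrans1]; // UI element -> transform
[filterTrans1 addTarget:blendFilter];    // transformed UI element -> blend (second input)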