
I want to embed text into a video while it is being recorded and then save it to the camera roll, so that when the video is played back on another device the text is visible.


1 Answer


From here: How can I add overlay text on a video, then re-encode it?

Note that this has not been updated for ARC, but it should give you a good idea of the methodology. The example adds both an image and text; there may be a faster approach using the iOS 7 APIs / Core Text.

Also, compositing video like this means decompressing each frame to a bitmap, drawing the text, and then recompressing the video. That is an expensive operation, performance can suffer, and it is not particularly well suited to a mobile application.

Here is the code that adds text and an image to a video file:

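// Load the source movie and copy its video track into a mutable composition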
AVURLAsset* videoAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:videoName] options:nil];

AVMutableComposition* mixComposition = [AVMutableComposition composition];

AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];

AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipVideoTrack atTime:kCMTimeZero error:nil];

[compositionVideoTrack setPreferredTransform:clipVideoTrack.preferredTransform]; // preserve the source orientation

CGSize videoSize = [clipVideoTrack naturalSize]; 

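// Semi-transparent watermark image layered over the video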
UIImage *myImage = [UIImage imageNamed:@"29.png"];
CALayer *aLayer = [CALayer layer];
aLayer.contents = (id)myImage.CGImage;
aLayer.frame = CGRectMake(videoSize.width - 65, videoSize.height - 75, 57, 57);
aLayer.opacity = 0.65;
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:aLayer];

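// Text overlay layer, centered horizontally and sized relative to the video height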
CATextLayer *titleLayer = [CATextLayer layer];
titleLayer.string = @"Text goes here";
titleLayer.font = @"Helvetica"; // CATextLayer.font takes a CFTypeRef; a font-name string works here
titleLayer.fontSize = videoSize.height / 6;
//titleLayer.shadowOpacity = 0.5; // uncomment for a drop shadow behind the text
titleLayer.alignmentMode = kCAAlignmentCenter;
titleLayer.bounds = CGRectMake(0, 0, videoSize.width, videoSize.height / 6); //You may need to adjust this for proper display
[parentLayer addSublayer:titleLayer]; //ONLY IF WE ADDED TEXT

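// Video composition that renders the Core Animation layer tree into each frame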
AVMutableVideoComposition* videoComp = [AVMutableVideoComposition videoComposition]; // autoreleased; retained by the export session below
videoComp.renderSize = videoSize;
videoComp.frameDuration = CMTimeMake(1, 30);
videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];

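// A single instruction (and layer instruction) spanning the full duration of the composition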
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComp.instructions = [NSArray arrayWithObject: instruction];

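// Export the composition; the overlay is burned in during re-encoding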
AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality]; // AVAssetExportPresetPassthrough would skip re-encoding, but then the overlay would not be rendered
assetExport.videoComposition = videoComp;

NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *exportPath = [NSString stringWithFormat:@"%@/mynewwatermarkedvideo.mp4", documentsDirectory];
NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];

// Remove any previous export at the same path, or the export will fail
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
{
    [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
}

assetExport.outputFileType = AVFileTypeQuickTimeMovie;
assetExport.outputURL = exportUrl;
assetExport.shouldOptimizeForNetworkUse = YES;

[assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
    dispatch_async(dispatch_get_main_queue(), ^{
        [self exportDidFinish:assetExport];
        // Balance the alloc above; release only after exportDidFinish: has used the session
        [assetExport release];
    });
}];

[videoAsset release];

}
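
One caveat: the completion handler above assumes the export succeeded. Before handing the file off you may want to check the session's status, along the lines of this sketch:

// Variant of the completion handler that checks the export result first
[assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
    dispatch_async(dispatch_get_main_queue(), ^{
        if (assetExport.status == AVAssetExportSessionStatusCompleted) {
            [self exportDidFinish:assetExport];
        } else {
            NSLog(@"Export failed: %@", assetExport.error);
        }
        [assetExport release];
    });
}];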

- (void)exportDidFinish:(AVAssetExportSession*)session
{
    NSURL *exportUrl = session.outputURL;
    // The library must outlive the asynchronous write, so it is released
    // inside the completion block (or in the else branch if the write never starts)
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];

    if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:exportUrl])
    {
        [library writeVideoAtPathToSavedPhotosAlbum:exportUrl completionBlock:^(NSURL *assetURL, NSError *error)
        {
            dispatch_async(dispatch_get_main_queue(), ^{
                if (error) {
                    UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Video Saving Failed"
                                                                   delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
                    [alert show];
                    [alert release];
                } else {
                    UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Video Saved" message:@"Saved To Photo Album"
                                                                   delegate:self cancelButtonTitle:@"OK" otherButtonTitles:nil];
                    [alert show];
                    [alert release];
                }
                [library release];
            });
        }];
    }
    else
    {
        [library release];
    }
    NSLog(@"Completed");
    UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"AlertView" message:@"Video is edited successfully." delegate:self cancelButtonTitle:@"OK" otherButtonTitles:nil];
    [alert show];
    [alert release];

}
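
ALAssetsLibrary has since been deprecated in favor of the Photos framework. On newer deployment targets, a minimal sketch of the equivalent save (assuming the app has permission to add to the photo library) would be:

#import <Photos/Photos.h>

// Save the exported file to the photo library with the Photos framework
[[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
    [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:session.outputURL];
} completionHandler:^(BOOL success, NSError *error) {
    NSLog(@"Saved to photo library: %d, error: %@", success, error);
}];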
answered 2013-09-25T07:14:39.583