
I want to add text (a string) to a video that is being played back with MPMoviePlayerViewController, so that the text becomes part of the video itself. Then, when I post that video to Facebook or Twitter, the text should appear on top of the video.

To do this, I tried extracting every frame of the video, drawing the text onto each of those frames, and then re-encoding all of the frames back into a video. However, this approach runs into memory problems and crashes on the device.

    - (NSArray *)getVideoFramesFromMovieController:(MPMoviePlayerViewController *)mpMoviePlayerVC
    {
        NSLog(@"Getting frames from a video asset.");

        NSMutableArray *videoFrames = [NSMutableArray array];

        // Grab one thumbnail every 1/frameRate seconds (e.g. 15 fps) for the whole duration.
        for (float i = 0; i <= mpMoviePlayerVC.moviePlayer.duration; i += (1 / self.frameRate))
        {
            UIImage *singleFrameImage = [mpMoviePlayerVC.moviePlayer thumbnailImageAtTime:i
                                                                               timeOption:MPMovieTimeOptionExact];
            [videoFrames addObject:singleFrameImage];

            NSLog(@"Got frame number : %lu", (unsigned long)[videoFrames count]);
        }

        NSLog(@"Total frames: %lu", (unsigned long)[videoFrames count]);
        return [NSArray arrayWithArray:videoFrames];
    }

The method above gives me all of the frames. I then draw some text, say "Hello", onto each of those frames (the drawing step is sketched below), and finally build a new video out of all of the frames with the method that follows.
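
A minimal sketch of the per-frame drawing step, which the question does not show in full; the helper name and styling here are placeholders, not the exact code used:

    // Sketch: draw a string onto one extracted frame and return the annotated image.
    // Method name, position and font are illustrative placeholders.
    - (UIImage *)imageByDrawingText:(NSString *)string onImage:(UIImage *)frameImage
    {
        UIGraphicsBeginImageContextWithOptions(frameImage.size, YES, frameImage.scale);

        // Draw the original frame first, then the text on top of it.
        [frameImage drawInRect:CGRectMake(0, 0, frameImage.size.width, frameImage.size.height)];

        [[UIColor whiteColor] set];
        CGRect textRect = CGRectMake(10, frameImage.size.height - 40, frameImage.size.width - 20, 30);
        [string drawInRect:textRect withFont:[UIFont boldSystemFontOfSize:24]];  // pre-iOS 7 API, matching the era of this code

        UIImage *annotatedFrame = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();
        return annotatedFrame;
    }

Each annotated frame is then fed into the writer method below.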

    - (void)writeImageAsMovie:(NSArray *)array toPath:(NSString *)path size:(CGSize)size
    {
        NSLog(@"Inside writeImageAsMovie method.");

        NSError *error = nil;
        AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                               fileType:AVFileTypeMPEG4
                                                                  error:&error];
        NSParameterAssert(videoWriter);

        NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                       AVVideoCodecH264, AVVideoCodecKey,
                                       [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                       [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                       nil];
        AVAssetWriterInput *writerInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                              outputSettings:videoSettings] retain];

        AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                         assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                         sourcePixelBufferAttributes:nil];

        NSParameterAssert(writerInput);
        NSParameterAssert([videoWriter canAddInput:writerInput]);
        [videoWriter addInput:writerInput];

        // Start a writing session.
        [videoWriter startWriting];
        [videoWriter startSessionAtSourceTime:kCMTimeZero];

        dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
        int __block frame = 0;

        [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
            while ([writerInput isReadyForMoreMediaData])
            {
                NSLog(@"Total frames to be written: %lu", (unsigned long)[array count]);
                if (++frame >= [array count])   // all frames written
                {
                    [writerInput markAsFinished];
                    [videoWriter finishWriting];
                    [videoWriter release];
                    break;
                }

                CVPixelBufferRef buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[[array objectAtIndex:frame] CGImage]
                                                                                 andSize:size];
                if (buffer)
                {
                    if (![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, self.frameRate)])
                        NSLog(@"FAIL");
                    else
                        NSLog(@"Success: %d", frame);
                    CFRelease(buffer);
                }
            }
        }];

        NSLog(@"writeImageAsMovie returned; frames are written asynchronously.");

        [self performSelector:@selector(waitTillVideoFinishes) withObject:nil afterDelay:20.0];
    }
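
For completeness, pixelBufferFromCGImage:andSize: is the usual CGBitmapContext-based CGImage-to-CVPixelBuffer conversion. A minimal sketch of such a helper (not the exact code from the project) looks like this:

    // Sketch of the pixelBufferFromCGImage:andSize: helper referenced above.
    // Renders the CGImage into a newly created CVPixelBuffer; the caller must CFRelease() the result.
    - (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image andSize:(CGSize)size
    {
        NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                                 [NSNumber numberWithBool:YES], (NSString *)kCVPixelBufferCGImageCompatibilityKey,
                                 [NSNumber numberWithBool:YES], (NSString *)kCVPixelBufferCGBitmapContextCompatibilityKey,
                                 nil];
        CVPixelBufferRef pxbuffer = NULL;
        CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                              size.width,
                                              size.height,
                                              kCVPixelFormatType_32ARGB,
                                              (CFDictionaryRef)options,
                                              &pxbuffer);
        NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

        CVPixelBufferLockBaseAddress(pxbuffer, 0);
        void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

        CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef context = CGBitmapContextCreate(pxdata,
                                                     size.width,
                                                     size.height,
                                                     8,
                                                     CVPixelBufferGetBytesPerRow(pxbuffer),
                                                     rgbColorSpace,
                                                     kCGImageAlphaNoneSkipFirst);
        NSParameterAssert(context);

        // Draw the frame image into the pixel buffer's backing memory.
        CGContextDrawImage(context, CGRectMake(0, 0, size.width, size.height), image);

        CGContextRelease(context);
        CGColorSpaceRelease(rgbColorSpace);
        CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

        return pxbuffer;
    }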

This works fine on the Mac (in the Simulator), but on the device it crashes because of the memory problem.

I have also tried several other ways of watermarking the video with text, but none of them worked. Thanks in advance.

2 Answers

This code adds text (a string) to a video; after saving the result, you can play it in any player. The biggest advantage of this approach is that the exported video keeps its audio.

#import <AVFoundation/AVFoundation.h>

-(void)MixVideoWithText
{
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:url options:nil];  // `url` is the NSURL of the source video
    AVMutableComposition* mixComposition = [AVMutableComposition composition];

    AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    //If you need audio as well add the Asset Track for audio here

    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipVideoTrack atTime:kCMTimeZero error:nil];
    [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];

    [compositionVideoTrack setPreferredTransform:[[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] preferredTransform]];

    // Use the video track's natural size (-[AVAsset naturalSize] is deprecated).
    CGSize sizeOfVideo = [clipVideoTrack naturalSize];

    // The CATextLayer defines the text to overlay on the video.

    CATextLayer *textOfvideo = [[CATextLayer alloc] init];
    textOfvideo.string = [NSString stringWithFormat:@"%@", text];  // `text` is the string you want to add to the video
    [textOfvideo setFont:(__bridge CFTypeRef)([UIFont fontWithName:[NSString stringWithFormat:@"%@", fontUsed] size:13])];  // `fontUsed` is the font name
    [textOfvideo setFrame:CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height/6)];
    [textOfvideo setAlignmentMode:kCAAlignmentCenter];
    [textOfvideo setForegroundColor:[selectedColour CGColor]];  // `selectedColour` is the UIColor of the text


    CALayer *optionalLayer = [CALayer layer];
    [optionalLayer addSublayer:textOfvideo];
    optionalLayer.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
    [optionalLayer setMasksToBounds:YES];

    CALayer *parentLayer=[CALayer layer];
    CALayer *videoLayer=[CALayer layer];
    parentLayer.frame=CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
    videoLayer.frame=CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
    [parentLayer addSublayer:videoLayer];
    [parentLayer addSublayer:optionalLayer];

    AVMutableVideoComposition *videoComposition=[AVMutableVideoComposition videoComposition] ;
    videoComposition.frameDuration=CMTimeMake(1, 10);
    videoComposition.renderSize=sizeOfVideo;
    videoComposition.animationTool=[AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];

    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
    AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
    videoComposition.instructions = [NSArray arrayWithObject: instruction];

    NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)objectAtIndex:0];
    NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
    [dateFormatter setDateFormat:@"yyyy-MM-dd_HH-mm-ss"];
    NSString *destinationPath = [documentsDirectory stringByAppendingFormat:@"/output_%@.mov", [dateFormatter stringFromDate:[NSDate date]]];

    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
    exportSession.videoComposition=videoComposition;

    exportSession.outputURL = [NSURL fileURLWithPath:destinationPath];
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        switch (exportSession.status)
        {
            case AVAssetExportSessionStatusCompleted:
                NSLog(@"Export OK");
                if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(destinationPath)) {
                    UISaveVideoAtPathToSavedPhotosAlbum(destinationPath, self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
                }
                break;
            case AVAssetExportSessionStatusFailed:
                NSLog(@"AVAssetExportSessionStatusFailed: %@", exportSession.error);
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Export Cancelled");
                break;
            default:
                break;
        }
    }];
}

This callback reports any error that occurred while saving the video:

-(void) video: (NSString *) videoPath didFinishSavingWithError: (NSError *) error contextInfo: (void *) contextInfo 
{
    if(error)
        NSLog(@"Finished saving video with error: %@", error); 
}
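
The snippet references url, text, fontUsed and selectedColour without declaring them; presumably they are ivars or properties of the same class. A hedged example (not part of the original answer) of how the method might be driven:

// Example of driving MixVideoWithText.
// url, text, fontUsed and selectedColour are assumed to be ivars of the same class.
- (void)addCaption:(NSString *)caption toVideoAtURL:(NSURL *)videoURL
{
    url            = videoURL;                 // file URL of the source movie
    text           = caption;                  // the string to overlay, e.g. @"Hello"
    fontUsed       = @"Helvetica-Bold";        // any installed font name
    selectedColour = [UIColor whiteColor];     // colour of the overlaid text

    [self MixVideoWithText];
}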
answered 2014-02-25 at 14:12