
I'm trying to merge two videos that originally have different orientations.

Video A - portrait - 720x1280 - MOV
Video B - landscape - 640x480 - MP4

I start by resizing and cropping video A:

- (void)resizeWithStyle:(NSString*)style {
NSString *filePath = [self.lastVideo path];
NSString *newPath = [filePath stringByReplacingOccurrencesOfString:@".mov" withString:@".mp4"];
NSURL *fullPath = [NSURL fileURLWithPath:newPath];

NSDictionary *options = @{ AVURLAssetPreferPreciseDurationAndTimingKey : @YES };
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:self.lastVideo options:options];

NSInteger width = 640;
NSInteger height = 480;

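// Swap the output dimensions when the source is portrait so the aspect ratio is preserved.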
if (([self orientationForTrack:asset] == UIInterfaceOrientationPortrait) || ([self orientationForTrack:asset] == UIInterfaceOrientationPortraitUpsideDown)) {
    width = 480;
    height = 640;
}

NSLog(@"Write Started");

NSError *error = nil;

NSString *styleKey = AVVideoScalingModeResizeAspectFill;
if ([style isEqualToString:@"fit"]) {
    styleKey = AVVideoScalingModeResizeAspect;
}

AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:fullPath fileType:AVFileTypeMPEG4 error:&error]; // write an MPEG-4 container to match the .mp4 output path
NSParameterAssert(videoWriter);
AVAsset *avAsset = asset; // reuse the asset created above instead of opening the same file twice
NSDictionary *videoSettings = @{ AVVideoCodecKey : AVVideoCodecH264,
                                 AVVideoWidthKey : @(width),
                                 AVVideoHeightKey : @(height),
                                 AVVideoScalingModeKey : styleKey };

AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
                                        assetWriterInputWithMediaType:AVMediaTypeVideo
                                        outputSettings:videoSettings];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
NSError *aerror = nil;
AVAssetReader *reader = [[AVAssetReader alloc] initWithAsset:avAsset error:&aerror];
AVAssetTrack *videoTrack = [[avAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
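// Carry the source track's preferredTransform over to the writer input so the orientation metadata survives the rewrite.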
videoWriterInput.transform = videoTrack.preferredTransform;
NSDictionary *videoOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
AVAssetReaderTrackOutput *asset_reader_output = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:videoOptions];
[reader addOutput:asset_reader_output];
//audio setup

AVAssetWriterInput* audioWriterInput = [AVAssetWriterInput
                                        assetWriterInputWithMediaType:AVMediaTypeAudio
                                        outputSettings:nil];
AVAssetReader *audioReader = [AVAssetReader assetReaderWithAsset:avAsset error:&error];
AVAssetTrack* audioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVAssetReaderOutput *readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];

[audioReader addOutput:readerOutput];
NSParameterAssert(audioWriterInput);
NSParameterAssert([videoWriter canAddInput:audioWriterInput]);
audioWriterInput.expectsMediaDataInRealTime = NO;
[videoWriter addInput:audioWriterInput];
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
[reader startReading];
dispatch_queue_t _processingQueue = dispatch_queue_create("assetAudioWriterQueue", NULL);
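// Drain video samples from the reader into the writer; the audio pass is chained in once the video track completes (see the AVAssetReaderStatusCompleted case below).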
[videoWriterInput requestMediaDataWhenReadyOnQueue:_processingQueue usingBlock:
 ^{
     while ([videoWriterInput isReadyForMoreMediaData]) {
         CMSampleBufferRef sampleBuffer;
         if ([reader status] == AVAssetReaderStatusReading &&
             (sampleBuffer = [asset_reader_output copyNextSampleBuffer])) {

             BOOL result = [videoWriterInput appendSampleBuffer:sampleBuffer];
             CFRelease(sampleBuffer);

             if (!result) {
                 [reader cancelReading];
                 break;
             }
         } else {
             [videoWriterInput markAsFinished];

             switch ([reader status]) {
                 case AVAssetReaderStatusReading:
                     // the reader has more for other tracks, even if this one is done
                     break;
                 case AVAssetReaderStatusFailed:
                     [videoWriter cancelWriting];
                     break;
                 case AVAssetReaderStatusCompleted:
                     // your method for when the conversion is done
                     // should call finishWriting on the writer
                     //hook up audio track
                     [audioReader startReading];
                     [videoWriter startSessionAtSourceTime:kCMTimeZero];
                     dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
                     [audioWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^
                      {
                          NSLog(@"Request");
                          NSLog(@"Asset Writer ready :%d",audioWriterInput.readyForMoreMediaData);
                          while (audioWriterInput.readyForMoreMediaData) {
                              CMSampleBufferRef nextBuffer;
                              if ([audioReader status] == AVAssetReaderStatusReading &&
                                  (nextBuffer = [readerOutput copyNextSampleBuffer])) {
                                  NSLog(@"Ready");
                                  if (nextBuffer) {
                                      NSLog(@"NextBuffer");
                                      [audioWriterInput appendSampleBuffer:nextBuffer];
                                  }
                              }else{
                                  [audioWriterInput markAsFinished];
                                  switch ([audioReader status]) {
                                      case AVAssetReaderStatusCompleted:
                                          [videoWriter finishWritingWithCompletionHandler:^{
                                              if (videoWriter.status == AVAssetWriterStatusCompleted) {
                                                  NSLog(@"Asset written");
                                                  // inspect the orientation of the file that was just written
                                                  AVAsset *writtenAsset = [AVURLAsset URLAssetWithURL:fullPath options:nil];
                                                  NSLog(@"New Asset Orientation: %ld", (long)[self orientationForTrack:writtenAsset]);
                                                  [self checkFileExists:fullPath];
                                                  [self getVideoProperties:fullPath];
                                                  self.lastVideo = fullPath;
                                                  //[self showDocumentsContents];
                                                  self.libraryVideo = fullPath;
                                              } else {
                                                  NSLog(@"Write failed: %@", videoWriter.error);
                                              }
                                          }];
                                          break;
                                  }
                              }
                          }

                      }
                      ];
                     break;
             }

             break;
         }
     }
 }
 ];
NSLog(@"Write Ended"); // note: logged immediately; the actual writing finishes asynchronously in the blocks above
}

This appears to work well and gives me what I want: H264 (mp4) output with the ability to aspect-fill or aspect-fit to 640x480. When played back in MPMoviePlayerController, the video now appears to be in landscape format.
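
For reference, orientationForTrack: isn't shown above. It's a small helper that infers orientation from the first video track's naturalSize and preferredTransform, along the lines of the common pattern below (a minimal sketch; my exact branch conditions may differ):

- (UIInterfaceOrientation)orientationForTrack:(AVAsset *)asset {
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    CGSize size = [videoTrack naturalSize];
    CGAffineTransform txf = [videoTrack preferredTransform];

    // Map the four standard capture transforms onto interface orientations.
    if (size.width == txf.tx && size.height == txf.ty)
        return UIInterfaceOrientationLandscapeRight;
    else if (txf.tx == 0 && txf.ty == 0)
        return UIInterfaceOrientationLandscapeLeft;
    else if (txf.tx == 0 && txf.ty == size.width)
        return UIInterfaceOrientationPortraitUpsideDown;
    else
        return UIInterfaceOrientationPortrait;
}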

Next, I try to join the two videos:

- (void)joinVideo:(id)sender {
if ((self.libraryVideo != nil) && (self.recordVideo != nil)) {
    NSString *libraryPath = [self.libraryVideo path];
    NSString *outputPath = [libraryPath stringByReplacingOccurrencesOfString:@".mp4" withString:@"-joined.mp4"];
    NSURL *outputURL = [NSURL fileURLWithPath:outputPath];
    NSLog(@"Can Merge Video");
    NSMutableArray *audioTracks = [NSMutableArray array];
    NSMutableArray *videoTracks = [NSMutableArray array];
    NSDictionary *options = @{ AVURLAssetPreferPreciseDurationAndTimingKey : @YES };
    AVURLAsset *asset2 = [AVURLAsset URLAssetWithURL:self.recordVideo options:options];
    [videoTracks addObjectsFromArray:[asset2 tracksWithMediaType:AVMediaTypeVideo]];
    [audioTracks addObjectsFromArray:[asset2 tracksWithMediaType:AVMediaTypeAudio]];
    NSLog(@"Asset 2 Orientation: %ld", (long)[self orientationForTrack:asset2]);
    AVURLAsset *asset1 = [AVURLAsset URLAssetWithURL:self.libraryVideo options:options];
    [videoTracks addObjectsFromArray:[asset1 tracksWithMediaType:AVMediaTypeVideo]];
    [audioTracks addObjectsFromArray:[asset1 tracksWithMediaType:AVMediaTypeAudio]];
    NSLog(@"Asset 1 Orientation: %ld", (long)[self orientationForTrack:asset1]);

    AVMutableComposition *composition = [[AVMutableComposition alloc] init];

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.frameDuration = CMTimeMake(1,30);
    videoComposition.renderScale = 1.0;

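    // Merge every source audio track into a single composition audio track.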
    if ([audioTracks count] > 0) {
        AVMutableCompositionTrack * audioTrackComposition = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

        [audioTracks enumerateObjectsUsingBlock:^(AVAssetTrack *track, NSUInteger idx, BOOL *stop) {
            [audioTrackComposition insertTimeRange:CMTimeRangeMake(kCMTimeZero, track.timeRange.duration)
                                           ofTrack:track
                                            atTime:kCMTimeZero
                                             error:nil];
        }];
    }

    AVMutableCompositionTrack *videoTrackComposition = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                                preferredTrackID:kCMPersistentTrackID_Invalid];

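    // Append each source video track into the single mutable video track of the composition.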
    [videoTracks enumerateObjectsUsingBlock:^(AVAssetTrack *track, NSUInteger idx, BOOL *stop) {
        [videoTrackComposition insertTimeRange:CMTimeRangeMake(kCMTimeZero, track.timeRange.duration)
                                       ofTrack:track
                                        atTime:kCMTimeZero
                                         error:nil];
    }];

    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];

    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrackComposition];

    AVAssetTrack *sourceVideoTrack = [[asset2 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

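    // Rotate the recorded clip 90 degrees and translate it by 320 points so the rotated frame stays inside the render area.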
    CGAffineTransform rotationTransform = CGAffineTransformMakeRotation(M_PI/2);
    CGAffineTransform rotateTranslate = CGAffineTransformTranslate(rotationTransform,320,0);

    [videoTrackComposition setPreferredTransform:sourceVideoTrack.preferredTransform];
    [layerInstruction setTransform:rotateTranslate atTime:kCMTimeZero];

    instruction.layerInstructions = [NSArray arrayWithObject: layerInstruction];
    videoComposition.instructions = [NSArray arrayWithObject: instruction];

    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition
                                                                           presetName:AVAssetExportPresetPassthrough];
    exportSession.outputFileType = AVFileTypeMPEG4;
    exportSession.shouldOptimizeForNetworkUse = YES;
    exportSession.outputURL = outputURL;

    [exportSession exportAsynchronouslyWithCompletionHandler:^ {
        switch (exportSession.status) {
            case AVAssetExportSessionStatusFailed: {
                NSLog(@"Join Failed");
                break;
            }
            case AVAssetExportSessionStatusCompleted: {
                NSLog(@"Join Completed");
                [self checkFileExists:outputURL];
                [self getVideoProperties:outputURL];
                self.lastVideo = outputURL;
                break;
            }
            case AVAssetExportSessionStatusCancelled: {
                NSLog(@"Join Cancelled");
                break;
            }
            default:
                break;
        }
    }];
}
}

This joins the videos successfully. However, while video B looks correct, video A ends up rotated 90 degrees.

I've spent a lot of time trying to work out why this merge doesn't treat video A as a true 640x480 landscape video. I've tried running additional operations on video A to force a change to its preferredTransform, but nothing seems to have any effect. Setting a preferred transform on the original AVAssetWriterInput also appears to do nothing.
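
To make that concrete, the override I tried in resizeWithStyle: looked something like this (an illustrative sketch; the exact transform values varied across attempts):

// Illustrative: forcing a fixed transform on the writer input instead of
// copying videoTrack.preferredTransform; it made no visible difference
// to the orientation of the merged output.
videoWriterInput.transform = CGAffineTransformMakeRotation(M_PI_2);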

Any ideas for something that actually works? With everything I've tried, video A still ends up rotated 90 degrees and stretched when merged.

Is there a way to force the orientation of input taken from the photo library?

Thanks!

