I have an array of 10 UIImages. I want to export them as a video that is 15 seconds long in total, so the 10 frames need to loop for 15 seconds, with a 0.2-second interval between frames.
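(If my math is right, 0.2 s per frame over 15 s works out to 15 / 0.2 = 75 frames in total, i.e. the 10 images repeated 7.5 times.)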
I managed to create a movie from the array, but I don't understand how CMTime works. I have tried playing with the numbers, but I can't get the result I want: the movie comes out too short, plays back too fast, and so on...
I have read this: Trying to understand CMTime and CMTimeMake, but it still doesn't make sense to me...
I need to understand how these three CMTime variables relate to each other:
CMTime frameTime = CMTimeMake(1, 5);
CMTime lastTime = CMTimeMake(i, 5);
CMTime presentTime = CMTimeAdd(lastTime, frameTime);
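If I am reading the linked question correctly (this is just my guess, so please correct me if it's wrong), CMTimeMake(value, timescale) represents value / timescale seconds, which would mean the snippet above steps through time in 1/5-second (0.2 s) increments:

CMTime frameTime   = CMTimeMake(1, 5);                // 1/5 s = 0.2 s
CMTime lastTime    = CMTimeMake(3, 5);                // e.g. i = 3 -> 3/5 s = 0.6 s
CMTime presentTime = CMTimeAdd(lastTime, frameTime);  // 4/5 s = 0.8 s
NSLog(@"%f", CMTimeGetSeconds(presentTime));          // prints 0.800000

Is that the right way to think about it? In the code below I'm using a timescale of 15 instead of 5, which may well be part of my confusion.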
Here is the code I'm using:
- (void)writeImageAsMovie:(NSArray *)array toPath:(NSURL *)path size:(CGSize)size
{
    NSError *error = nil;
    self.videoWriter = [[AVAssetWriter alloc] initWithURL:path
                                                 fileType:AVFileTypeMPEG4
                                                    error:&error];
    NSParameterAssert(self.videoWriter);

    // H.264 output at the requested size
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];
    AVAssetWriterInput *writerInput =
        [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                           outputSettings:videoSettings];

    // Attributes for the adaptor's pixel buffer pool
    NSMutableDictionary *attributes = [[NSMutableDictionary alloc] init];
    [attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB] forKey:(NSString *)kCVPixelBufferPixelFormatTypeKey];
    [attributes setObject:[NSNumber numberWithUnsignedInt:size.width] forKey:(NSString *)kCVPixelBufferWidthKey];
    [attributes setObject:[NSNumber numberWithUnsignedInt:size.height] forKey:(NSString *)kCVPixelBufferHeightKey];

    AVAssetWriterInputPixelBufferAdaptor *adaptor =
        [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                         sourcePixelBufferAttributes:attributes];

    NSParameterAssert(writerInput);
    NSParameterAssert([self.videoWriter canAddInput:writerInput]);
    [self.videoWriter addInput:writerInput];
    writerInput.expectsMediaDataInRealTime = YES;
    // Start the session at time zero
    [self.videoWriter startWriting];
    [self.videoWriter startSessionAtSourceTime:kCMTimeZero];

    // First frame at presentation time zero
    CVPixelBufferRef buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:0] CGImage]
                                                      size:CGSizeMake(size.width, size.height)];
    [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
    // Remaining frames
    int i = 1;
    while (writerInput.readyForMoreMediaData) {
        CMTime frameTime   = CMTimeMake(1, 15);
        CMTime lastTime    = CMTimeMake(i, 15);
        CMTime presentTime = CMTimeAdd(lastTime, frameTime);

        if (i >= [array count]) {
            buffer = NULL;   // no more images left to append
        } else {
            buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:i] CGImage] size:size];
        }

        if (buffer) {
            [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
            i++;
        } else {
            // Out of images: close the input and finish writing the file
            [writerInput markAsFinished];
            [self.videoWriter finishWritingWithCompletionHandler:^{
                NSLog(@"Done");
            }];
            CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
            break;
        }
    }
}
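For what it's worth, my current guess (unverified, and quite possibly wrong) is that to get 0.2 s per frame over 15 s the write loop would have to look roughly like this sketch, reusing the 10 images via i % [array count] until 75 frames have been appended, with CMTimeMake(i, 5) as the presentation time of frame i (i.e. i * 0.2 s):

int totalFrames = 75;  // 15 s / 0.2 s per frame
for (int i = 0; i < totalFrames; i++) {
    // Cycle through the 10 images
    UIImage *image = [array objectAtIndex:(i % [array count])];
    CVPixelBufferRef frameBuffer = [self pixelBufferFromCGImage:[image CGImage] size:size];

    // Frame i should appear at i * 0.2 s
    CMTime presentTime = CMTimeMake(i, 5);

    // Wait until the writer input can accept more data
    while (!writerInput.readyForMoreMediaData) {
        [NSThread sleepForTimeInterval:0.05];
    }
    [adaptor appendPixelBuffer:frameBuffer withPresentationTime:presentTime];
    CVPixelBufferRelease(frameBuffer);  // assuming my pixelBufferFromCGImage: returns a +1 (owned) buffer
}
// ...then markAsFinished / finishWritingWithCompletionHandler as above.

Is this the right direction, or am I misunderstanding how the value/timescale pair is supposed to be used? I'm also not sure whether I'd need [self.videoWriter endSessionAtSourceTime:CMTimeMake(totalFrames, 5)] to make the movie end at exactly 15 s.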