I've been banging my head against this for a while and can't figure out what's going wrong. I want to read in a video file, process its frames, and then write them back out:
The problem I'm running into is that, for some reason, the call to [self.audioOutput copyNextSampleBuffer]; blocks and never returns!!
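(A diagnostic sketch, not part of the pipeline below: AVAssetReader exposes status and error properties, so the only assumption here is that self.reader is still alive at the point of the stall.)

// Diagnostic sketch: dump the reader's state just before the stalled call.
NSLog(@"reader status: %ld, error: %@", (long)self.reader.status, self.reader.error);
CMSampleBufferRef audioBuffer = [self.audioOutput copyNextSampleBuffer]; // <-- hangs here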
-(void) initializeAtPath:(NSURL*) inputPath withOutputPath:(NSURL*) outputPath{
    // Video compression settings for the writer input.
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:352], AVVideoWidthKey,
                                   [NSNumber numberWithInt:288], AVVideoHeightKey,
                                   nil];

    AVAsset *movieAsset = [AVAsset assetWithURL:inputPath];
    self.reader = [[AVAssetReader alloc] initWithAsset:movieAsset error:nil];

    // Decode video frames as BGRA pixel buffers.
    NSDictionary *outputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                    [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], (id)kCVPixelBufferPixelFormatTypeKey,
                                    nil];
    self.frameOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:[[movieAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                                        outputSettings:outputSettings];
    if ([self.reader canAddOutput:self.frameOutput]) {
        [self.reader addOutput:self.frameOutput];
    }

    // Uncompressed stereo PCM settings for the audio writer input.
    AudioChannelLayout channelLayout;
    memset(&channelLayout, 0, sizeof(AudioChannelLayout));
    channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
    NSDictionary *audioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
                                   [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                                   [NSNumber numberWithInt:2], AVNumberOfChannelsKey,
                                   [NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)], AVChannelLayoutKey,
                                   [NSNumber numberWithInt:16], AVLinearPCMBitDepthKey,
                                   [NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
                                   [NSNumber numberWithBool:NO], AVLinearPCMIsFloatKey,
                                   [NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
                                   nil];

    NSArray *audioTracks = [movieAsset tracksWithMediaType:AVMediaTypeAudio];
    self.audioOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:[audioTracks lastObject]
                                                                  outputSettings:nil];
    if ([self.reader canAddOutput:self.audioOutput]) {
        NSLog(@"ADDED");
        [self.reader addOutput:self.audioOutput];
    }

    self.writer = [[AVAssetWriter alloc] initWithURL:outputPath fileType:AVFileTypeQuickTimeMovie error:nil];

    self.writerInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    self.writerInput.expectsMediaDataInRealTime = YES;
    [self.writerInput setTransform:CGAffineTransformMakeRotation(M_PI_2)];

    self.audioWriterInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:audioSettings];
    self.audioWriterInput.expectsMediaDataInRealTime = YES;

    [self.writer addInput:self.audioWriterInput];
    [self.writer addInput:self.writerInput];

    self.adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:self.writerInput
                                                                                    sourcePixelBufferAttributes:nil];
}
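Side note: I pass error:nil to both the reader and the writer above. A hedged sketch of the same two calls with the errors actually captured (nothing here beyond the standard NSError out-parameter pattern):

NSError *readerError = nil;
self.reader = [[AVAssetReader alloc] initWithAsset:movieAsset error:&readerError];
if (!self.reader) {
    NSLog(@"reader init failed: %@", readerError);
    return;
}

NSError *writerError = nil;
self.writer = [[AVAssetWriter alloc] initWithURL:outputPath
                                        fileType:AVFileTypeQuickTimeMovie
                                           error:&writerError];
if (!self.writer) {
    NSLog(@"writer init failed: %@", writerError);
    return;
}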
-(void) finish:(bool) success{
    // Finish on success, cancel on failure; calling both on the same writer is contradictory.
    if (success) {
        [self.writer finishWriting];
    } else {
        [self.writer cancelWriting];
    }
    if (success && [self.delegate respondsToSelector:@selector(didFinishWritingMovieAtUrl:sender:)]) {
        dispatch_async(dispatch_get_main_queue(), ^{
            [self.delegate didFinishWritingMovieAtUrl:self.writer.outputURL sender:self];
        });
    }
    if (!success && [self.delegate respondsToSelector:@selector(didFailToWriteMovieAtUrl:sender:)]) {
        dispatch_async(dispatch_get_main_queue(), ^{
            [self.delegate didFailToWriteMovieAtUrl:self.writer.outputURL sender:self];
        });
    }
}
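If the deployment target is iOS 6 or later, the blocking finishWriting call could be swapped for its asynchronous counterpart; a sketch of just the success hand-off:

// Sketch (iOS 6+): asynchronous finish; the handler fires once writing ends.
[self.writer finishWritingWithCompletionHandler:^{
    BOOL ok = (self.writer.status == AVAssetWriterStatusCompleted);
    NSLog(@"finished writing, ok=%d", ok);
}];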
-(void) processAudioUsingQueue{
    while ([self.audioWriterInput isReadyForMoreMediaData]) {
        if (self.reader.status == AVAssetReaderStatusReading) {
            NSLog(@"A");
            // This is the call that blocks and never returns:
            CMSampleBufferRef audioBuffer = [self.audioOutput copyNextSampleBuffer];
            NSLog(@"B");
            bool result = [self.audioWriterInput appendSampleBuffer:audioBuffer];
            if (audioBuffer) CFRelease(audioBuffer); // copyNextSampleBuffer returns a retained buffer
            if (!result) {
                [self.audioWriterInput markAsFinished];
                [self.reader cancelReading];
                [self finish:NO];
                return;
            }
        } else {
            [self.audioWriterInput markAsFinished];
            break;
        }
    }
    [self finish:YES];
}
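For comparison, the video path below is driven by requestMediaDataWhenReadyOnQueue:usingBlock:, while the audio loop above pulls on the main thread. A sketch of the audio side rewritten in that same callback style (same reader output and writer input; the audio_queue name is just a placeholder):

// Sketch: drive the audio input the same way as the video input,
// pulling samples on a background queue instead of the main thread.
dispatch_queue_t audioQueue = dispatch_queue_create("audio_queue", NULL);
[self.audioWriterInput requestMediaDataWhenReadyOnQueue:audioQueue usingBlock:^{
    while ([self.audioWriterInput isReadyForMoreMediaData]) {
        CMSampleBufferRef buffer = [self.audioOutput copyNextSampleBuffer];
        if (!buffer) {                        // reader finished or failed
            [self.audioWriterInput markAsFinished];
            break;
        }
        [self.audioWriterInput appendSampleBuffer:buffer];
        CFRelease(buffer);
    }
}];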
-(void) processFramesUsingQueue:(dispatch_queue_t) queue{
    [self.writerInput requestMediaDataWhenReadyOnQueue:queue usingBlock:^{
        while ([self.writerInput isReadyForMoreMediaData]) {
            CMSampleBufferRef sampleBuffer;
            if (self.reader.status == AVAssetReaderStatusReading && (sampleBuffer = [self.frameOutput copyNextSampleBuffer])) {
                // renderFrame: processes the frame and appends it through the adaptor (not shown).
                bool result = [self renderFrame:sampleBuffer];
                if (!result) {
                    [self.reader cancelReading];
                    break;
                }
            } else {
                [self.writerInput markAsFinished];
                switch (self.reader.status) {
                    case AVAssetReaderStatusReading:
                        // Video is done but the reader is still going; pull the audio next.
                        [self performSelectorOnMainThread:@selector(processAudioUsingQueue) withObject:nil waitUntilDone:YES];
                        break;
                    case AVAssetReaderStatusCompleted:
                        [self finish:YES];
                        break;
                    case AVAssetReaderStatusCancelled:
                    case AVAssetReaderStatusFailed:
                        [self finish:NO];
                        break;
                    default:
                        break;
                }
                break;
            }
        }
    }];
}
-(void) beginProcessingMovieAtPath:(NSURL*) inputPath withOutputPath:(NSURL*) outputPath startingTimeStamp:(CMTime) startTime{
    if (!self.costumeViews || !self.costumeFrames) NSLog(@"Warning... missing parameters!");

    [self initializeAtPath:inputPath withOutputPath:outputPath];
    [self.reader startReading];
    [self.writer startWriting];

    self.startFrameTime = startTime;
    [self.writer startSessionAtSourceTime:self.startFrameTime];

    // start thread to pull video data:
    dispatch_queue_t processVideoQueue = dispatch_queue_create("process_queue", NULL);
    [self processFramesUsingQueue:processVideoQueue];
}
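For completeness, this is roughly how I kick the whole thing off; the file names and the processor variable are placeholders:

// Hypothetical caller; "movie.mov", "out.mov", and `processor` are placeholders.
NSURL *input  = [[NSBundle mainBundle] URLForResource:@"movie" withExtension:@"mov"];
NSURL *output = [NSURL fileURLWithPath:
                 [NSTemporaryDirectory() stringByAppendingPathComponent:@"out.mov"]];
[processor beginProcessingMovieAtPath:input
                       withOutputPath:output
                    startingTimeStamp:kCMTimeZero];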