I am running a tutorial application for iOS screen video recording. It works fine for ordinary views, but I need to record the screen while a video is playing in a UIWebView / MPMoviePlayer. However, after I start the player, I only see a black screen. I followed this link:
http://developer.apple.com/library/ios/#qa/qa1703/_index.html
-(void) startRecording {
    // create the AVAssetWriter
    NSString *moviePath = [[self pathToDocumentsDirectory] stringByAppendingPathComponent:OUTPUT_FILE_NAME];
    if ([[NSFileManager defaultManager] fileExistsAtPath:moviePath]) {
        [[NSFileManager defaultManager] removeItemAtPath:moviePath error:nil];
    }
    NSURL *movieURL = [NSURL fileURLWithPath:moviePath];
    NSError *movieError = nil;
    [assetWriter release];
    assetWriter = [[AVAssetWriter alloc] initWithURL:movieURL
                                            fileType:AVFileTypeQuickTimeMovie
                                               error:&movieError];
    NSDictionary *assetWriterInputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                              AVVideoCodecH264, AVVideoCodecKey,
                                              [NSNumber numberWithInt:FRAME_WIDTH], AVVideoWidthKey,
                                              [NSNumber numberWithInt:FRAME_HEIGHT], AVVideoHeightKey,
                                              nil];
    assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                          outputSettings:assetWriterInputSettings];
    assetWriterInput.expectsMediaDataInRealTime = YES;
    [assetWriter addInput:assetWriterInput];

    [assetWriterPixelBufferAdaptor release];
    assetWriterPixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc]
                                     initWithAssetWriterInput:assetWriterInput
                                     sourcePixelBufferAttributes:nil];
    [assetWriter startWriting];

    firstFrameWallClockTime = CFAbsoluteTimeGetCurrent();
    [assetWriter startSessionAtSourceTime:CMTimeMake(0, TIME_SCALE)];

    // start writing samples to it
    [assetWriterTimer release];
    assetWriterTimer = [NSTimer scheduledTimerWithTimeInterval:0.1
                                                        target:self
                                                      selector:@selector(writeSample:)
                                                      userInfo:nil
                                                       repeats:YES];
}
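For reference, OUTPUT_FILE_NAME, FRAME_WIDTH, FRAME_HEIGHT, TIME_SCALE and pathToDocumentsDirectory are used above but not shown. A minimal sketch of how they might be defined (the exact values here are assumptions on my part, not taken from the tutorial):

#define OUTPUT_FILE_NAME @"screen-capture.mov"   // assumed output file name
#define FRAME_WIDTH      320                     // assumed target frame width in pixels
#define FRAME_HEIGHT     480                     // assumed target frame height in pixels
#define TIME_SCALE       600                     // assumed CMTime timescale (ticks per second)

// Returns the app's Documents directory, where the movie file is written.
- (NSString *) pathToDocumentsDirectory {
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    return [paths objectAtIndex:0];
}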
-(void) stopRecording {
    [assetWriterTimer invalidate];
    assetWriterTimer = nil;
    [assetWriter finishWriting];
    NSLog (@"finished writing");
}
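startStopButton is referenced further down in writeSample:, but the action that drives these two methods is not shown. A minimal sketch of how such a toggle might look (the IBAction name is an assumption, not from the original code):

// Hypothetical button action: starts recording on first tap, stops on the next.
- (IBAction) handleStartStopTapped:(id)sender {
    if (self.startStopButton.selected) {
        [self stopRecording];
        self.startStopButton.selected = NO;
    } else {
        [self startRecording];
        self.startStopButton.selected = YES;
    }
}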
- (UIImage*)screenshot
{
    // Create a graphics context with the target size
    // On iOS 4 and later, use UIGraphicsBeginImageContextWithOptions to take the scale into consideration
    // On iOS prior to 4, fall back to use UIGraphicsBeginImageContext
    CGSize imageSize = [[UIScreen mainScreen] bounds].size;
    CGFloat imageScale = imageSize.width / FRAME_WIDTH;
    if (NULL != UIGraphicsBeginImageContextWithOptions)
        UIGraphicsBeginImageContextWithOptions(imageSize, NO, imageScale);
    else
        UIGraphicsBeginImageContext(imageSize);

    CGContextRef context = UIGraphicsGetCurrentContext();

    // Iterate over every window from back to front
    for (UIWindow *window in [[UIApplication sharedApplication] windows])
    {
        if (![window respondsToSelector:@selector(screen)] || [window screen] == [UIScreen mainScreen])
        {
            // -renderInContext: renders in the coordinate space of the layer,
            // so we must first apply the layer's geometry to the graphics context
            CGContextSaveGState(context);
            // Center the context around the window's anchor point
            CGContextTranslateCTM(context, [window center].x, [window center].y);
            // Apply the window's transform about the anchor point
            CGContextConcatCTM(context, [window transform]);
            // Offset by the portion of the bounds left of and above the anchor point
            CGContextTranslateCTM(context,
                                  -[window bounds].size.width * [[window layer] anchorPoint].x,
                                  -[window bounds].size.height * [[window layer] anchorPoint].y);
            // Render the layer hierarchy to the current context
            [[window layer] renderInContext:context];
            // Restore the context
            CGContextRestoreGState(context);
        }
    }

    // Retrieve the screenshot image
    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    return image;
}
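To narrow down whether the black frames come from the capture or from the writer, one thing I would try (my own debugging suggestion, not part of the tutorial) is to save a single screenshot to the photo library while the movie is playing and inspect it:

// Hypothetical debugging helper: if this saved image is already black,
// the problem is in the capture (-renderInContext:), not in the AVAssetWriter pipeline.
- (void) debugSaveOneScreenshot {
    UIImage *shot = [self screenshot];
    UIImageWriteToSavedPhotosAlbum(shot, nil, NULL, NULL);
}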
// Frees the CFData copied from the screenshot once the CVPixelBuffer that wraps
// its bytes is destroyed (passed as the release callback to CVPixelBufferCreateWithBytes).
static void ReleaseScreenshotBytes (void *releaseRefCon, const void *baseAddress) {
    CFRelease((CFDataRef)releaseRefCon);
}

-(void) writeSample: (NSTimer*) _timer {
    if (assetWriterInput.readyForMoreMediaData) {
        // CMSampleBufferRef sample = nil;
        CVReturn cvErr = kCVReturnSuccess;

        // get screenshot image!
        CGImageRef image = (CGImageRef) [[self screenshot] CGImage];
        NSLog (@"made screenshot");

        // prepare the pixel buffer
        CVPixelBufferRef pixelBuffer = NULL;
        CFDataRef imageData = CGDataProviderCopyData(CGImageGetDataProvider(image));
        NSLog (@"copied image data");
        // hand ownership of imageData to the pixel buffer: the release callback
        // frees it once the writer no longer needs the bytes
        cvErr = CVPixelBufferCreateWithBytes(kCFAllocatorDefault,
                                             FRAME_WIDTH,
                                             FRAME_HEIGHT,
                                             kCVPixelFormatType_32BGRA,
                                             (void*)CFDataGetBytePtr(imageData),
                                             CGImageGetBytesPerRow(image),
                                             ReleaseScreenshotBytes,
                                             (void*)imageData,
                                             NULL,
                                             &pixelBuffer);
        NSLog (@"CVPixelBufferCreateWithBytes returned %d", cvErr);

        // calculate the time
        CFAbsoluteTime thisFrameWallClockTime = CFAbsoluteTimeGetCurrent();
        CFTimeInterval elapsedTime = thisFrameWallClockTime - firstFrameWallClockTime;
        NSLog (@"elapsedTime: %f", elapsedTime);
        CMTime presentationTime = CMTimeMake (elapsedTime * TIME_SCALE, TIME_SCALE);

        // write the sample
        BOOL appended = [assetWriterPixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:presentationTime];

        // the adaptor retains the buffer for as long as it needs it,
        // so drop our reference here to avoid leaking one buffer per frame
        if (pixelBuffer) CVPixelBufferRelease(pixelBuffer);

        if (appended) {
            NSLog (@"appended sample at time %lf", CMTimeGetSeconds(presentationTime));
        } else {
            NSLog (@"failed to append");
            [self stopRecording];
            self.startStopButton.selected = NO;
        }
    }
}
where:
NSTimer *clockTimer;
NSTimer *assetWriterTimer;
AVMutableComposition *mutableComposition;
AVAssetWriter *assetWriter;
AVAssetWriterInput *assetWriterInput;
AVAssetWriterInputPixelBufferAdaptor *assetWriterPixelBufferAdaptor;
CFAbsoluteTime firstFrameWallClockTime;
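For completeness, a rough sketch of how these instance variables and the button might be declared in the view controller's header under manual reference counting (the class name and the outlet are assumptions on my part):

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>

@interface ScreenCaptureViewController : UIViewController {
    NSTimer *clockTimer;
    NSTimer *assetWriterTimer;
    AVMutableComposition *mutableComposition;
    AVAssetWriter *assetWriter;
    AVAssetWriterInput *assetWriterInput;
    AVAssetWriterInputPixelBufferAdaptor *assetWriterPixelBufferAdaptor;
    CFAbsoluteTime firstFrameWallClockTime;
}
// assumed outlet for the record button referenced in writeSample:
@property (nonatomic, retain) IBOutlet UIButton *startStopButton;
- (IBAction) handleStartStopTapped:(id)sender;
@end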