
I've been stuck on a problem for the past two weeks. I'm working on an iPad app: the user can annotate the screen, and I also want to make a screen recording of that annotation. The annotation part works fine on its own, but as soon as I start recording there is a problem: the drawing loses its smoothness and the screen lags for as long as the recording is running. I'm using AVAssetWriter for the screen recording. The annotation code and the recording code each look fine to me in isolation, so I can't figure out where the problem is.

The screenshot size is (1050, 650).

Do I need to use Grand Central Dispatch to solve this? (I've put a rough sketch of what I mean by that after my code, below.) Can anyone help me figure out what's going wrong? Any help is appreciated.

My code:

    // For Annotation

    - (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
    {
        mouseSwiped = NO;
        UITouch *touch = [touches anyObject];

        if ([touch tapCount] == 2)
        {
            drawImage.image = nil;  // Double tap clears (undoes) the drawing.
            return;
        }

        lastPoint = [touch locationInView:self.view];
        lastPoint.y -= 20;
    }

    - (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
    {
        mouseSwiped = YES;
        UITouch *touch = [touches anyObject];
        CGPoint currentPoint = [touch locationInView:self.view];
        currentPoint.y -= 20;

        // Redraw the existing image, then stroke a segment from the last point.
        // UIGraphicsBeginImageContext(canvasView.frame.size);
        UIGraphicsBeginImageContext(drawImage.frame.size);
        [drawImage.image drawInRect:CGRectMake(0, 0, drawImage.frame.size.width, drawImage.frame.size.height)];
        CGContextSetLineCap(UIGraphicsGetCurrentContext(), kCGLineCapRound);
        CGContextSetLineWidth(UIGraphicsGetCurrentContext(), 10.0);
        CGContextSetRGBStrokeColor(UIGraphicsGetCurrentContext(), 1.0, 0.0, 0.0, 1.0);
        CGContextBeginPath(UIGraphicsGetCurrentContext());
        CGContextMoveToPoint(UIGraphicsGetCurrentContext(), lastPoint.x, lastPoint.y);
        CGContextAddLineToPoint(UIGraphicsGetCurrentContext(), currentPoint.x, currentPoint.y);
        CGContextStrokePath(UIGraphicsGetCurrentContext());
        drawImage.image = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();

        lastPoint = currentPoint;
    }

    - (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
    {
        UITouch *touch = [touches anyObject];
        if ([touch tapCount] == 2)
        {
            drawImage.image = nil;
            return;
        }

        if (!mouseSwiped)
        {
            // Single tap: draw a dot at the last touch point.
            UIGraphicsBeginImageContext(drawImage.frame.size);
            [drawImage.image drawInRect:CGRectMake(0, 0, drawImage.frame.size.width, drawImage.frame.size.height)];
            CGContextSetLineCap(UIGraphicsGetCurrentContext(), kCGLineCapRound);
            CGContextSetLineWidth(UIGraphicsGetCurrentContext(), 5.0);
            CGContextSetRGBStrokeColor(UIGraphicsGetCurrentContext(), 1.0, 0.0, 0.0, 1.0);
            CGContextMoveToPoint(UIGraphicsGetCurrentContext(), lastPoint.x, lastPoint.y);
            CGContextAddLineToPoint(UIGraphicsGetCurrentContext(), lastPoint.x, lastPoint.y);
            CGContextStrokePath(UIGraphicsGetCurrentContext());
            CGContextFlush(UIGraphicsGetCurrentContext());
            drawImage.image = UIGraphicsGetImageFromCurrentImageContext();
            UIGraphicsEndImageContext();
        }
    }



    // For Screen Recording

    #define FRAME_WIDTH  1024
    #define FRAME_HEIGHT 650
    #define TIME_SCALE   600

    // Renders the whole view hierarchy into a UIImage (called once per frame).
    - (UIImage *)screenshot
    {
        UIGraphicsBeginImageContext(drawImage.frame.size);
        [self.view.layer renderInContext:UIGraphicsGetCurrentContext()];
        UIImage *viewImage = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();
        return viewImage;
    }


    // Returns the output URL in Documents, deleting any previous recording first.
    - (NSURL *)pathToDocumentsDirectory
    {
        NSString *outputPath = [[NSString alloc] initWithFormat:@"%@/%@",
                                [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0],
                                @"output.mov"];
        outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];

        NSFileManager *fileManager = [NSFileManager defaultManager];
        if ([fileManager fileExistsAtPath:outputPath]) {
            NSError *error;
            if ([fileManager removeItemAtPath:outputPath error:&error] == NO) {
                NSLog(@"Could not delete old recording file at path: %@", outputPath);
            }
        }

        [outputPath release];
        return [outputURL autorelease];
    }


    // Called by the repeating timer: grabs a screenshot and appends it as a video frame.
    - (void)writeSample:(NSTimer *)_timer
    {
        if (assetWriterInput.readyForMoreMediaData) {
            CVReturn cvErr = kCVReturnSuccess;

            // Get the screenshot image.
            CGImageRef image = (CGImageRef)[[self screenshot] CGImage];
            NSLog(@"made screenshot");

            // Wrap the screenshot's pixels in a CVPixelBuffer.
            CVPixelBufferRef pixelBuffer = NULL;
            CFDataRef imageData = CGDataProviderCopyData(CGImageGetDataProvider(image));
            NSLog(@"copied image data");
            cvErr = CVPixelBufferCreateWithBytes(kCFAllocatorDefault,
                                                 FRAME_WIDTH,
                                                 FRAME_HEIGHT,
                                                 kCVPixelFormatType_32BGRA,
                                                 (void *)CFDataGetBytePtr(imageData),
                                                 CGImageGetBytesPerRow(image),
                                                 NULL,
                                                 NULL,
                                                 NULL,
                                                 &pixelBuffer);
            NSLog(@"CVPixelBufferCreateWithBytes returned %d", cvErr);

            // Presentation time = wall-clock time elapsed since the first frame.
            CFAbsoluteTime thisFrameWallClockTime = CFAbsoluteTimeGetCurrent();
            CFTimeInterval elapsedTime = thisFrameWallClockTime - firstFrameWallClockTime;
            NSLog(@"elapsedTime: %f", elapsedTime);
            CMTime presentationTime = CMTimeMake(elapsedTime * TIME_SCALE, TIME_SCALE);

            // Write the sample.
            BOOL appended = [assetWriterPixelBufferAdaptor appendPixelBuffer:pixelBuffer
                                                        withPresentationTime:presentationTime];
            if (appended) {
                NSLog(@"appended sample at time %lf", CMTimeGetSeconds(presentationTime));
            } else {
                NSLog(@"failed to append");
                [self stopRecording];
            }
        }
    }


    - (void)startRecording
    {
        movieURL = [self pathToDocumentsDirectory];
        NSLog(@"path=%@", movieURL);
        movieError = nil;

        [assetWriter release];
        assetWriter = [[AVAssetWriter alloc] initWithURL:movieURL
                                                fileType:AVFileTypeQuickTimeMovie
                                                   error:&movieError];
        [self writer];

        // Start writing samples on a repeating timer.
        NSDate *start = [NSDate date];
        frameRate = 40.0f;
        float processingSeconds = [[NSDate date] timeIntervalSinceDate:start];
        delayRemaining = (1.0 / self.frameRate) - processingSeconds;

        [assetWriterTimer release];
        assetWriterTimer = [NSTimer scheduledTimerWithTimeInterval:delayRemaining > 0.0 ? delayRemaining : 0.01
                                                            target:self
                                                          selector:@selector(writeSample:)
                                                          userInfo:nil
                                                           repeats:YES];
    }


    // Configures the H.264 asset writer input and the pixel buffer adaptor, then starts the session.
    - (void)writer
    {
        NSDictionary *assetWriterInputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                                  AVVideoCodecH264, AVVideoCodecKey,
                                                  [NSNumber numberWithInt:FRAME_WIDTH], AVVideoWidthKey,
                                                  [NSNumber numberWithInt:FRAME_HEIGHT], AVVideoHeightKey,
                                                  nil];

        assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                              outputSettings:assetWriterInputSettings];
        assetWriterInput.expectsMediaDataInRealTime = YES;
        [assetWriter addInput:assetWriterInput];

        [assetWriterPixelBufferAdaptor release];
        assetWriterPixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc]
                                         initWithAssetWriterInput:assetWriterInput
                                         sourcePixelBufferAttributes:nil];

        [assetWriter startWriting];
        firstFrameWallClockTime = CFAbsoluteTimeGetCurrent();
        [assetWriter startSessionAtSourceTime:CMTimeMake(0, TIME_SCALE)];
    }


    - (void)stopRecording
    {
        [assetWriterTimer invalidate];
        assetWriterTimer = nil;

        [assetWriter finishWriting];
        NSLog(@"finished writing");
    }
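
To make the Grand Central Dispatch part of my question concrete, here is a rough, untested sketch of what I mean (the method name, queue label, and the release-callback helper are just placeholders I made up): the screenshot would still be taken on the main thread, but the pixel-buffer creation and the append would move to a serial background queue. Is something along these lines the right direction?

    // Untested sketch only -- this is NOT the code I am currently running.
    // Idea: keep the UIKit screenshot on the main thread, push the
    // CoreVideo / AVAssetWriter work onto a serial background queue.

    // Called by CoreVideo once the pixel buffer no longer needs the bytes,
    // so the copied image data is released at the right time.
    static void ReleaseScreenshotBytes(void *releaseRefCon, const void *baseAddress)
    {
        CFRelease((CFDataRef)releaseRefCon);
    }

    - (void)writeSampleUsingGCD:(NSTimer *)_timer
    {
        if (!assetWriterInput.readyForMoreMediaData) {
            return;
        }

        UIImage *shot = [self screenshot];                 // UIKit: main thread only
        CFAbsoluteTime now = CFAbsoluteTimeGetCurrent();   // timestamp taken at capture time

        static dispatch_queue_t writeQueue;
        static dispatch_once_t onceToken;
        dispatch_once(&onceToken, ^{
            writeQueue = dispatch_queue_create("screen.recording.write", DISPATCH_QUEUE_SERIAL);
        });

        dispatch_async(writeQueue, ^{
            CGImageRef image = [shot CGImage];
            CFDataRef imageData = CGDataProviderCopyData(CGImageGetDataProvider(image));

            CVPixelBufferRef pixelBuffer = NULL;
            CVReturn cvErr = CVPixelBufferCreateWithBytes(kCFAllocatorDefault,
                                                          FRAME_WIDTH,
                                                          FRAME_HEIGHT,
                                                          kCVPixelFormatType_32BGRA,
                                                          (void *)CFDataGetBytePtr(imageData),
                                                          CGImageGetBytesPerRow(image),
                                                          ReleaseScreenshotBytes,
                                                          (void *)imageData,
                                                          NULL,
                                                          &pixelBuffer);
            if (cvErr != kCVReturnSuccess) {
                CFRelease(imageData);
                return;
            }

            CMTime presentationTime = CMTimeMake((now - firstFrameWallClockTime) * TIME_SCALE, TIME_SCALE);
            [assetWriterPixelBufferAdaptor appendPixelBuffer:pixelBuffer
                                        withPresentationTime:presentationTime];

            CVPixelBufferRelease(pixelBuffer);   // the adaptor holds its own reference while writing
        });
    }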

1 Answer


The simplest thing to try is a lower frame rate.
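
For example, with the NSTimer-based setup from the question, that would just mean scheduling far fewer captures per second; the 10 fps below is only an illustrative value, not a recommendation. renderInContext: on a full-screen layer is expensive, so fewer captures leave more main-thread time for the drawing code.

    // Capture roughly 10 frames per second instead of ~40.
    frameRate = 10.0f;
    assetWriterTimer = [NSTimer scheduledTimerWithTimeInterval:1.0 / frameRate
                                                        target:self
                                                      selector:@selector(writeSample:)
                                                      userInfo:nil
                                                       repeats:YES];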

answered 2013-01-04