0

Snapchat に似たハイブリッド画像/ビデオ カメラで最近録画したビデオの再生に問題があります (たとえば、タップして写真を撮り、長押ししてビデオを録画し、ボタンを放して再生します)。

現在、動画ファイルを NSFileManager に保存しています。ログアウトすると、何かが保存されていることを確認しますが、電話でテストする必要があるため、ファイルを検査できません。

ログアウト時のファイルパス:

file:///var/mobile/Containers/Data/Application/7D86B14D-ACFF-4494-AD61-CBBD32DCA7A5/Documents/test.mov

ファイルマネージャーからアセットをロードしようとすると、ファイルを開けないというエラーが表示されてログアウトします。AVFoundation を使い始めたばかりなので、デバッグ時にどのような問題や考慮事項があるかわかりません。どんな洞察も大歓迎です、ありがとう!

参考にしたチュートリアル

参考にしたgithubリポジトリ

参照コード:

PlayerView.h (リファレンス)

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

@interface PlayerView : UIView

@property (nonatomic) AVPlayer *player;

- (void)setPlayer:(AVPlayer *)player;

@end

PlayerView.m (リファレンス)

#import "PlayerView.h"

@implementation PlayerView
+ (Class)layerClass {
    return [AVPlayerLayer class];
}
- (AVPlayer *)player {
    return [(AVPlayerLayer *)[self layer] player];
}
- (void)setPlayer:(AVPlayer *)player {
    [(AVPlayerLayer *)[self layer] setPlayer:player];
}
@end

HybridCameraViewController.h (リファレンス)

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

#import "PlayerView.h"

@interface HybridCameraViewController : UIViewController 

@property UIButton *button;
@property UIButton *saveButton;
@property UIImageView *previewView;

#define VIDEO_FILE @"test.mov"

@end

HybridCameraViewController.m (リファレンス)

#import "HybridCameraViewController.h"
static const NSString *ItemStatusContext;
@class PlayerView;
@interface HybridCameraViewController () <AVCaptureFileOutputRecordingDelegate>
@end

@implementation HybridCameraViewController
AVCaptureSession *session;
AVCaptureStillImageOutput *imageOutput;
AVCaptureMovieFileOutput *movieOutput;
AVCaptureConnection *videoConnection;
AVPlayer *player;
AVPlayerItem *playerItem;
PlayerView *playerView;

- (void)viewDidLoad {
    [super viewDidLoad];
    [self testDevices];
    self.view.backgroundColor = [UIColor blackColor];

    //Image preview
    self.previewView = [[UIImageView alloc]initWithFrame:self.view.frame];
    self.previewView.backgroundColor = [UIColor whiteColor];
    self.previewView.contentMode = UIViewContentModeScaleAspectFill;
    self.previewView.hidden = YES;
    [self.view addSubview:self.previewView];

    //Playerback setup
    playerView = [[PlayerView alloc]initWithFrame:self.view.frame];
    playerView.backgroundColor = [UIColor redColor];
    [self syncUI];

    //Buttons
    self.button = [self createButtonWithTitle:@"REC" chooseColor:[UIColor redColor]];
    UILongPressGestureRecognizer *longPressRecognizer = [[UILongPressGestureRecognizer alloc]initWithTarget:self action:@selector(handleLongPressGesture:)];
    [self.button addGestureRecognizer:longPressRecognizer];
    [self.button addTarget:self action:@selector(captureImage) forControlEvents:UIControlEventTouchUpInside];

    self.saveButton = [self createSaveButton];
    [self.saveButton addTarget:self action:@selector(saveActions) forControlEvents:UIControlEventTouchUpInside];
}

- (void)viewWillAppear:(BOOL)animated {
    //Tests
    [self initializeAVItems];
    NSLog(@"%@", videoConnection);
    NSLog(@"%@", imageOutput.connections);
    NSLog(@"%@", imageOutput.description.debugDescription);
}

#pragma mark - AV initialization

- (void)initializeAVItems {
    //Start session, input
    session = [AVCaptureSession new];
    if ([session canSetSessionPreset:AVCaptureSessionPresetHigh]) {
        session.sessionPreset = AVCaptureSessionPresetHigh;
    }
    AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    NSError *error;
    AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
    if ([session canAddInput:deviceInput]) {
        [session addInput:deviceInput];
    } else {
        NSLog(@"%@", error);
    }

    AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc]initWithSession:session];
    [previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];

    //Layer preview
    CALayer *viewLayer = [[self view] layer];
    [viewLayer setMasksToBounds:YES];

    CGRect frame = self.view.frame;
    [previewLayer setFrame:frame];
    [viewLayer insertSublayer:previewLayer atIndex:0];

    //Image Output
    imageOutput = [AVCaptureStillImageOutput new];
    NSDictionary *imageOutputSettings = [[NSDictionary alloc]initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
    imageOutput.outputSettings = imageOutputSettings;

    //Video Output
    movieOutput = [AVCaptureMovieFileOutput new];

    [session addOutput:movieOutput];
    [session addOutput:imageOutput];
    [session startRunning];
}

- (void)testDevices {
    NSArray *devices = [AVCaptureDevice devices];
    for (AVCaptureDevice *device in devices) {
        NSLog(@"Device name: %@", [device localizedName]);
        if ([device hasMediaType:AVMediaTypeVideo]) {
            if ([device position] == AVCaptureDevicePositionBack) {
                NSLog(@"Device position : back");
            }
            else {
                NSLog(@"Device position : front");
            }
        }
    }
}

#pragma mark - Image capture

- (void)captureImage {
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in imageOutput.connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) {
            break;
        }
    }
    NSLog(@"Requesting capture from: %@", imageOutput);
    [imageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
        if (imageDataSampleBuffer != NULL) {
            NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
            UIImage *image = [UIImage imageWithData:imageData];
            self.previewView.image = image;
            self.previewView.hidden = NO;
        }
    }];
    [self saveButtonFlyIn:self.saveButton];
}

#pragma mark - Video capture

- (void)captureVideo {
    NSLog(@"%@", movieOutput.connections);
    [[NSFileManager defaultManager] removeItemAtURL:[self outputURL] error:nil];
    videoConnection = [self connectionWithMediaType:AVMediaTypeVideo fromConnections:movieOutput.connections];
    [movieOutput startRecordingToOutputFileURL:[self outputURL] recordingDelegate:self];
}

- (AVCaptureConnection *)connectionWithMediaType:(NSString *)mediaType fromConnections:(NSArray *)connections {
    for (AVCaptureConnection *connection in connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:mediaType]) {
                return connection;
            }
        }
    }
    return nil;
}

#pragma mark - Show Last Recording

- (void)presentRecording {
    NSLog(@"unplaying");
    NSLog(@"%@",[self outputURL]);
}

- (IBAction)loadAssetFromFile {
    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:[self outputURL] options:nil];
    NSString *tracksKey = @"tracks";
    [asset loadValuesAsynchronouslyForKeys:@[tracksKey] completionHandler:^{
         dispatch_async(dispatch_get_main_queue(),^{
             NSError *error;
             AVKeyValueStatus status = [asset statusOfValueForKey:tracksKey
                                                            error:&error];

             if (status == AVKeyValueStatusLoaded) {
                 playerItem = [AVPlayerItem playerItemWithAsset:asset];
                 [playerItem addObserver:self forKeyPath:@"status"
                                      options:NSKeyValueObservingOptionInitial
                                      context:&ItemStatusContext];
                 [[NSNotificationCenter defaultCenter] addObserver:self
                                                          selector:@selector(playerItemDidReachEnd:)
                                                              name:AVPlayerItemDidPlayToEndTimeNotification
                                                            object:playerItem];
                 player = [AVPlayer playerWithPlayerItem:playerItem];

                 [playerView setPlayer:player];
             }
             else {
                 NSLog(@"The asset's tracks were not loaded:\n%@", [error localizedDescription]);
             }
         });
    }];
}

- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
    if (context == &ItemStatusContext) {
        dispatch_async(dispatch_get_main_queue(),^{
            [self syncUI];
        });
        return;
    }
    [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
    return;
}

- (void)playerItemDidReachEnd:(NSNotification *)notification {
    [player seekToTime:kCMTimeZero];
}

- (void)syncUI {
    if ((player.currentItem != nil) &&
        ([player.currentItem status] == AVPlayerItemStatusReadyToPlay)) {
        self.button.enabled = YES;
    }
    else {
        self.button.enabled = NO;
    }
}

#pragma mark - AVCaptureFileOutputRecordingDelegate

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
    if (!error) {
        NSLog(@"Success!!!!");

    } else {
        NSLog(@"Error: %@", [error localizedDescription]);
    }
}

#pragma mark - Recoding Destination URL

- (NSURL *)outputURL {
    NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
    NSString *filePath = [documentsDirectory stringByAppendingPathComponent:VIDEO_FILE];
    return [NSURL fileURLWithPath:filePath];
}

#pragma mark - Buttons

- (void)handleLongPressGesture:(UILongPressGestureRecognizer *)recognizer {
    if (recognizer.state == UIGestureRecognizerStateBegan) {
        NSLog(@"Press");
        self.button.backgroundColor = [UIColor greenColor];
        [self captureVideo];
    }
    if (recognizer.state == UIGestureRecognizerStateEnded) {
        NSLog(@"Unpress");
        self.button.backgroundColor = [UIColor redColor];
        [movieOutput stopRecording];
        [self performSelector:@selector(loadAssetFromFile)];
        [player play];
    }
}

- (UIButton *)createButtonWithTitle:(NSString *)title chooseColor:(UIColor *)color {
    UIButton *button = [[UIButton alloc] initWithFrame:CGRectMake(self.view.center.x, self.view.frame.size.height - 100, 85, 85)];
    button.layer.cornerRadius = button.bounds.size.width / 2;
    button.backgroundColor = color;
    button.tintColor = [UIColor whiteColor];
    [self.view addSubview:button];
    return button;
}

- (UIButton *)createSaveButton {
    UIButton *button = [[UIButton alloc]initWithFrame:CGRectMake(self.view.frame.size.width, self.view.frame.size.height - 100, 85, 85)];
    button.layer.cornerRadius = button.bounds.size.width / 2;
    button.backgroundColor = [UIColor greenColor];
    button.tintColor = [UIColor whiteColor];
    button.userInteractionEnabled = YES;
    [button setTitle:@"save" forState:UIControlStateNormal];
    [self.view addSubview:button];
    return button;
}

- (void)saveButtonFlyIn:(UIButton *)button {
    CGRect movement = button.frame;
    movement.origin.x = self.view.frame.size.width - 100;

    [UIView animateWithDuration:0.2 animations:^{
        button.frame = movement;
    }];
}

- (void)saveButtonFlyOut:(UIButton *)button {
    CGRect movement = button.frame;
    movement.origin.x = self.view.frame.size.width;

    [UIView animateWithDuration:0.2 animations:^{
        button.frame = movement;
    }];
}

#pragma mark - Save actions

- (void)saveActions {
    [self saveButtonFlyOut:self.saveButton];
    self.previewView.image = nil;
    self.previewView.hidden = YES;
}

@end
4

1 に答える 1

0

アプローチは非常に複雑でした。あなたが間違っていたのは、1) URL を適切にロードしていないことと、2) メイン ビューに「プレーヤー レイヤー」サブレイヤーを追加していないことです。

成功した再生の例を次に示します。

   //TestURL
    self.videoURL = [NSURL URLWithString:(NSString *)[self.selectedVideo objectForKey:@"source"]];

    //Video
    self.player = [AVPlayer playerWithPlayerItem:[[AVPlayerItem alloc]initWithAsset:[AVAsset assetWithURL:self.videoURL]]];

    //Player layer
    self.playerLayer = [AVPlayerLayer playerLayerWithPlayer:self.player];
    self.playerLayer.videoGravity = AVLayerVideoGravityResizeAspect;
    self.playerLayer.frame = CGRectMake(0, 0, self.view.frame.size.width, self.view.frame.size.height / 3);
    [self.view.layer addSublayer:self.playerLayer];
    [self.player play];

保存した URL に URL をリダイレクトするだけです。

self.videoURL = [self outputURL];
于 2015-07-13T13:15:22.967 に答える