I'm trying to implement the AVFoundation sample code found here in my Xcode project: http://developer.apple.com/library/mac/#qa/qa1740/_index.html#//apple_ref/doc/uid/DTS40011007
Because automatic reference counting is enabled for the project, the only change I made was to remove the references to autorelease, release, and retain. With those build errors resolved, I now get "Undefined symbols for architecture x86_64".
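To give a concrete picture of those edits, here is roughly what each one looked like (shown for the session teardown; the other autorelease/release/retain spots were changed the same way):

// Original QA1740 code (manual reference counting):
//     [mSession release];
//     mSession = nil;

// With ARC enabled, only the assignment remains:
mSession = nil;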
I'm running Mountain Lion 10.8. Importing AVFoundation.h in my header file produces no errors, yet the AV* symbols apparently can't be found.
The error log, the .h, and the .m code are below. Could someone more knowledgeable help me pin down where the problem is?
Log:
Undefined symbols for architecture x86_64:
"_AVCaptureSessionPresetMedium", referenced from:
-[Recorder screenRecording:] in screenAppDelegate.o
"_OBJC_CLASS_$_AVCaptureMovieFileOutput", referenced from:
objc-class-ref in screenAppDelegate.o
"_OBJC_CLASS_$_AVCaptureScreenInput", referenced from:
objc-class-ref in screenAppDelegate.o
"_OBJC_CLASS_$_AVCaptureSession", referenced from:
objc-class-ref in screenAppDelegate.o
ld: symbol(s) not found for architecture x86_64
clang: error: linker command failed with exit code 1 (use -v to see invocation)
.h
#import <Cocoa/Cocoa.h>
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
@interface screenAppDelegate : NSObject <NSApplicationDelegate>
@end
@interface Recorder : NSObject <AVCaptureFileOutputRecordingDelegate> {
@private
    AVCaptureSession *mSession;
    AVCaptureMovieFileOutput *mMovieFileOutput;
    NSTimer *mTimer;
}
-(void)screenRecording:(NSURL *)destPath;
@end
.m
#import "screenAppDelegate.h"
@implementation screenAppDelegate
- (void)applicationDidFinishLaunching:(NSNotification *)aNotification
{
    // Insert code here to initialize your application
}
@end
@implementation Recorder
-(void)screenRecording:(NSURL *)destPath
{
    // Create a capture session
    mSession = [[AVCaptureSession alloc] init];

    // Set the session preset as you wish
    mSession.sessionPreset = AVCaptureSessionPresetMedium;

    // If you're on a multi-display system and you want to capture a secondary display,
    // you can call CGGetActiveDisplayList() to get the list of all active displays.
    // For this example, we just specify the main display.
    CGDirectDisplayID displayId = kCGDirectMainDisplay;

    // Create a ScreenInput with the display and add it to the session
    AVCaptureScreenInput *input = [[AVCaptureScreenInput alloc] initWithDisplayID:displayId];
    if (!input) {
        //[mSession release];
        mSession = nil;
        return;
    }
    if ([mSession canAddInput:input])
        [mSession addInput:input];

    // Create a MovieFileOutput and add it to the session
    mMovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
    if ([mSession canAddOutput:mMovieFileOutput])
        [mSession addOutput:mMovieFileOutput];

    // Start running the session
    [mSession startRunning];

    // Delete any existing movie file first
    if ([[NSFileManager defaultManager] fileExistsAtPath:[destPath path]])
    {
        NSError *err;
        if (![[NSFileManager defaultManager] removeItemAtPath:[destPath path] error:&err])
        {
            NSLog(@"Error deleting existing movie %@",[err localizedDescription]);
        }
    }

    // Start recording to the destination movie file
    // The destination path is assumed to end with ".mov", for example, @"/users/master/desktop/capture.mov"
    // Set the recording delegate to self
    [mMovieFileOutput startRecordingToOutputFileURL:destPath recordingDelegate:self];

    // Fire a timer in 5 seconds
    mTimer = [NSTimer scheduledTimerWithTimeInterval:5 target:self selector:@selector(finishRecord:) userInfo:nil repeats:NO];
}
-(void)finishRecord:(NSTimer *)timer
{
    // Stop recording to the destination movie file
    [mMovieFileOutput stopRecording];

    //[mTimer release];
    mTimer = nil;
}
// AVCaptureFileOutputRecordingDelegate methods
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
NSLog(@"Did finish recording to %@ due to error %@", [outputFileURL description], [error description]);
// Stop running the session
[mSession stopRunning];
// Release the session
//[mSession release];
mSession = nil;
}