0

私はまさに私ができる方法だろう。オーディオキューでサウンドを録音します。プロジェクトがあり、プロジェクトに必要なすべてのフレームワークを追加しました(これは問題ではありません)。変更したファイルは2つだけです。

  • ViewController.h
  • ViewController.m

始める前に:ファイルの種類(void)または(IBAction)についてはわかりません(テストできません)。

これが私のViewController.hのソースコードです

#import <UIKit/UIKit.h>
//#import <AudioToolbox/AudioQueue.h>  //(don't know to use that) 
//#import <AudioToolbox/AudioFile.h>   //(don't know to use that)
#import <AudioUnit/AudioUnit.h>
#import <AudioToolbox/AudioToolbox.h>

#define NUM_BUFFERS 3
#define SECONDS_TO_RECORD 10

typedef struct
{
    AudioStreamBasicDescription  dataFormat;
    AudioQueueRef                queue;
    AudioQueueBufferRef          buffers[NUM_BUFFERS];
    AudioFileID                  audioFile;
    SInt64                       currentPacket;
    bool                         recording;
} RecordState;

typedef struct
{
    AudioStreamBasicDescription  dataFormat;
    AudioQueueRef                queue;
    AudioQueueBufferRef          buffers[NUM_BUFFERS];
    AudioFileID                  audioFile;
    SInt64                       currentPacket;
    bool                         playing;
} PlayState;



@interface ViewController : UIViewController{
    IBOutlet UILabel* labelStatus;
    IBOutlet UIButton* buttonRecord;
    IBOutlet UIButton* buttonPlay;
    RecordState recordState;
    PlayState playState;
    CFURLRef fileURL;
}

- (BOOL)getFilename:(char*)buffer maxLenth:(int)maxBufferLength;
- (void)setupAudioFormat:(AudioStreamBasicDescription*)format;
- (void)recordPressed:(id)sender;
- (void)playPressed:(id)sender;
- (IBAction)startRecording;
- (IBAction)stopRecording;
- (IBAction)startPlayback;
- (IBAction)stopPlayback;

これが私のViewController.mのソースコードです

(関数startPlaybackコメントにエラーがあります)

#import "ViewController.h"
@interface ViewController ()
@end

@implementation ViewController

void AudioInputCallback(
                    void *inUserData,
                    AudioQueueRef inAQ,
                    AudioQueueBufferRef inBuffer,
                    const AudioTimeStamp *inStartTime,
                    UInt32 inNumberPacketDescriptions,
                    const AudioStreamPacketDescription *inPacketDescs)
{

    RecordState* recordState = (RecordState*)inUserData;

    if(!recordState->recording)
    {
        printf("Not recording, returning\n");
    }

    //if(inNumberPacketDescriptions == 0 && recordState->dataFormat.mBytesPerPacket != 0)
    //{
    //    inNumberPacketDescriptions = inBuffer->mAudioDataByteSize / recordState->dataFormat.mBytesPerPacket;
    //}

    printf("Writing buffer %lld\n", recordState->currentPacket);
    OSStatus status = AudioFileWritePackets(recordState->audioFile,
                                        false,
                                        inBuffer->mAudioDataByteSize,
                                        inPacketDescs,
                                        recordState->currentPacket,
                                        &inNumberPacketDescriptions,
                                        inBuffer->mAudioData);
    if(status == 0)
    {
        recordState->currentPacket += inNumberPacketDescriptions;
    }

    AudioQueueEnqueueBuffer(recordState->queue, inBuffer, 0, NULL);
}

void AudioOutputCallback(
                     void* inUserData,
                     AudioQueueRef outAQ,
                     AudioQueueBufferRef outBuffer)
{
    PlayState* playState = (PlayState*)inUserData;
    if(!playState->playing)
    {
        printf("Not playing, returning\n");
        return;
    }

    printf("Queuing buffer %lld for playback\n", playState->currentPacket);

    AudioStreamPacketDescription* packetDescs = NULL;

    UInt32 bytesRead;
    UInt32 numPackets = 8000;
    OSStatus status;
    status = AudioFileReadPackets(
                              playState->audioFile,
                              false,
                              &bytesRead,
                              packetDescs,
                              playState->currentPacket,
                              &numPackets,
                              outBuffer->mAudioData);

    if(numPackets)
    {
        outBuffer->mAudioDataByteSize = bytesRead;
        status = AudioQueueEnqueueBuffer(
                                     playState->queue,
                                     outBuffer,
                                     0,
                                     packetDescs);

        playState->currentPacket += numPackets;
    }
    else
    {
        if(playState->playing)
        {
            AudioQueueStop(playState->queue, false);
            AudioFileClose(playState->audioFile);
            playState->playing = false;
        }

        AudioQueueFreeBuffer(playState->queue, outBuffer);
    }    
}

- (void)setupAudioFormat:(AudioStreamBasicDescription*)format
{
    format->mSampleRate = 8000.0;
    format->mFormatID = kAudioFormatLinearPCM;
    format->mFramesPerPacket = 1;
    format->mChannelsPerFrame = 1;
    format->mBytesPerFrame = 2;
    format->mBytesPerPacket = 2;
    format->mBitsPerChannel = 16;
    format->mReserved = 0;
    format->mFormatFlags = kLinearPCMFormatFlagIsBigEndian |
    kLinearPCMFormatFlagIsSignedInteger |
    kLinearPCMFormatFlagIsPacked;
}

- (void)recordPressed:(id)sender
{
    if(!playState.playing)
    {
        if(!recordState.recording)
        {
            printf("Starting recording\n");
            [self startRecording];
        }
        else
        {
            printf("Stopping recording\n");
            [self stopRecording];
        }
    }
    else
    {
        printf("Can't start recording, currently playing\n");
    }
}

- (void)playPressed:(id)sender
{
    if(!recordState.recording)
    {
        if(!playState.playing)
        {
            printf("Starting playback\n");
            [self startPlayback];
        }
        else
        {
            printf("Stopping playback\n");
            [self stopPlayback];
        }
    }
}

- (IBAction)startRecording
{
    [self setupAudioFormat:&recordState.dataFormat];

    recordState.currentPacket = 0;

    OSStatus status;
    status = AudioQueueNewInput(&recordState.dataFormat,
                                AudioInputCallback,
                                &recordState,
                                CFRunLoopGetCurrent(),
                                kCFRunLoopCommonModes,
                                0,
                                &recordState.queue);

    if(status == 0)
    {
        for(int i = 0; i < NUM_BUFFERS; i++)
        {
            AudioQueueAllocateBuffer(recordState.queue,
                                     16000, &recordState.buffers[i]);
            AudioQueueEnqueueBuffer(recordState.queue,
                                    recordState.buffers[i], 0, NULL);
        }

        status = AudioFileCreateWithURL(fileURL,
                                        kAudioFileAIFFType,
                                        &recordState.dataFormat,
                                        kAudioFileFlags_EraseFile,
                                        &recordState.audioFile);
        if(status == 0)
        {
            recordState.recording = true;
            status = AudioQueueStart(recordState.queue, NULL);
            if(status == 0)
            {
                labelStatus.text = @"Recording";
            }
        }
    }

    if(status != 0)
    {
        [self stopRecording];
        labelStatus.text = @"Record Failed";
    }
}

- (IBAction)stopRecording
{
    recordState.recording = false;

    AudioQueueStop(recordState.queue, true);
    for(int i = 0; i < NUM_BUFFERS; i++)
    {
        AudioQueueFreeBuffer(recordState.queue,
                             recordState.buffers[i]);
    }

    AudioQueueDispose(recordState.queue, true);
    AudioFileClose(recordState.audioFile);
    labelStatus.text = @"Idle";
}

- (IBAction)startPlayback
{
    playState.currentPacket = 0;

    [self setupAudioFormat:&playState.dataFormat];

    OSStatus status;


    // I get here an error
    // Use of undeclared identifier 'fsRdPerm'
    // How to fix that?
    status = AudioFileOpenURL(fileURL, fsRdPerm, kAudioFileAIFFType, &playState.audioFile);


    if(status == 0)
    {
        status = AudioQueueNewOutput(
                                     &playState.dataFormat,
                                     AudioOutputCallback,
                                     &playState,
                                     CFRunLoopGetCurrent(),
                                     kCFRunLoopCommonModes,
                                     0,
                                     &playState.queue);

        if(status == 0)
        {
            playState.playing = true;
            for(int i = 0; i < NUM_BUFFERS && playState.playing; i++)
            {
                if(playState.playing)
                {
                    AudioQueueAllocateBuffer(playState.queue, 16000, &playState.buffers[i]);
                    AudioOutputCallback(&playState, playState.queue, playState.buffers[i]);
                }
            }

            if(playState.playing)
            {
                status = AudioQueueStart(playState.queue, NULL);
                if(status == 0)
                {
                    labelStatus.text = @"Playing";
                }
            }
        }
    }

    if(status != 0)
    {
        [self stopPlayback];
        labelStatus.text = @"Play failed";
    }
}

- (void)stopPlayback
{
    playState.playing = false;

    for(int i = 0; i < NUM_BUFFERS; i++)
    {
        AudioQueueFreeBuffer(playState.queue, playState.buffers[i]);
    }

    AudioQueueDispose(playState.queue, true);
    AudioFileClose(playState.audioFile);
}

- (BOOL)getFilename:(char*)buffer maxLenth:(int)maxBufferLength
{
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,
                                                         NSUserDomainMask, YES);
    NSString* docDir = [paths objectAtIndex:0];
    NSString* file = [docDir stringByAppendingString:@"/recording.aif"];
    return [file getCString:buffer maxLength:maxBufferLength encoding:NSUTF8StringEncoding];
}

- (void)viewDidLoad
{
    [super viewDidLoad];
        // Do any additional setup after loading the view, typically from a nib.
}

- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
        // Dispose of any resources that can be recreated.
}

@end

プロジェクトを使用できるように、これを修正する方法がわかりません。関数startPlaybackをコメントアウトすると、次のエラーが発生します。

Ld / Users / NAME / Library / Developer / Xcode / DerivedData / recorder_test2-gehymgoneospsldgfpxnbjdapebu / Build / Products / Debug-iphonesimulator / recorder_test2.app / recorder_test2 normal i386 cd / Users / NAME / Desktop / recorder_test2 setenv IPHONEOS_DEPLOY /Xcode.app/Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/usr/bin:/Applications/Xcode.app/Contents/Developer/usr/bin:/usr/bin:/bin:/usr/sbin:/ sbin "/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/clang -arch i386 -isysroot /Applications/Xcode.app/Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs/ iPhoneSimulator6.0。sdk -L / Users / NAME / Library / Developer / Xcode / DerivedData / recorder_test2-gehymgoneospsldgfpxnbjdapebu / Build / Products / Debug-iphonesimulator -F / Users / NAME / Library / Developer / Xcode / DerivedData / recorder_test2-gehymgoneospsldgfpxnbj Debug-iphonesimulator -filelist / Users / NAME / Library / Developer / Xcode / DerivedData / recorder_test2-gehymgoneospsldgfpxnbjdapebu / Build / Intermediates / recorder_test2.build / Debug-iphonesimulator / recorder_test2.build / Objects-normal / i386 / recorder_test2.LinkFileList objc_abi_version -Xlinker 2 -fobjc-arc -fobjc-link-runtime -Xlinker -no_implicit_dylibs -mios-simulator-version-min=6。0 -framework AudioToolbox -framework AudioUnit -framework CoreAudio -framework UIKit -framework Foundation -framework CoreGraphics -o / Users / NAME / Library / Developer / Xcode / DerivedData / recorder_test2-gehymgoneospsldgfpxnbjdapebu / Build / Products / Debug-iphonesimulator / recorder_test2.app / recorder_test2

ld:フレームワークが見つかりませんAudioUnit clang:エラー:リンカーコマンドが終了コード1で失敗しました(呼び出しを確認するには-vを使用してください)

罪状認否は2つのソースファイルを使用し、自分でテストして助けてくれます。

4

2 に答える 2

2

プロジェクト設定にAudioUnitを追加し、それへの正しいパスがあることを確認してください。

ここに画像の説明を入力してください

于 2012-10-18T12:31:00.773 に答える
0

プロジェクトからAudioUnit.frameworkを削除し、に置き換えfsRdPermますkAudioFileReadPermission

長い話:

グーグルを長い間旅した後、証拠を見つけることができませんでしたがfsRdPerm、iOS 6のオーディオフレームワークのいずれにももう存在しないとほぼ確信しています。iOS6シミュレーターで検索したところ、表示されるだけです。レガシーフレームワークであるCarbonCore.frameworkで、したがって古い:

pwds2622:Frameworks mac$ pwd
/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs/iPhoneSimulator6.0.sdk/System/Library/Frameworks
s2622:Frameworks mac$ grep -sr fsRdPerm .
./CoreServices.framework/Frameworks/CarbonCore.framework/Headers/Files.h:  fsRdPerm                      = 0x01,
./CoreServices.framework/Frameworks/CarbonCore.framework/Versions/A/Headers/Files.h:  fsRdPerm                      = 0x01,
./CoreServices.framework/Frameworks/CarbonCore.framework/Versions/Current/Headers/Files.h:  fsRdPerm                      = 0x01,
./CoreServices.framework/Versions/A/Frameworks/CarbonCore.framework/Headers/Files.h:  fsRdPerm                      = 0x01,
./CoreServices.framework/Versions/A/Frameworks/CarbonCore.framework/Versions/A/Headers/Files.h:  fsRdPerm                      = 0x01,
./CoreServices.framework/Versions/A/Frameworks/CarbonCore.framework/Versions/Current/Headers/Files.h:  fsRdPerm                      = 0x01,
./CoreServices.framework/Versions/Current/Frameworks/CarbonCore.framework/Headers/Files.h:  fsRdPerm                      = 0x01,
./CoreServices.framework/Versions/Current/Frameworks/CarbonCore.framework/Versions/A/Headers/Files.h:  fsRdPerm                      = 0x01,
./CoreServices.framework/Versions/Current/Frameworks/CarbonCore.framework/Versions/Current/Headers/Files.h:  fsRdPerm                      = 0x01,

kAudioFileReadPermissionの代わりに使用することを提案するフォーラムの投稿を見つけましたfsRdPerm。これは機能し、実際、のドキュメントにkAudioFileReadPermissionは、これは「AudioFileOpenURLおよびAudioFileOpen関数で使用するためのフラグ」の1つであると記載されています。詳細については、オーディオファイルのアクセス許可フラグを参照してください。

于 2012-12-03T20:22:19.470 に答える