
Converting a WAV file to CAF format using AVAssetWriter

I have a WAV file and I want to convert it to Core Audio Format (CAF).

This is the code I have been trying, and it is getting me nowhere. I have searched all the forums, but I have not found any working code for this.

NSError *error = nil;

NSDictionary *audioSetting = [NSDictionary dictionaryWithObjectsAndKeys:
                              [NSNumber numberWithFloat:16000.0], AVSampleRateKey,
                              [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                              [NSNumber numberWithInt:kAudioFormatAppleIMA4], AVFormatIDKey, nil];

NSString *audioFilePath = filePath;
AVURLAsset *URLAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:audioFilePath] options:nil];

if (!URLAsset) return NO;

AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:URLAsset error:&error];
if (error) return NO;

/*NSArray *tracks = [URLAsset tracksWithMediaType:AVMediaTypeAudio];
if (![tracks count]) return NO;

AVAssetReaderAudioMixOutput *audioMixOutput = [AVAssetReaderAudioMixOutput
                                               assetReaderAudioMixOutputWithAudioTracks:tracks
                                               audioSettings:audioSetting];

AVAssetReaderOutput *audioOutput = [AVAssetReaderOutput ]
if (![assetReader canAddOutput:audioMixOutput]) return NO;

[assetReader addOutput:audioMixOutput];
*/

if (![assetReader startReading]) return NO;

NSString *outPath = [filePath stringByDeletingPathExtension];
outPath = [outPath stringByAppendingPathExtension:@"caf"];

[[NSFileManager defaultManager] removeItemAtPath:outPath error:nil];
NSURL *outURL = [NSURL fileURLWithPath:outPath];
AVAssetWriter *assetWriter = [AVAssetWriter assetWriterWithURL:outURL
                                                      fileType:AVFileTypeCoreAudioFormat
                                                         error:&error];
if (error) return NO;

AVAssetWriterInput *assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                                          outputSettings:audioSetting];
assetWriterInput.expectsMediaDataInRealTime = NO;

if (![assetWriter canAddInput:assetWriterInput]) return NO;

[assetWriter addInput:assetWriterInput];

if (![assetWriter startWriting]) return NO;

[assetWriter startSessionAtSourceTime:kCMTimeZero];

dispatch_queue_t queue = dispatch_queue_create("assetWriterQueue", NULL);

[assetWriterInput requestMediaDataWhenReadyOnQueue:queue usingBlock:^{

    NSLog(@"start");

    while (1)
    {
        if ([assetWriterInput isReadyForMoreMediaData]) {

            CMSampleBufferRef sampleBuffer = [audioMixOutput copyNextSampleBuffer];

            if (sampleBuffer) {
                [assetWriterInput appendSampleBuffer:sampleBuffer];
                CFRelease(sampleBuffer);
            } else {
                [assetWriterInput markAsFinished];
                break;
            }
        }
    }

    [assetWriter finishWritingWithCompletionHandler:^(void)
    {
        NSLog(@"CAF Completed");
    }];

    NSLog(@"finish");
}];
return YES;

If someone could point me in the right direction, it would really help.


2 Answers


Try this:

NSString *wavFilePath = [[NSBundle mainBundle] pathForResource:@"sampleaudio" ofType:@"wav"];

NSURL *assetURL = [NSURL fileURLWithPath:wavFilePath];
AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];

NSError *assetError = nil;
AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:songAsset
                                                           error:&assetError];
if (assetError) {
    NSLog (@"error: %@", assetError);
    return;
}

AVAssetReaderOutput *assetReaderOutput = [AVAssetReaderAudioMixOutput
                                          assetReaderAudioMixOutputWithAudioTracks:songAsset.tracks
                                          audioSettings: nil];
if (! [assetReader canAddOutput: assetReaderOutput]) {
    NSLog (@"can't add reader output... die!");
    return;
}
[assetReader addOutput: assetReaderOutput];

NSString *strcafFileName = [NSString stringWithFormat:@"%@.caf", [[wavFilePath lastPathComponent] stringByDeletingPathExtension]];
NSString *cafFilePath = [delegate.strCassettePathSide stringByAppendingPathComponent:strcafFileName];

NSURL *exportURL = [NSURL fileURLWithPath:cafFilePath];
AVAssetWriter *assetWriter = [AVAssetWriter assetWriterWithURL:exportURL
                                                      fileType:AVFileTypeCoreAudioFormat
                                                         error:&assetError];
if (assetError)
{
    NSLog (@"error: %@", assetError);
    return;
}

AudioChannelLayout channelLayout;
memset(&channelLayout, 0, sizeof(AudioChannelLayout));
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
NSDictionary *outputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                [NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
                                [NSNumber numberWithFloat:11025], AVSampleRateKey,
                                [NSNumber numberWithInt:2], AVNumberOfChannelsKey,
                                [NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)], AVChannelLayoutKey,
                                [NSNumber numberWithInt:16], AVLinearPCMBitDepthKey,
                                [NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
                                [NSNumber numberWithBool:NO],AVLinearPCMIsFloatKey,
                                [NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
                                nil];
AVAssetWriterInput *assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                                          outputSettings:outputSettings];
if ([assetWriter canAddInput:assetWriterInput])
{
    [assetWriter addInput:assetWriterInput];
}
else
{
    NSLog(@"can't add asset writer input... die!");
    return;
}

assetWriterInput.expectsMediaDataInRealTime = NO;

[assetWriter startWriting];
[assetReader startReading];

AVAssetTrack *soundTrack = [songAsset.tracks objectAtIndex:0];
CMTime startTime = CMTimeMake (0, soundTrack.naturalTimeScale);
[assetWriter startSessionAtSourceTime: startTime];

__block UInt64 convertedByteCount = 0;
dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);

[assetWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue
                                        usingBlock: ^
 {
     while (assetWriterInput.readyForMoreMediaData)
     {
         CMSampleBufferRef nextBuffer = [assetReaderOutput copyNextSampleBuffer];
         if (nextBuffer)
         {
             // append buffer
             [assetWriterInput appendSampleBuffer: nextBuffer];
             convertedByteCount += CMSampleBufferGetTotalSampleSize (nextBuffer);

             CMSampleBufferInvalidate(nextBuffer);
             CFRelease(nextBuffer);
             nextBuffer = NULL;
         }
         else
         {
             [assetWriterInput markAsFinished];
             [assetReader cancelReading];

             // finalize the output file, otherwise the .caf is never completed
             [assetWriter finishWritingWithCompletionHandler:^{
                 NSLog(@"CAF conversion done, %llu bytes converted", convertedByteCount);
             }];

             break;
         }
     }
 }];
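The settings above write 16-bit linear PCM into the .caf container. If you want the IMA4 output that the question's own settings asked for, you can swap the writer's outputSettings for something like the sketch below. This is an untested variant: the 16 kHz mono values are copied from the question, compressed formats do not take the AVLinearPCM* keys, and you would need to verify that AVAssetWriter accepts this exact combination on your target OS.

AudioChannelLayout monoChannelLayout;
memset(&monoChannelLayout, 0, sizeof(AudioChannelLayout));
monoChannelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;

// IMA4, 16 kHz, mono -- matching the settings the question started with
NSDictionary *ima4OutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                    [NSNumber numberWithInt:kAudioFormatAppleIMA4], AVFormatIDKey,
                                    [NSNumber numberWithFloat:16000.0], AVSampleRateKey,
                                    [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                                    [NSData dataWithBytes:&monoChannelLayout length:sizeof(AudioChannelLayout)], AVChannelLayoutKey,
                                    nil];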
Answered 2013-12-19T09:53:33.220

I solved it using the following approach:

  1. Imported the file ExtAudioFileConvert.cpp into my project ( https://developer.apple.com/library/ios/samplecode/iPhoneExtAudioFileConvertTest/Listings/ExtAudioFileConvert_cpp.html ), along with all the other required files from the PublicUtility folder.

  2. Called the DoConvertFile function.

Sample code:

CFURLRef sourceURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, (CFStringRef)sourcefilePath, kCFURLPOSIXPathStyle, false);

CFURLRef destinationURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, (CFStringRef)destinationFilePath, kCFURLPOSIXPathStyle, false);

OSStatus error = DoConvertFile(sourceURL, destinationURL, kAudioFormatAppleIMA4, 16000.0);
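One thing to keep in mind with the snippet above (sourcefilePath and destinationFilePath are assumed to be NSString paths from your own code, and under ARC the (CFStringRef) casts would need to be written as (__bridge CFStringRef)): both CFURLRefs come from a CFURLCreate... call, so they follow the Core Foundation Create rule and should be released once the conversion returns. A minimal sketch of the cleanup and result check:

if (error == noErr) {
    NSLog(@"Conversion succeeded: %@", destinationFilePath);
} else {
    NSLog(@"DoConvertFile failed with OSStatus %d", (int)error);
}

// CFURLCreateWithFileSystemPath returns owned references (Create rule), so release them
CFRelease(sourceURL);
CFRelease(destinationURL);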
Answered 2014-06-12T13:02:37.300