If you're reading this in 2016 and looking for a Swift 2.x solution, I've got you covered. Because the export is asynchronous, my solution implements a closure that hands back the output file only after it has actually been written, so you never immediately read a zero-byte file. I use this specifically for overlapping two audio tracks, taking the first track's duration as the total output duration.
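A quick note on setup: the code below assumes AVFoundation is imported and that audioMixParams is an array property on the containing class (the first function appends to it, the second reads it). A minimal sketch of that assumed declaration:

import AVFoundation

// Shared between setUpAndAddAudioAtPath and saveRecording;
// collects the per-track volume settings for the final mix
var audioMixParams: [AVMutableAudioMixInputParameters] = []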
func setUpAndAddAudioAtPath(assetURL: NSURL, toComposition composition: AVMutableComposition, duration: CMTime) {
    let songAsset = AVURLAsset(URL: assetURL, options: nil)
    let track = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
    let sourceAudioTrack = songAsset.tracksWithMediaType(AVMediaTypeAudio)[0]

    let startTime = CMTimeMakeWithSeconds(0, 1)
    // Example of a hard-coded duration instead of the one passed in:
    // let longestTime = CMTimeMake(848896, 44100) // (19.24 seconds)
    let tRange = CMTimeRangeMake(startTime, duration)

    // Set the volume for this track and keep the parameters for the final mix
    let trackMix = AVMutableAudioMixInputParameters(track: track)
    trackMix.setVolume(1.0, atTime: kCMTimeZero)
    audioMixParams.append(trackMix)

    // Insert the source audio into the composition track
    do {
        try track.insertTimeRange(tRange, ofTrack: sourceAudioTrack, atTime: CMTimeMake(0, 44100))
    } catch {
        print("insertTimeRange failed: \(error)")
    }
}
func saveRecording(audio1: NSURL, audio2: NSURL, callback: (url: NSURL?, error: NSError?) -> ()) {
    let composition = AVMutableComposition()

    // Use the first track's duration as the total output duration,
    // so the two tracks overlap for that length of time
    let avAsset1 = AVURLAsset(URL: audio1, options: nil)
    let assetTrack1 = avAsset1.tracksWithMediaType(AVMediaTypeAudio)[0]
    let duration = assetTrack1.timeRange.duration

    // Add both audio tracks to the composition
    setUpAndAddAudioAtPath(audio1, toComposition: composition, duration: duration)
    setUpAndAddAudioAtPath(audio2, toComposition: composition, duration: duration)

    let audioMix = AVMutableAudioMix()
    audioMix.inputParameters = audioMixParams

    // If you need to query which formats you can export to, here's a way to find out
    NSLog("compatible presets for composition: %@", AVAssetExportSession.exportPresetsCompatibleWithAsset(composition))

    // Build a unique output file name in the Documents directory
    let format = NSDateFormatter()
    format.dateFormat = "yyyy-MM-dd-HH-mm-ss"
    let currentFileName = "recording-\(format.stringFromDate(NSDate()))-merge.m4a"
    let documentsDirectory = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
    let outputUrl = documentsDirectory.URLByAppendingPathComponent(currentFileName)

    guard let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A) else {
        callback(url: nil, error: nil)
        return
    }
    assetExport.outputFileType = AVFileTypeAppleM4A
    assetExport.outputURL = outputUrl
    assetExport.audioMix = audioMix // apply the per-track volume settings

    // The export is asynchronous; the completion handler fires only after the file
    // has been written, so the caller never reads a zero-byte file
    assetExport.exportAsynchronouslyWithCompletionHandler {
        audioMixParams.removeAll()
        switch assetExport.status {
        case .Failed:
            print("failed \(assetExport.error)")
            callback(url: nil, error: assetExport.error)
        case .Cancelled:
            print("cancelled \(assetExport.error)")
            callback(url: nil, error: assetExport.error)
        default:
            print("complete")
            callback(url: outputUrl, error: nil)
        }
    }
}
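For completeness, here's a minimal sketch of how the merge might be called once both recordings exist on disk. The two file names below are hypothetical placeholders; substitute the URLs of your own recorded files:

let docs = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
// Hypothetical example file names; use your own recorded files here
let firstRecording = docs.URLByAppendingPathComponent("recording-1.m4a")
let secondRecording = docs.URLByAppendingPathComponent("recording-2.m4a")

saveRecording(firstRecording, audio2: secondRecording) { url, error in
    if let url = url {
        print("merged file written to \(url)") // safe to read: the file is fully written
    } else {
        print("merge failed: \(error)")
    }
}

Because the callback is only invoked from the export session's completion handler, reading or playing the file at the returned URL inside that closure is safe.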