In the code below I create two sounds, sound1 and sound2. Each sound contains a pool of samples so that the same sound can be played several times at once. The problem is that as soon as I create more than about 6 to 8 AVAudioPlayerNodes, each with its own AVAudioUnitTimePitch, the audio becomes completely garbled. If I push the number of samples even higher, I can't play a single sound any more. I can't tell whether my code is wrong or whether I'm running into some node limit of AVAudioEngine.
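
To make the routing clearer, here is essentially what every Sample in the code below sets up, pulled out into a stand-alone sketch (the helper name attachSample is only for illustration; the engine and buffer are assumed to already exist and be set up):

import AVFoundation

// Sketch of the per-sample routing: player -> time pitch -> main mixer.
// Assumes `engine` is a running AVAudioEngine and `buffer` is a loaded AVAudioPCMBuffer.
func attachSample(engine: AVAudioEngine, buffer: AVAudioPCMBuffer) -> AVAudioPlayerNode {
    let player = AVAudioPlayerNode()
    let timePitch = AVAudioUnitTimePitch()
    engine.attachNode(player)
    engine.attachNode(timePitch)
    engine.connect(player, to: timePitch, format: buffer.format)
    engine.connect(timePitch, to: engine.mainMixerNode, format: buffer.format)
    return player
}

With 7 samples for sound1 and 2 for sound2, that means 9 such player/time-pitch pairs are attached. The full code: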
import AVFoundation

class AudioManager {
    var audioEngine: AVAudioEngine!
    var mixer: AVAudioMixerNode!
    var sound1: Sound!
    var sound2: Sound!

    init() {
        audioEngine = AVAudioEngine()
        // mainMixerNode is created on demand and connected to the output node automatically
        mixer = audioEngine.mainMixerNode

        do {
            try audioEngine.start()
        } catch let e as NSError {
            print("Error Starting AudioEngine \(e)")
        }

        sound1 = Sound(aManager: self, path: "assets/sounds/waterRefill", ofType: "mp3", numOfSamples: 7)
        sound2 = Sound(aManager: self, path: "assets/sounds/balloonCreate", ofType: "mp3", numOfSamples: 2)
    }

    func playSound() {
        sound1.play(1.0, pitch: 1.0)
    }

    func playSound2() {
        sound2.play(1.0, pitch: 1.0)
    }

    class Sound {
        var audioManager: AudioManager!
        var audioFileBuffer: AVAudioPCMBuffer!
        var numSamples: Int = 1
        var audioIndex: Int = 0
        var sampleList: [Sample] = [Sample]()

        init(aManager: AudioManager, path: String, ofType: String, numOfSamples: Int) {
            audioManager = aManager
            numSamples = max(1, numOfSamples)
            audioFileBuffer = createAudioBuffer(path, ofType: ofType)
            // One Sample (player node + time pitch) per voice of this sound
            for _ in 0..<numSamples {
                sampleList.append(Sample(sound: self))
            }
        }

        func createAudioBuffer(path: String, ofType: String) -> AVAudioPCMBuffer? {
            let filePath: String = NSBundle.mainBundle().pathForResource(path, ofType: ofType)!
            let fileURL: NSURL = NSURL(fileURLWithPath: filePath)
            do {
                let audioFile = try AVAudioFile(forReading: fileURL)
                let audioFormat = audioFile.processingFormat
                let audioFrameCount = UInt32(audioFile.length)
                let audioFileBuffer = AVAudioPCMBuffer(PCMFormat: audioFormat, frameCapacity: audioFrameCount)
                do {
                    try audioFile.readIntoBuffer(audioFileBuffer)
                    return audioFileBuffer
                } catch let e as NSError {
                    print("Error loading Audio Into Buffer: \(e)")
                }
            } catch let e as NSError {
                print("Error loading Audio File: \(e)")
            }
            return nil
        }

        private func runIndex() {
            if audioIndex < (numSamples - 1) {
                audioIndex += 1
            } else {
                audioIndex = 0
            }
        }

        func play(volume: Float, pitch: Float) {
            var count: Int = 0
            // Walk the sample pool and play on the first node that is not already busy
            while count < numSamples {
                if numSamples > 1 {
                    runIndex()
                }
                if !sampleList[audioIndex].pitchPlayer.playing {
                    sampleList[audioIndex].volume = volume
                    sampleList[audioIndex].pitch = pitch
                    sampleList[audioIndex].playSample()
                    break
                }
                count += 1
            }
        }

        class Sample {
            var parentSound: Sound!
            var pitchPlayer: AVAudioPlayerNode!
            var timePitch: AVAudioUnitTimePitch!
            var volume: Float = 1.0
            var pitch: Float = 1.0

            init(sound: Sound) {
                parentSound = sound
                pitchPlayer = AVAudioPlayerNode()
                timePitch = AVAudioUnitTimePitch()
                // Every sample gets its own player -> time pitch -> main mixer chain
                parentSound.audioManager.audioEngine.attachNode(pitchPlayer)
                parentSound.audioManager.audioEngine.attachNode(timePitch)
                parentSound.audioManager.audioEngine.connect(pitchPlayer, to: timePitch, format: parentSound.audioFileBuffer.format)
                parentSound.audioManager.audioEngine.connect(timePitch, to: parentSound.audioManager.mixer, format: parentSound.audioFileBuffer.format)
            }

            func playSample() {
                pitchPlayer.volume = volume
                timePitch.pitch = pitch
                print("Sample Play")
                pitchPlayer.play()
                // .Interrupts makes the new buffer cut off anything still playing on this node
                pitchPlayer.scheduleBuffer(parentSound.audioFileBuffer, atTime: nil, options: .Interrupts, completionHandler: { [unowned self] in
                    print("Is Stopped: \(self.pitchPlayer.playing)")
                    self.pitchPlayer.stop()
                    print("Is Stopped: \(self.pitchPlayer.playing)")
                })
            }
        }
    }
}
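
For completeness, this is roughly how the manager is driven (simplified; in the real app the calls come from event handlers):

// Simplified driver code, not part of the classes above.
let audioManager = AudioManager()
// Triggering the same sound in quick succession is exactly the case
// the sample pool is supposed to handle.
audioManager.playSound()
audioManager.playSound()
audioManager.playSound2()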