1

みなさん、こんにちは。リモート IO を使用してストリーム オーディオを再生するのに問題があります。入力前に PCM フレーム データを確認しましたが、データ自体は正しいものでした。原因がわからず困っています。助けていただけますか? どうもありがとうございます! 以下が私のコードです。

/// Configures the RemoteIO audio unit to play 8 kHz / 16-bit / mono
/// linear-PCM data supplied by playCallback, then configures the
/// (deprecated C API) audio session for media playback mixed with
/// other audio.
///
/// NOTE(review): this method only *initializes* the unit — nothing here
/// calls AudioOutputUnitStart(audioPlayUnit), so playback will not begin
/// until some other code starts the unit. Confirm against the caller.
- (void)initializeAudioPlay
{
    OSStatus status;

    // Describe the RemoteIO output component.
    AudioComponentDescription desc;
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_RemoteIO;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;

    // Find and instantiate the component.
    AudioComponent outputComponent = AudioComponentFindNext(NULL, &desc);
    status = AudioComponentInstanceNew(outputComponent, &audioPlayUnit);
    [self checkStatus:status];

    // Enable playback on the output element. Playback enable lives on the
    // OUTPUT scope of the output bus (the original code used
    // kAudioUnitScope_Input here, which does not enable speaker output).
    UInt32 enableOutput = 1;
    status = AudioUnitSetProperty(audioPlayUnit, kAudioOutputUnitProperty_EnableIO,
                                  kAudioUnitScope_Output, kOutputBus,
                                  &enableOutput, sizeof(enableOutput));
    [self checkStatus:status];

    // Describe the PCM we will feed the unit: 8 kHz, mono, 16-bit signed
    // integer, interleaved (canonical flags), one frame per packet.
    AudioStreamBasicDescription audioFormat;
    memset(&audioFormat, 0, sizeof(audioFormat));
    audioFormat.mSampleRate       = 8000;
    audioFormat.mFormatID         = kAudioFormatLinearPCM;
    audioFormat.mFormatFlags      = kAudioFormatFlagsCanonical;
    audioFormat.mFramesPerPacket  = 1;
    audioFormat.mChannelsPerFrame = 1;
    audioFormat.mBitsPerChannel   = 16;
    audioFormat.mBytesPerFrame    = (audioFormat.mBitsPerChannel / 8) * audioFormat.mChannelsPerFrame;
    audioFormat.mBytesPerPacket   = audioFormat.mBytesPerFrame;

    // The format is applied to the INPUT scope of the output bus: this is
    // the format of the data the render callback hands to the unit.
    status = AudioUnitSetProperty(audioPlayUnit,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Input,
                                  kOutputBus,
                                  &audioFormat,
                                  sizeof(audioFormat));
    [self checkStatus:status];

    // FIXME(review): kAudioUnitParameterUnit_LinearGain is a *unit* enum,
    // not a parameter ID, and RemoteIO exposes no such parameter — this
    // call almost certainly fails silently. Remove it, or apply gain to
    // the samples in the render path instead.
    float value = (float)10 / 255.0;
    AudioUnitSetParameter(audioPlayUnit, kAudioUnitParameterUnit_LinearGain,
                          kAudioUnitScope_Input, 0, value, 0);

    // Declare a mono channel layout. Zero the struct first — the original
    // code passed uninitialized mChannelBitmap / mNumberChannelDescriptions.
    AudioChannelLayout monoLayout;
    memset(&monoLayout, 0, sizeof(monoLayout));
    monoLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
    AudioUnitSetProperty(audioPlayUnit,
                         kAudioUnitProperty_AudioChannelLayout,
                         kAudioUnitScope_Global,
                         0, &monoLayout, sizeof(monoLayout));

    // Request maximum render quality. The original code passed the
    // property ID itself (kAudioUnitProperty_RenderQuality == 26) as the
    // quality value; the value must be a quality level (0..127).
    UInt32 renderQuality = kRenderQuality_Max;
    status = AudioUnitSetProperty(audioPlayUnit,
                                  kAudioUnitProperty_RenderQuality,
                                  kAudioUnitScope_Global,
                                  0,
                                  &renderQuality,
                                  sizeof(renderQuality));
    [self checkStatus:status];

    // Install the render callback that supplies PCM to the output bus.
    AURenderCallbackStruct callbackStruct;
    callbackStruct.inputProc = playCallback;
    callbackStruct.inputProcRefCon = self;
    status = AudioUnitSetProperty(audioPlayUnit,
                                  kAudioUnitProperty_SetRenderCallback,
                                  kAudioUnitScope_Input,
                                  kOutputBus,
                                  &callbackStruct,
                                  sizeof(callbackStruct));
    [self checkStatus:status];

    // Initialize the unit (start is performed elsewhere).
    status = AudioUnitInitialize(audioPlayUnit);
    [self checkStatus:status];
    DGLog(@"audio play unit initialize = %d", status);

    // Staging buffer the render callback drains.
    circularBuf = [[CircularBuf alloc] initWithBufLen:kBufferLength];

    // Configure the audio session. Category and mixing behavior are set
    // BEFORE activation — the original code activated the session first,
    // so the category change might not take effect.
    AudioSessionInitialize(NULL, NULL, NULL, NULL);

    UInt32 sessionCategory = kAudioSessionCategory_MediaPlayback;
    status = AudioSessionSetProperty(kAudioSessionProperty_AudioCategory,
                                     sizeof(sessionCategory),
                                     &sessionCategory);
    [self checkStatus:status];

    // Allow our audio to mix with other apps' audio.
    UInt32 audioMixed = 1;
    status = AudioSessionSetProperty(kAudioSessionProperty_OverrideCategoryMixWithOthers,
                                     sizeof(audioMixed),
                                     &audioMixed);
    [self checkStatus:status];

    AudioSessionSetActive(true);
}

    // Pulls one 160-sample (320-byte) frame of 16-bit PCM out of the
    // circular buffer, applies a fixed +10 dB gain with clipping, and
    // copies the result into the supplied output AudioBuffer. If fewer
    // than 320 bytes are available, the output buffer is filled with
    // silence instead.
    //
    // NOTE(review): `buffData` is an ivar declared elsewhere — its element
    // type and capacity (must be >= 160 samples and >= buffer->mDataByteSize
    // bytes) cannot be confirmed from this file.
    - (void)processAudio:(AudioBuffer *)buffer
    {
        short pcmTemp[160];
        unsigned char * amrBuffer=NULL;
        AudioUnitSampleType sample;
        int i = 0;
        int j = 0;

        // NOTE(review): both branches call ReadData identically; only the
        // else-branch offsets `i` by ReadPos. Presumably isReadTwoRegion
        // means the readable region wraps — confirm against CircularBuf.
        if ([circularBuf isReadTwoRegion]) {
            amrBuffer = [circularBuf ReadData];
        } else {
            amrBuffer = [circularBuf ReadData];
            i = [circularBuf ReadPos];
        }
        // j - i is the number of readable bytes currently buffered.
        j = i + circularBuf.Length;

        if (j - i >= 320) {

            // One frame: 160 samples * 2 bytes = 320 bytes.
            memcpy((void*)pcmTemp, (void*)amrBuffer, 320);
            for(i=0; i<160; i++)
            {
                // 3.162277 = 10^(10/20), i.e. a fixed +10 dB gain.
                sample = 3.162277*pcmTemp[i];
                // Clamp to the signed 16-bit range to avoid wrap-around
                // distortion after the gain boost.
                if(sample > 32767)sample = 32767;
                else if(sample < -32768)sample = -32768;

                buffData[i] = sample;
            }

            // NOTE(review): copies mDataByteSize bytes but only 160 samples
            // were written above — if the hardware asks for more than 160
            // frames this reads past the prepared data. Verify callback
            // buffer sizing.
            memcpy(buffer->mData, buffData, buffer->mDataByteSize);

            [circularBuf AdvanceReadPos:320];
        }
        else
        {
            // Not enough data buffered: emit silence for this render cycle.
            memset(buffer->mData, 0, buffer->mDataByteSize);
        }
    }


    /**
     Render callback installed on the RemoteIO unit. Core Audio invokes it
     on the realtime audio thread whenever the hardware needs
     inNumberFrames more frames of output. Every buffer is pre-filled with
     silence, then the AudioPlay instance fills the first (mono) buffer
     from its circular buffer. Regions left unfilled therefore play as
     silence rather than garbage.
     */
    static OSStatus playCallback(void *inRefCon,
                                 AudioUnitRenderActionFlags *ioActionFlags,
                                 const AudioTimeStamp *inTimeStamp,
                                 UInt32 inBusNumber,
                                 UInt32 inNumberFrames,
                                 AudioBufferList *ioData) {
        AudioPlay *audioPlay = (AudioPlay *)inRefCon;

        // Silence every output buffer up front.
        for (UInt32 bufferIndex = 0; bufferIndex < ioData->mNumberBuffers; bufferIndex++) {
            AudioBuffer *outBuffer = &ioData->mBuffers[bufferIndex];
            memset(outBuffer->mData, 0, outBuffer->mDataByteSize);
        }

        // Mono stream: mark the first buffer single-channel and hand it to
        // the player to copy decoded PCM into.
        ioData->mBuffers[0].mNumberChannels = 1;
        [audioPlay processAudio:&ioData->mBuffers[0]];

        return noErr;
    }
4

0 に答える 0