
I'm using an FFT to analyze the audio coming into the microphone in real time, but this only seems to work when it runs inside an AsyncTask. I also need to run another thread in the background. Both of these only need to run for 3 seconds.

When I try to run both, I get an ANR timeout. Is there a way to get them to work together?

The FFT doesn't need to be in an AsyncTask, because it only writes to a file rather than to the display. However, when I pull the FFT out of the AsyncTask, I get errors with the toTransform array.
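To illustrate what I mean by pulling the FFT out of the AsyncTask, this is roughly the structure I have in mind: run the capture/FFT loop on a plain Thread, write straight to the file, and let the loop stop itself after 3 seconds. This is only a simplified, untested sketch (the RealDoubleFFT usage, block size, and file path are copied from my real code further down; the peak picking is glossed over):

    Thread analyzeThread = new Thread(new Runnable() {
        public void run() {
            int blockSize = 256;
            int sampleRate = 8000;
            int bufferSize = AudioRecord.getMinBufferSize(sampleRate,
                    AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
            AudioRecord recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
                    sampleRate, AudioFormat.CHANNEL_IN_MONO,
                    AudioFormat.ENCODING_PCM_16BIT, bufferSize);
            RealDoubleFFT transformer = new RealDoubleFFT(blockSize);

            short[] buffer = new short[blockSize];
            double[] toTransform = new double[blockSize];

            recorder.startRecording();
            long stopAt = System.currentTimeMillis() + 3000; // run for 3 seconds only
            try {
                BufferedWriter out = new BufferedWriter(
                        new FileWriter("/mnt/sdcard/Test APKs/frequency.file", true));
                while (System.currentTimeMillis() < stopAt) {
                    int read = recorder.read(buffer, 0, blockSize);
                    for (int i = 0; i < blockSize && i < read; i++) {
                        toTransform[i] = buffer[i] / 32768.0; // scale signed 16-bit samples
                    }
                    transformer.ft(toTransform);

                    // very rough peak pick over the transform output, just for the sketch
                    int peakBin = 0;
                    for (int i = 1; i < blockSize; i++) {
                        if (Math.abs(toTransform[i]) > Math.abs(toTransform[peakBin])) {
                            peakBin = i;
                        }
                    }
                    out.append(String.valueOf((double) peakBin * sampleRate / blockSize));
                    out.newLine();
                }
                out.close();
            } catch (IOException e) {
                e.printStackTrace();
            } finally {
                recorder.stop();
                recorder.release();
            }
        }
    });
    analyzeThread.start();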

Any help is greatly appreciated. My full code is below:

public class SoundRecord extends Activity implements OnClickListener {

boolean started = false;
int sampleRate = 8000;

// originally from http://marblemice.blogspot.com/2010/04/generate-and-play-tone-in-android.html
// and modified by Steve Pomeroy <steve@staticfree.info>
private final int duration = 3; // seconds
private final int numSamples = duration * sampleRate;
private final double sample[] = new double[numSamples];
private final double freqOfTone = 1500; // hz
private final byte generatedSnd[] = new byte[2 * numSamples];

Handler handler = new Handler();
Timer timer = new Timer();

public int channelConfiguration = AudioFormat.CHANNEL_IN_MONO; 
public int audioEncoding = AudioFormat.ENCODING_PCM_16BIT; 

//FFT
private RealDoubleFFT transformer;
int blockSize = 256;
Button startStopButton;
int frequency = 8000;
double freq;
String freqValue;
int bufferSize = AudioRecord.getMinBufferSize(frequency,
        channelConfiguration, audioEncoding);

AudioRecord audioRecord = new AudioRecord(
        MediaRecorder.AudioSource.MIC, frequency,
        channelConfiguration, audioEncoding, bufferSize);

GetFrequency recordTask;


@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_SoundRecord);

    Button button = (Button) findViewById(R.id.start);
    button.setOnClickListener(this);
}




@Override
public boolean onCreateOptionsMenu(Menu menu) {
    // Inflate the menu; this adds items to the action bar if it is present.
    getMenuInflater().inflate(R.menu.SoundRecord, menu);
    return true;
}

@Override
public void onClick(View v) {
    if (started) {
        started = false;
        recordTask.cancel(true);
    } else {
        final Thread soundout = new Thread(new Runnable() {
            public void run() {
                genTone();
                handler.post(new Runnable() {

                    public void run() {
                        playSound();
                    }
                });
            }
        });
        soundout.start();


        // Analyze frequency
        recordTask = new GetFrequency();
        try {
            recordTask.get();
        } catch (InterruptedException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (ExecutionException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }


    }
}

void genTone(){
    // fill out the array
    for (int i = 0; i < numSamples; ++i) {
        sample[i] = Math.sin(2 * Math.PI * i / (sampleRate/freqOfTone));
    }

    // convert to 16 bit pcm sound array
    // assumes the sample buffer is normalised.
    int idx = 0;
    for (final double dVal : sample) {
        // scale to maximum amplitude
        final short val = (short) ((dVal * 32767));
        // in 16 bit wav PCM, first byte is the low order byte
        generatedSnd[idx++] = (byte) (val & 0x00ff);
        generatedSnd[idx++] = (byte) ((val & 0xff00) >>> 8);

    }
}

void playSound(){
    final AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            sampleRate, AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT, numSamples,
            AudioTrack.MODE_STATIC);
    audioTrack.write(generatedSnd, 0, generatedSnd.length);
    audioTrack.play();
}

private class GetFrequency extends AsyncTask<Void, double[], Void> {
    @Override
    protected Void doInBackground(Void... params) {
        try {
            short[] buffer = new short[blockSize];
            double[] toTransform = new double[blockSize];

            audioRecord.startRecording();

            while (started) {
                int bufferReadResult = audioRecord.read(buffer, 0, blockSize);

                for (int i = 0; i < blockSize && i < bufferReadResult; i++) {
                    toTransform[i] = (double) buffer[i] / 32768.0; // signed 16 bit

                }
                transformer.ft(toTransform);
                publishProgress(toTransform); 
                timer.schedule(new TimerTask() {
                    public void run() {
                        handler.post(new Runnable() {
                            public void run() {
                                audioRecord.stop();
                            }
                        });
                    }
                }, 3000); // Stop after 3 seconds
            }
        } catch (Throwable t) {
            Log.e("AudioRecord", "Recording Failed");
        }

        return null;
    }

    protected void onProgressUpdate(double[]... toTransform) {
        File freqFile = new File("/mnt/sdcard/Test APKs/frequency.file");
        File magFile = new File("/mnt/sdcard/Test APKs/magnitude.file");
        int blockSize = 256;
        double[] audioDataDoubles = new double[(blockSize*2)];
        String mydate = java.text.DateFormat.getDateTimeInstance().format(Calendar.getInstance().getTime());

        if (!freqFile.exists())
        {
           try {
              freqFile.createNewFile();
           } catch (IOException e) {
              // TODO Auto-generated catch block
              e.printStackTrace();
           }
        }
        if (!magFile.exists())
        {
           try {
              magFile.createNewFile();
           } catch (IOException e) {
              // TODO Auto-generated catch block
              e.printStackTrace();
           }
        }

        for ( int x = 0; x < toTransform[0].length; x++) {
            audioDataDoubles[2*x] = toTransform[0][x];
            audioDataDoubles[(2*x)+1] = 0.0;
        }
        double[] re = new double[blockSize];
        double[] im = new double[blockSize];
        double[] magnitude = new double[blockSize];

        // Calculate the Real and imaginary and Magnitude.
        for(int i = 0; i < blockSize; i++){
            // real is stored in first part of array
            re[i] = audioDataDoubles[i*2];
            // imaginary is stored in the sequential part
            im[i] = audioDataDoubles[(2*i)+1];
            // magnitude is calculated by the square root of (imaginary^2 + real^2)
            magnitude[i] = Math.sqrt((re[i] * re[i]) + (im[i]*im[i]));
        }

        double peak = -1.0; 
        // Get the largest magnitude peak
        for(int i = 0; i < blockSize; i++){
            if(peak < magnitude[i])
                peak = magnitude[i];
            String magValue = String.valueOf(peak);
            try {
                BufferedWriter buf = new BufferedWriter(new FileWriter(magFile, true));
                buf.append(mydate + " - " + magValue);
                buf.newLine();
                buf.close();
            } catch (IOException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
             }
        }
        // calculated the frequency
        freq = (frequency * peak)/blockSize; 
        freqValue = String.valueOf(freq);
        //Toast.makeText(Main.this, freqValue, Toast.LENGTH_SHORT).show();
        try {
            BufferedWriter buf = new BufferedWriter(new FileWriter(freqFile, true));
            buf.append(mydate + " - " + freqValue);
            buf.newLine();
            buf.close();
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
}

}
