
I have an app that uses text-to-speech at startup. Everything appears to work perfectly and the app runs without problems. However, every time I launch it, LogCat reports an error saying that text-to-speech has leaked a ServiceConnection. This isn't causing any functional problems right now, but I'm worried about what will happen down the road if I keep ignoring the error. I suspect I've simply coded something incorrectly, but I don't have the experience to pinpoint what. I've included the LogCat output and the offending Java below.

07-31 10:16:50.812: E/ActivityThread(27785): Activity com.example.com.proto1.menu has leaked ServiceConnection android.speech.tts.TextToSpeech$1@40d68398 that was originally bound here
07-31 10:16:50.812: E/ActivityThread(27785): android.app.ServiceConnectionLeaked: Activity com.example.com.proto1.menu has leaked ServiceConnection android.speech.tts.TextToSpeech$1@40d68398 that was originally bound here
07-31 10:16:50.812: E/ActivityThread(27785):    at android.app.LoadedApk$ServiceDispatcher.<init>(LoadedApk.java:932)
07-31 10:16:50.812: E/ActivityThread(27785):    at android.app.LoadedApk.getServiceDispatcher(LoadedApk.java:827)
07-31 10:16:50.812: E/ActivityThread(27785):    at android.app.ContextImpl.bindService(ContextImpl.java:1109)
07-31 10:16:50.812: E/ActivityThread(27785):    at android.content.ContextWrapper.bindService(ContextWrapper.java:370)
07-31 10:16:50.812: E/ActivityThread(27785):    at android.speech.tts.TextToSpeech.initTts(TextToSpeech.java:517)
07-31 10:16:50.812: E/ActivityThread(27785):    at android.speech.tts.TextToSpeech.<init>(TextToSpeech.java:483)
07-31 10:16:50.812: E/ActivityThread(27785):    at com.example.com.proto1.menu.onActivityResult(menu.java:255)
07-31 10:16:50.812: E/ActivityThread(27785):    at android.app.Activity.dispatchActivityResult(Activity.java:4581)
07-31 10:16:50.812: E/ActivityThread(27785):    at android.app.ActivityThread.deliverResults(ActivityThread.java:2817)
07-31 10:16:50.812: E/ActivityThread(27785):    at android.app.ActivityThread.handleSendResult(ActivityThread.java:2864)
07-31 10:16:50.812: E/ActivityThread(27785):    at android.app.ActivityThread.access$1000(ActivityThread.java:122)
07-31 10:16:50.812: E/ActivityThread(27785):    at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1057)
07-31 10:16:50.812: E/ActivityThread(27785):    at android.os.Handler.dispatchMessage(Handler.java:99)
07-31 10:16:50.812: E/ActivityThread(27785):    at android.os.Looper.loop(Looper.java:132)
07-31 10:16:50.812: E/ActivityThread(27785):    at android.app.ActivityThread.main(ActivityThread.java:4126)
07-31 10:16:50.812: E/ActivityThread(27785):    at java.lang.reflect.Method.invokeNative(Native Method)
07-31 10:16:50.812: E/ActivityThread(27785):    at java.lang.reflect.Method.invoke(Method.java:491)
07-31 10:16:50.812: E/ActivityThread(27785):    at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:844)
07-31 10:16:50.812: E/ActivityThread(27785):    at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:602)
07-31 10:16:50.812: E/ActivityThread(27785):    at dalvik.system.NativeStart.main(Native Method)



import java.util.Locale;

import android.os.Bundle;
import android.app.Activity;
import android.content.Intent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Toast;
import android.speech.tts.TextToSpeech;
import android.speech.tts.TextToSpeech.OnInitListener;
import android.support.v4.app.NavUtils;

@SuppressWarnings("unused")
public class mainj extends Activity implements OnInitListener {

    private TextToSpeech myTTS;
    // status check code
    private int MY_DATA_CHECK_CODE = 0;

    // setup TTS
    public void onInit(int initStatus) {

        // check for successful instantiation
        if (initStatus == TextToSpeech.SUCCESS) {
            if (myTTS.isLanguageAvailable(Locale.US) == TextToSpeech.LANG_AVAILABLE)
                myTTS.setLanguage(Locale.US);
        } else if (initStatus == TextToSpeech.ERROR) {
            Toast.makeText(this, "Sorry! Text To Speech failed...",
                    Toast.LENGTH_LONG).show();
        }
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.loadscreen);
        Intent checkTTSIntent = new Intent();
        checkTTSIntent.setAction(TextToSpeech.Engine.ACTION_CHECK_TTS_DATA);
        startActivityForResult(checkTTSIntent, MY_DATA_CHECK_CODE);
        Thread logoTimer = new Thread() {
            public void run() {
                try {
                    try {
                        sleep(3000);
                        speakWords("main menu loaded");
                    } catch (InterruptedException e) {
                        // TODO Auto-generated catch block
                        e.printStackTrace();
                    }

                    Intent menuIntent = new Intent("android.intent.action.MENU");
                    startActivity(menuIntent);

                } finally {
                    finish();
                }
            }

        };
        logoTimer.start();
    }

    // speak the user text
    private void speakWords(String speech) {

        // speak straight away
        if (myTTS != null) {
            myTTS.speak(speech, TextToSpeech.QUEUE_FLUSH, null);
        }
    }

    // act on result of TTS data check
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {

        if (requestCode == MY_DATA_CHECK_CODE) {
            if (resultCode == TextToSpeech.Engine.CHECK_VOICE_DATA_PASS) {
                // the user has the necessary data - create the TTS
                myTTS = new TextToSpeech(this, this);
            } else {
                // no data - install it now
                Intent installTTSIntent = new Intent();
                installTTSIntent
                        .setAction(TextToSpeech.Engine.ACTION_INSTALL_TTS_DATA);
                startActivity(installTTSIntent);
            }
        }
    }

}


package com.example.com.proto1;

import android.app.Activity;
import android.content.Intent;
import android.media.MediaPlayer;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.speech.RecognizerIntent;
import android.view.View.OnClickListener;
import android.widget.ArrayAdapter;
import android.widget.EditText;
import android.widget.ListView;
import android.widget.Toast;
import android.speech.tts.TextToSpeech;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;

@SuppressWarnings("unused")
public class menu extends Activity implements TextToSpeech.OnInitListener,
        OnClickListener {

    // defined
    TextToSpeech mTts;
    public static final int VOICE_RECOGNITION_REQUEST_CODE = 1234;

    // remember to include a listview on the xml or the voice recognition code
    // will not work
    public ListView mList;
    // TTS object
    Button speakButton, infoButton, voiceButton, talkButton;

    // TTS object
    public TextToSpeech myTTS;
    // status check code
    public int MY_DATA_CHECK_CODE = 0;

    // setup TTS
    public void onInit(int initStatus) {

        // check for successful instantiation
        if (initStatus == TextToSpeech.SUCCESS) {
            if (myTTS.isLanguageAvailable(Locale.US) == TextToSpeech.LANG_AVAILABLE)
                myTTS.setLanguage(Locale.US);
        } else if (initStatus == TextToSpeech.ERROR) {
            Toast.makeText(this, "Sorry! Text To Speech failed...",
                    Toast.LENGTH_LONG).show();
        }
    }

    @Override
    protected void onCreate(Bundle aboutmenu) {
        super.onCreate(aboutmenu);
        setContentView(R.layout.mainx);

        SpeakingAndroid speak = new SpeakingAndroid();

        VoiceRecognition voiceinput = new VoiceRecognition();

        // get a reference to the button element listed in the XML layout
        speakButton = (Button) findViewById(R.id.btn_speak);
        infoButton = (Button) findViewById(R.id.aboutbutton);
        voiceButton = (Button) findViewById(R.id.voicebutton);
        talkButton = (Button) findViewById(R.id.talk);

        // listen for clicks
        infoButton.setOnClickListener(this);
        speakButton.setOnClickListener(this);
        talkButton.setOnClickListener(this);

        // check for TTS data
        Intent checkTTSIntent = new Intent();
        checkTTSIntent.setAction(TextToSpeech.Engine.ACTION_CHECK_TTS_DATA);
        startActivityForResult(checkTTSIntent, MY_DATA_CHECK_CODE);

        // calling method
        voiceinputbuttons();

        // Check to see if a recognition activity is present
        // if running on AVD virtual device it will give this message. The mic
        // required only works on an actual android device//
        PackageManager pm = getPackageManager();
        List<ResolveInfo> activities = pm.queryIntentActivities(new Intent(
                RecognizerIntent.ACTION_RECOGNIZE_SPEECH), 0);
        if (activities.size() != 0) {
            voiceButton.setOnClickListener(this);
        } else {
            voiceButton.setEnabled(false);
            voiceButton.setText("Recognizer not present");
        }

    }

    public void quitApp() {
        speakWords("closing app");
        finish();
    }

    public void recogMenu() {
        speakWords("voice recognition menu");
        startActivity(new Intent("android.intent.action.RECOGNITIONMENU"));

    }

    public void informationMenu() {
        speakWords("information screen");
        startActivity(new Intent("android.intent.action.INFOSCREEN"));
    }

    public void mainMenu() {
        speakWords("main menu");
        startActivity(new Intent("android.intent.action.MENU"));
    }

    // creating method
    public void voiceinputbuttons() {
        speakButton = (Button) findViewById(R.id.btn_speak);
        mList = (ListView) findViewById(R.id.list);
    }

    // respond to button clicks
    public void onClick(View v) {
        switch (v.getId()) {

        // use switch case so each button does a different thing
        // accurately(similar to an if statement)
        case R.id.btn_speak:
            String words1 = speakButton.getText().toString();

            // speakwords(xxxx); is the piece of code that actually calls the
            // text to speech
            speakWords(words1);
            Intent voiceIntent = new Intent(
                    "android.intent.action.RECOGNITIONMENU");
            startActivity(voiceIntent);
            break;
        case R.id.aboutbutton:
            String words2 = infoButton.getText().toString();
            speakWords(words2);
            Intent infoIntent = new Intent("android.intent.action.INFOSCREEN");
            startActivity(infoIntent);
            break;
        case R.id.voicebutton:
            speakWords("Speak Now");
            startVoiceRecognitionActivity(); // call for voice recognition
                                                // activity
            break;
        case R.id.talk:
            speakWords("This is the main menu.");
            break;
        }
    }

    // speak the user text
    // setting up the speakWords code
    public void speakWords(String speech) {

        // speak straight away
        myTTS.speak(speech, TextToSpeech.QUEUE_FLUSH, null);
    }

    /**
     * Fire an intent to start the speech recognition activity.
     */
    public void startVoiceRecognitionActivity() {
        Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT,
                "Speech recognition demo");
        startActivityForResult(intent, VOICE_RECOGNITION_REQUEST_CODE);
    }

    /**
     * Handle the results from the recognition activity.
     */
    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        if (requestCode == VOICE_RECOGNITION_REQUEST_CODE
                && resultCode == RESULT_OK) {
            // Fill the list view with the strings the recognizer thought it
            // could have heard
            ArrayList<String> matches = data
                    .getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
            mList.setAdapter(new ArrayAdapter<String>(this,
                    android.R.layout.simple_list_item_1, matches));
            // matches is the result of voice input. It is a list of what the
            // user possibly said.
            // Using an if statement for the keyword you want to use allows the
            // use of any activity if keywords match
            // it is possible to set up multiple keywords to use the same
            // activity so more than one word will allow the user
            // to use the activity (makes it so the user doesn't have to
            // memorize words from a list)
            // to use an activity from the voice input information simply use
            // the following format;
            // if (matches.contains("keyword here") { startActivity(new
            // Intent("name.of.manifest.ACTIVITY")

            if (matches.contains("information")) {
                informationMenu();
            }
            if (matches.contains("info screen")) {
                informationMenu();
            }
            if (matches.contains("info")) {
                informationMenu();
            }
            if (matches.contains("about")) {
                informationMenu();
            }

            if (matches.contains("home")) {
                mainMenu();
            }
            if (matches.contains("menu")) {
                mainMenu();
            }
            if (matches.contains("home screen")) {
                mainMenu();
            }
            if (matches.contains("speak")) {
                startActivity(new Intent("android.intent.action.SPEAK"));
            }
            if (matches.contains("close")) {
                quitApp();
            }
            if (matches.contains("stop")) {
                quitApp();
            }
            if (matches.contains("finish")) {
                quitApp();
            }
            if (matches.contains("voice")) {
                recogMenu();
            }
            if (matches.contains("recognition")) {
                recogMenu();
            }
            if (matches.contains("voice recognition")) {
                recogMenu();

            }

        }

        // still in the onActivityResult: This is for the text to speech part

        if (requestCode == MY_DATA_CHECK_CODE) {
            if (resultCode == TextToSpeech.Engine.CHECK_VOICE_DATA_PASS) {
                // the user has the necessary data - create the TTS
                myTTS = new TextToSpeech(this, this);
            } else {
                // no data - install it now
                Intent installTTSIntent = new Intent();
                installTTSIntent
                        .setAction(TextToSpeech.Engine.ACTION_INSTALL_TTS_DATA);
                startActivity(installTTSIntent);
            }
        }

        super.onActivityResult(requestCode, resultCode, data);

    }


}



08-01 08:19:18.942: E/ActivityThread(7310): Activity com.example.com.proto1.menu has leaked ServiceConnection android.speech.tts.TextToSpeech$1@40d68418 that was originally bound here
08-01 08:19:18.942: E/ActivityThread(7310): android.app.ServiceConnectionLeaked: Activity com.example.com.proto1.menu has leaked ServiceConnection android.speech.tts.TextToSpeech$1@40d68418 that was originally bound here
08-01 08:19:18.942: E/ActivityThread(7310):     at android.app.LoadedApk$ServiceDispatcher.<init>(LoadedApk.java:932)
08-01 08:19:18.942: E/ActivityThread(7310):     at android.app.LoadedApk.getServiceDispatcher(LoadedApk.java:827)
08-01 08:19:18.942: E/ActivityThread(7310):     at android.app.ContextImpl.bindService(ContextImpl.java:1109)
08-01 08:19:18.942: E/ActivityThread(7310):     at android.content.ContextWrapper.bindService(ContextWrapper.java:370)
08-01 08:19:18.942: E/ActivityThread(7310):     at android.speech.tts.TextToSpeech.initTts(TextToSpeech.java:517)
08-01 08:19:18.942: E/ActivityThread(7310):     at android.speech.tts.TextToSpeech.<init>(TextToSpeech.java:483)
08-01 08:19:18.942: E/ActivityThread(7310):     at com.example.com.proto1.menu.onActivityResult(menu.java:255)
08-01 08:19:18.942: E/ActivityThread(7310):     at android.app.Activity.dispatchActivityResult(Activity.java:4581)
08-01 08:19:18.942: E/ActivityThread(7310):     at android.app.ActivityThread.deliverResults(ActivityThread.java:2817)
08-01 08:19:18.942: E/ActivityThread(7310):     at android.app.ActivityThread.handleSendResult(ActivityThread.java:2864)
08-01 08:19:18.942: E/ActivityThread(7310):     at android.app.ActivityThread.access$1000(ActivityThread.java:122)
08-01 08:19:18.942: E/ActivityThread(7310):     at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1057)
08-01 08:19:18.942: E/ActivityThread(7310):     at android.os.Handler.dispatchMessage(Handler.java:99)
08-01 08:19:18.942: E/ActivityThread(7310):     at android.os.Looper.loop(Looper.java:132)
08-01 08:19:18.942: E/ActivityThread(7310):     at android.app.ActivityThread.main(ActivityThread.java:4126)
08-01 08:19:18.942: E/ActivityThread(7310):     at java.lang.reflect.Method.invokeNative(Native Method)
08-01 08:19:18.942: E/ActivityThread(7310):     at java.lang.reflect.Method.invoke(Method.java:491)
08-01 08:19:18.942: E/ActivityThread(7310):     at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:844)
08-01 08:19:18.942: E/ActivityThread(7310):     at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:602)
08-01 08:19:18.942: E/ActivityThread(7310):     at dalvik.system.NativeStart.main(Native Method)




import java.util.ArrayList;
import java.util.List;
import java.util.Locale;

import android.app.Activity;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.os.Bundle;
import android.speech.RecognizerIntent;
import android.speech.tts.TextToSpeech;
import android.speech.tts.TextToSpeech.OnInitListener;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ListView;
import android.widget.Toast;

public class Infoactive extends Activity implements OnClickListener,
        OnInitListener {

    public static final int VOICE_RECOGNITION_REQUEST_CODE = 1234;
    public ListView mList;
    public Button speakButton, about, voicetest;
    // TTS object
    public TextToSpeech myTTS;
    // status check code
    public int MY_DATA_CHECK_CODE = 0;

    // setup TTS
    public void onInit(int initStatus) {

        // check for successful instantiation
        if (initStatus == TextToSpeech.SUCCESS) {
            if (myTTS.isLanguageAvailable(Locale.US) == TextToSpeech.LANG_AVAILABLE)
                myTTS.setLanguage(Locale.US);
        } else if (initStatus == TextToSpeech.ERROR) {
            Toast.makeText(this, "Sorry! Text To Speech failed...",
                    Toast.LENGTH_LONG).show();
        }
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.infoscreen);

        // get a reference to the button element listed in the XML layout
        about = (Button) findViewById(R.id.button1);
        voicetest = (Button) findViewById(R.id.button2);
        // listen for clicks
        about.setOnClickListener(this);

        // check for TTS data
        Intent checkTTSIntent = new Intent();
        checkTTSIntent.setAction(TextToSpeech.Engine.ACTION_CHECK_TTS_DATA);
        startActivityForResult(checkTTSIntent, MY_DATA_CHECK_CODE);

        voiceinputbuttons();

        // Check to see if a recognition activity is present
        PackageManager pm = getPackageManager();
        List<ResolveInfo> activities = pm.queryIntentActivities(new Intent(
                RecognizerIntent.ACTION_RECOGNIZE_SPEECH), 0);
        if (activities.size() != 0) {
            voicetest.setOnClickListener(this);
        } else {
            voicetest.setEnabled(false);
            voicetest.setText("Recognizer not present");
        }
    }

    // respond to button clicks
    @SuppressWarnings("unused")
    public void onClick(View v) {
        switch (v.getId()) {
        case R.id.button1:
            // get the text entered
            Button buttonText = (Button) findViewById(R.id.button1);
            String words = buttonText.getText().toString();
            speakWords("EyePhone will help vizoolly impaired users to avoid obstacles, identify faces, and recognize objects.");
            break;
        case R.id.button2:
            speakWords("Speak Now");
            startVoiceRecognitionActivity();
            break;

        }
    }

    // speak the user text
    public void speakWords(String speech) {

        // speak straight away
        myTTS.speak(speech, TextToSpeech.QUEUE_FLUSH, null);
    }

    public void voiceinputbuttons() {
        speakButton = (Button) findViewById(R.id.btn_speak);
        mList = (ListView) findViewById(R.id.list);
    }

    /**
     * Fire an intent to start the speech recognition activity.
     */
    public void startVoiceRecognitionActivity() {
        Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT,
                "Speech recognition demo");
        startActivityForResult(intent, VOICE_RECOGNITION_REQUEST_CODE);
    }

    /**
     * Handle the results from the recognition activity.
     */
    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        if (requestCode == VOICE_RECOGNITION_REQUEST_CODE
                && resultCode == RESULT_OK) {
            // Fill the list view with the strings the recognizer thought it
            // could have heard
            ArrayList<String> matches = data
                    .getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
            mList.setAdapter(new ArrayAdapter<String>(this,
                    android.R.layout.simple_list_item_1, matches));
            // matches is the result of voice input. It is a list of what the
            // user possibly said.
            // Using an if statement for the keyword you want to use allows the
            // use of any activity if keywords match
            // it is possible to set up multiple keywords to use the same
            // activity so more than one word will allow the user
            // to use the activity (makes it so the user doesn't have to
            // memorize words from a list)
            // to use an activity from the voice input information simply use
            // the following format;
            // if (matches.contains("keyword here") { startActivity(new
            // Intent("name.of.manifest.ACTIVITY")

            if (matches.contains("information")) {
                startActivity(new Intent("android.intent.action.INFOSCREEN"));
            }
            if (matches.contains("info screen")) {
                startActivity(new Intent("android.intent.action.INFOSCREEN"));
            }
            if (matches.contains("info")) {
                startActivity(new Intent("android.intent.action.INFOSCREEN"));
            }
            if (matches.contains("about")) {
                startActivity(new Intent("android.intent.action.INFOSCREEN"));
            }

            if (matches.contains("home")) {
                startActivity(new Intent("android.intent.action.MENU"));
            }
            if (matches.contains("menu")) {
                startActivity(new Intent("android.intent.action.MENU"));
            }
            if (matches.contains("home screen")) {
                startActivity(new Intent("android.intent.action.MENU"));
            }
            if (matches.contains("speak")) {
                startActivity(new Intent("android.intent.action.SPEAK"));
            }
            if (matches.contains("close")) {
                finish();
            }
            if (matches.contains("stop")) {
                finish();
            }
            if (matches.contains("finish")) {
                finish();
            }
            if (matches.contains("voice")) {
                Intent voiceIntent = new Intent(
                        "android.intent.action.RECOGNITIONMENU");
                startActivity(voiceIntent);
            }
            if (matches.contains("recognition")) {
                Intent voiceIntent = new Intent(
                        "android.intent.action.RECOGNITIONMENU");
                startActivity(voiceIntent);
            }
            if (matches.contains("voice recognition")) {
                Intent voiceIntent = new Intent(
                        "android.intent.action.RECOGNITIONMENU");
                startActivity(voiceIntent);

            }

        }
        if (requestCode == MY_DATA_CHECK_CODE) {
            if (resultCode == TextToSpeech.Engine.CHECK_VOICE_DATA_PASS) {
                // the user has the necessary data - create the TTS
                myTTS = new TextToSpeech(this, this);
            } else {
                // no data - install it now
                Intent installTTSIntent = new Intent();
                installTTSIntent
                        .setAction(TextToSpeech.Engine.ACTION_INSTALL_TTS_DATA);
                startActivity(installTTSIntent);
            }
        }

        super.onActivityResult(requestCode, resultCode, data);

    }

}

1 Answer


You need to call myTTS.shutdown() when you are finished with the TextToSpeech object, for example in onDestroy().
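
As a minimal sketch, reusing the myTTS field from the menu activity in the question, the override would look like this (the same cleanup belongs in any Activity that creates its own TextToSpeech instance, such as mainj and Infoactive):

    @Override
    protected void onDestroy() {
        // Stop any ongoing speech and release the engine so its
        // ServiceConnection is unbound before the Activity goes away;
        // skipping this is what produces the "leaked ServiceConnection"
        // error in LogCat.
        if (myTTS != null) {
            myTTS.stop();
            myTTS.shutdown();
            myTTS = null;
        }
        super.onDestroy();
    }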
