I'm having a problem with the Speech API. The voice commands themselves work fine, but when I want the recognized speech to show up in textBox1, it doesn't appear.
This is the code I need help with. I'm working with a switch case; I've tried a few if statements as well, but none of them work.
case "listen":
AI.Speak("I am listening");
textBox1.Text = textBox1.Text + " " + e.Result.Text.ToString();
break;
Every time I say "listen", only the word "listen" appears in the textbox.
Here is the full code:
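My suspicion is that the recognizer only ever returns the phrases loaded from Commands.txt, so inside the "listen" case e.Result.Text is always just the word "listen". Something like the sketch below is what I think might be missing in Form1_Load (loading an extra DictationGrammar is only my guess, I have not tested it):

sRecognizer.SetInputToDefaultAudioDevice();
// Command phrases, same as in my full code below
sRecognizer.LoadGrammar(new Grammar(new GrammarBuilder(new Choices(File.ReadAllLines(@"D:\Bibliotheek\Mijn Documenten\Commands.txt")))));
// Guess: an extra free-text grammar so speech that is not in Commands.txt can be recognized at all
sRecognizer.LoadGrammar(new DictationGrammar());
sRecognizer.SpeechRecognized += new EventHandler<SpeechRecognizedEventArgs>(rSpeechRecognized);
sRecognizer.RecognizeAsync(RecognizeMode.Multiple);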
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Threading;
using System.Windows.Forms;
using System.Speech.Recognition;
using System.Speech.Synthesis;
using System.IO;
using System.Xml;
using System.Web;
using WindowsMicrophoneMuteLibrary;
using TweetSharp;
/*
*
*
*
*
*
*/
namespace Test
{
public partial class Form1 : Form
{
SpeechRecognitionEngine sRecognizer = new SpeechRecognitionEngine();
SpeechSynthesizer AI = new SpeechSynthesizer();
DateTime now = DateTime.Now;
Random rnd = new Random();
WindowsMicMute micMute = new WindowsMicMute();
TwitterService twitter = new TwitterService("--", "--", "--", "--");
//string QEvent;
//string ProcWindow;
//double timer = 10;
//int count = 1;
public Form1()
{
InitializeComponent();
}
private void Form1_Load(object sender, EventArgs e)
{
sRecognizer.SetInputToDefaultAudioDevice();
sRecognizer.LoadGrammar(new Grammar(new GrammarBuilder(new Choices(File.ReadAllLines(@"D:\Bibliotheek\Mijn Documenten\Commands.txt")))));
sRecognizer.SpeechRecognized += new EventHandler<SpeechRecognizedEventArgs>(rSpeechRecognized);
sRecognizer.RecognizeAsync(RecognizeMode.Multiple);
// LOAD COMMANDS AT START-UP
string[] commands = (File.ReadAllLines(@"D:\Bibliotheek\Mijn Documenten\Commands.txt"));
lstCommands.Items.Clear();
lstCommands.SelectionMode = SelectionMode.None;
foreach (string command in commands)
{
lstCommands.Items.Add(command);
}
}
void rSpeechRecognized(object sender, SpeechRecognizedEventArgs e)
{
int ranNum = rnd.Next(1, 10);
string speech = e.Result.Text;
switch (speech)
{
// GREETINGS
case "hello": // IF "hello" IS SPOKEN INTO THE MICROPHONE
if (ranNum <= 3)
{
AI.Speak("Hello Sir"); // ALS RANDOM NUMMER < 5 IS = "hello sir"
}
else if (ranNum >= 4 && ranNum <= 6)
{
AI.Speak("Greetings"); // ALS RANDOM NUMMER >= 5 IS = "greetings"
}
else if (ranNum >= 7)
{
AI.Speak("Good day to you");
}
break;
case "AI": // ALS "AI" WORDT INGESPROKEN IN DE MICROFOON
if (ranNum <= 4)
{
AI.Speak("Yes Sir"); // ALS RANDOM NUMMER < 5 IS = "yes sir"
}
else if (ranNum >= 5)
{
AI.Speak("Yes?"); // ALS RANDOM NUMMER >= 5 IS = "yes?"
}
break;
// CLOSE
case "exit program": // IF "exit program" IS SPOKEN INTO THE MICROPHONE
AI.Speak("Until next time");
this.Close(); // APPLICATION IS CLOSED
break;
// WEBSITES
case "open google": // ALS "open google" WORDT INGESPROKEN IN DE MICROFOON
System.Diagnostics.Process.Start("http://www.google.nl"); // GOOGLE WORDT GEOPEND
break;
case "open youtube": // ALS "open youtube" WORDT INGESPROKEN IN DE MICROFOON
System.Diagnostics.Process.Start("https://www.youtube.com/feed/subscriptions"); // YOUTUBE WORDT GEOPEND
break;
case "open tweakers": // ALS "tweakers" WORDT INGESPROKEN IN DE MICROFOON
System.Diagnostics.Process.Start("http://tweakers.net/"); // TWEAKERS WORDT GEOPEND
break;
// PROGRAMS
case "run guild wars": // IF "run guild wars" IS SPOKEN INTO THE MICROPHONE
System.Diagnostics.Process.Start("D:\\Entertainment\\Guild Wars 2\\Gw2.exe"); // GUILD WARS 2 IS OPENED
AI.Speak("Loading program");
break;
// INFO ABOUT TODAY
case "whats the time": // IF "whats the time" IS SPOKEN INTO THE MICROPHONE
AI.Speak(DateTime.Now.ToString("HH:mm")); // CURRENT TIME IS SPOKEN
break;
case "whats the day": // ALS "what day is it" WORDT INGESPROKEN IN DE MICROFOON
AI.Speak(DateTime.Today.ToString("dddd")); // DAG VAN VANDAAG WORDT VERTELD
break;
case "whats the date": // ALS "whats the date" WORDT INGESPROKEN IN DE MICROFOON
AI.Speak(DateTime.Today.ToString("dd-MMM-yyyy")); // DATUM VAN VANDAAG WORDT VERTELD
break;
// OTHER COMMANDS
case "go fullscreen": // IF "go fullscreen" IS SPOKEN INTO THE MICROPHONE
FormBorderStyle = FormBorderStyle.None;
WindowState = FormWindowState.Maximized;
TopMost = true;
AI.Speak("Going into fullscreen mode");
break;
case "exit fullscreen": // ALS "exit fullscreen" WORDT INGESPROKEN IN DE MICROFOON
FormBorderStyle = FormBorderStyle.Sizable;
WindowState = FormWindowState.Normal;
TopMost = false;
AI.Speak("Exiting fullscreen mode");
break;
// TWITTER
case "post on twitter":
if (listBox1.Visible == true)
{
this.textBox1.Location = new System.Drawing.Point(89, 163);
this.label1.Location = new System.Drawing.Point(18, 166);
}
textBox1.Visible = true;
label1.Visible = true;
break;
case "post":
if (textBox1.Visible == false)
{
AI.Speak("say post on twitter first");
}
else if (String.IsNullOrEmpty(textBox1.Text.Trim()))
{
AI.Speak("you will have to write down something");
}
else
{
twitter.SendTweet(new SendTweetOptions() { Status = textBox1.Text });
AI.Speak("Your tweet has been posted");
textBox1.Clear();
}
break;
case "clear post":
textBox1.Visible = false;
label1.Visible = false;
break;
case "show tweets":
listBox1.Visible = true;
label2.Visible = true;
if (textBox1.Visible == true)
{
this.textBox1.Location = new System.Drawing.Point(89, 163);
this.label1.Location = new System.Drawing.Point(18, 166);
}
listBox1.Visible = true;
label2.Visible = true;
listBox1.Items.Clear();
var getTweets = twitter.ListTweetsOnHomeTimeline(new ListTweetsOnHomeTimelineOptions() { Count = 10 });
foreach (var tweets in getTweets)
{
listBox1.Items.Add(tweets.Text);
}
break;
case "clear tweets":
listBox1.Visible = false;
label2.Visible = false;
this.textBox1.Location = new System.Drawing.Point(89, 9);
this.label1.Location = new System.Drawing.Point(18, 12);
break;
case "update tweets":
if (listBox1.Visible == false)
{
AI.Speak("I cant update without getting the tweets first");
}
else
{
listBox1.Items.Clear();
var update = twitter.ListTweetsOnHomeTimeline(new ListTweetsOnHomeTimelineOptions() { Count = 10 });
foreach (var tweets in update)
{
listBox1.Items.Add(tweets.Text);
}
}
break;
**case "listen":
AI.Speak("I am listening");
textBox1.Text = textBox1.Text + " " + e.Result.Text.ToString();
break;**
/*
case "show commands":
string[] commands = (File.ReadAllLines(@"D:\Bibliotheek\Mijn Documenten\Commands.txt"));
JARVIS.Speak("Very well");
lstCommands.Items.Clear();
lstCommands.SelectionMode = SelectionMode.None;
lstCommands.Visible = true;
foreach (string command in commands)
{
lstCommands.Items.Add(command);
}
break;
case "hide commands":
lstCommands.Visible = false;
break;
*/
}
}
private void lstCommands_SelectedIndexChanged(object sender, EventArgs e)
{
}
private void btnMic_Click(object sender, EventArgs e)
{
if (btnMic.Text == "Mute")
{
btnMic.Text = "Unmute";
micMute.MuteMic();
AI.Speak("Muted");
}
else if (btnMic.Text == "Unmute")
{
btnMic.Text = "Mute";
micMute.UnMuteMic();
AI.Speak("Unmuted");
}
}
}
}
Edit:
I need help with this part of the code:
case "listen":
AI.Speak("I am listening");
textBox1.Text = textBox1.Text + " " + e.Result.Text.ToString();
break;
When I say "listen", the AI responds with "I am listening". After that, whatever I say into the microphone should be placed in textBox1, but it isn't; it only puts "listen" in there.
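What I am after is something along these lines (the isListening flag and the "stop listening" phrase are just my idea of how it might work, not working code):

bool isListening = false; // guess: remember that "listen" was said

void rSpeechRecognized(object sender, SpeechRecognizedEventArgs e)
{
    string speech = e.Result.Text;
    // While in listening mode, append whatever is recognized to the textbox
    // instead of treating it as a command (my assumption of the desired behaviour)
    if (isListening && speech != "stop listening")
    {
        textBox1.Text = textBox1.Text + " " + speech;
        return;
    }
    switch (speech)
    {
        case "listen":
            AI.Speak("I am listening");
            isListening = true; // from here on, recognized text goes to textBox1
            break;
        case "stop listening":
            isListening = false;
            break;
        // ... all my other cases stay the same
    }
}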
Everything else works fine!