1 | set flag NoAWT. set flag Android. set flag AndroidOnly. |
2 | |
3 | import android.app.*; |
4 | import android.content.*; |
5 | import android.os.Bundle; |
6 | import android.view.View; |
7 | import android.view.View.OnClickListener; |
8 | import android.speech.RecognitionListener; |
9 | import android.speech.RecognizerIntent; |
10 | import android.speech.SpeechRecognizer; |
11 | import android.widget.Button; |
12 | import android.widget.TextView; |
13 | import android.util.Log; |
14 | import android.media.*; |
15 | |
// Speech recognizer instance; recreated for each listening session (see newRecognizer()).
static SpeechRecognizer sr;
static final String TAG = "MyStt3Activity";
// Recognition/output language tag; "de-*" selects German (see german(), setLanguage()).
static S language = "en-US";
static int extraResults = 1; // passed as RecognizerIntent.EXTRA_MAX_RESULTS

static bool fastSynthesis = false; // use device voice (faster)
static bool noSpeech = false; // disable speech output

static bool manualMode = false; // Start recognition on click only
sbool muteAlways, printErrors;
sbool listenAfterTalk = true; // it works now

// Shared between the UI thread and recognizer callbacks, hence volatile.
static volatile bool listening, recognitionCancelled;

// Dialog history (starts with a "*" marker), fed to callStaticAnswerMethod().
static new L<S> history;
static Lock speechLock = lock(); // serializes say()

static boolean showPicture = true; // global switch. if false, you get the normal text display instead of the smiley

// Flat list of (image snippet ID, emotion name) pairs; consumed by emo().
static L<S> emotions = litlist(
"#1001283", "happy",
"#1001284", "unhappy",
"#1001285", "oh",
"#1001286", "bored",
"#1001287", "wide mouth");

static float statusFontSize = /*25*/17;
static float userFontSize = 25;
static float myFontSize = 25;
// White = idle; green/red set via setBorderAndStatus during recognition.
static int borderColor = 0xFFFFFFFF;
static S lastEmotionImage, lastEmotion /*, onScreenText = ""*/;
static ImageView emoView;
static TextView statusView, userTextView, myTextView, lblInputView;
sS statusText;
static EditText inputView;
sbool recogOnActivate = true; // start listening when app activated
sbool hideTitleBar;
53 | |
// Main entry point for the cat app: greets the user, starts the
// remote "say" long-poll thread, initializes the TTS engine and
// kicks off speech recognition (unless cancelled early).
svoid androidCatMain {
  // Admin/injection bootstrap path: show start screen and exit.
  if (androidIsAdminMode()) {
    print("Going admin.");
    androidUnsetAdminMode();
    removeMyselfAsInjection();
    aShowStartScreen();
    ret;
  }

  if (hideTitleBar) aHideTitleBar();

  androidSay_keepEngine = true; // keep the TTS engine alive between utterances

  if (muteAlways) androidMuteAudio();

  S hello = null;

  /*if (args.length != 0 && args[0].equals("nogfx"))
    setOpt(getMainClass(), "showPicture", false);*/

  try {
    history.add("*"); // conversation-start marker
    //hello = callStaticAnswerMethod("*", history);
    if (hello == null) hello = german() ? "hallo" : "hello";
  } catch (Throwable e) {
    e.printStackTrace();
    return;
  }

  if (!androidIsAdminMode())
    aClearConsole();

  listening = true; // so user can cancel early
  //if (!noSpeech) say(hello);
  justASec(); // show interface
  callOptMC('happy);

  // Poll a remote queue once a second and speak any lines it returns.
  // NOTE(review): URL and credentials are hard-coded; see TODOs below.
  thread "Long-Poll" {
    repeat with sleep 1 {
      // TODO: interrupt connection on cleanup
      // TODO: make it work for other users!
      S action = postPageSilently("http://butter.botcompany.de:8080/1014866/raw", muricaCredentials());
      if (nempty(action))
        for (S s : lines(action)) say(s);
    }
  }

  // setLanguage(language);

  aAddMenuItems("Switch to manual mode", "Switch to auto mode");

  // init engine?
  if (german()) androidSayInGerman(""); else androidSayInEnglish("");

  // User may have cancelled while we were starting up.
  if (recognitionCancelled) recognitionCancelled = false;
  else
    androidUI(f newRecognizer);

  noMainDone();
}
114 | |
// (Re)create the SpeechRecognizer, attach our listener and start
// a recognition session. Must run on the UI thread.
static void newRecognizer() {
  //print("listening");
  listening = true;
  sr = SpeechRecognizer.createSpeechRecognizer(androidActivity());
  sr.setRecognitionListener(new listener());
  recog();
}
122 | |
// Callbacks from Android's SpeechRecognizer. Drives the listen/answer
// loop: onResults() feeds the top hypothesis into onInput(); onError()
// restarts recognition when in auto mode.
static class listener implements RecognitionListener {
  public void onReadyForSpeech(Bundle params) {
    // A cancel may have been requested while the recognizer was initializing.
    if (recognitionCancelled) {
      recognitionCancelled = false;
      sr.stopListening();
      ret;
    }
    callOptMC('setBorderAndStatus, 0xFF66FF66,
      german() ? "JETZT SPRECHEN!" : "TALK NOW!");
    callOptMC('oh);
    //showText(german() ? "SAG WAS." : "TALK NOW.");
  }

  public void onBeginningOfSpeech() {
    //showText("User talks");
    //callOptMC('oh);
    if (!manualMode && !muteAlways)
      androidMuteAudio(); // Mute now, so we don't hear the end-of-speech sound
  }

  public void onRmsChanged(float rmsdB) {}
  public void onBufferReceived(byte[] buffer) {}

  public void onEndOfSpeech() {
    ping();
    //showText("onEndOfSpeech");
    callOptMC('setBorderAndStatus, aWhite(), baseStatus());
  }

  public void onError(int error) {
    ping();
    listening = false;
    // FIX: braces added to make the dangling-else binding explicit.
    // The else always belonged to the inner "error == 6" check, so
    // nothing is printed unless printErrors is set — behavior unchanged.
    if (printErrors) {
      if (error == 6) // timeout
        print("speech timeout");
      else
        print("error " + error); // screw the errors!
    }
    try {
      sr.destroy();
    } catch (Throwable e) {
      print(e);
    }
    // Auto mode: restart recognition after any error (incl. timeouts).
    if (!manualMode)
      newRecognizer();
    else
      callOptMC('setBorderAndStatus, aWhite(), baseStatus());
    callOpt(getMainClass(), "happy");
  }

  public void onResults(Bundle results) {
    ping();
    listening = false;
    //showText("onResults");
    ArrayList<S> data = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    fS s = data.get(0); // best hypothesis only
    onInput(s, false);
  }

  public void onPartialResults(Bundle partialResults) {
    print("onPartialResults");
  }

  public void onEvent(int eventType, Bundle params) {
    print("onEvent" + eventType);
  }
}
189 | |
// Handle one utterance from the user, whether spoken (typed == false)
// or typed into the input field (typed == true): log it, display it,
// run command handling, get the bot's answer, then stop or restart
// recognition depending on mode and whether the user said goodbye.
svoid onInput(S _s, final bool typed) {
  fS s = trim(_s); if (empty(s)) ret;
  thread "onInput" {
    // Log the utterance to the mech list via the background queue.
    mechQ(r {
      appendToMechList_noUniq("Katze Speech Recognition Log With Date", "[" + localDateWithMilliseconds() + ", " +
        (typed ? "typed on phone" : "through phone") + "] " + s) });
    aSetText(userTextView, lang("You: ", "Du: ") + s);
    aPutViewBehindOtherView(userTextView, myTextView);

    showText(
      (typed
        ? lang("You typed: ", "Du schrubst: ")
        : lang("I heard: ", "Ich habe geh\u00f6rt: ")) + quote(s));

    // TODO: fix the java strings umlaut problem

    final boolean goodbye = match3("goodbye", s) || match3("bye", s) || match3("tsch\u00fcss", s) || match3("tsch\u00fcss ...", s);

    // get answer

    history.add(s);

    handleCommand(s);

    S answer;
    try {
      answer = goodbye ? "tsch\u00fcss" : callStaticAnswerMethod(s, history);
    } catch (Throwable e) {
      e.printStackTrace();
      appendToFile(getProgramFile("errors.txt"), getTheStackTrace(e));
      answer = "Fehler"; // German "error" shown as the fallback answer
    }

    if (answer != null)
      print(answer);

    androidUI(r {
      if (goodbye) {
        print("\nGOODBYE!");
        sr.destroy();

        callOpt(getMainClass(), "disappear");
      } else {
        sr.stopListening();
        listening = false;
        //newRecognizer(); // always make a new one - gives endless errors
        if (!manualMode)
          recog();
      }
    });
  } // end of thread
}
242 | |
// Start one recognition session on the existing recognizer
// (creating recognizer + session via newRecognizer() if absent).
svoid recog() {
  if (sr == null) ret with newRecognizer();
  print("recog");
  listening = true;
  justASec(); // red "just a sec" border while the recognizer spins up
  Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
  intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
  intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
  intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE,"voice.recognition.test");

  intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, extraResults);

  // customize recognition time settings
  callOpt(mc(), "recogTimeSettings", intent);

  sr.startListening(intent);
  print("started listening");
}
261 | |
// Speak (and display) one line of bot output. Serialized via speechLock.
// Skips audio while the user is talking in manual mode; optionally
// resumes listening afterwards (listenAfterTalk).
svoid say(fS s) {
  ping();
  lock speechLock;
  showText(">> " + s);
  aSetText(myTextView, lang("Me: ", "Ich: ") + s);
  aPutViewBehindOtherView(myTextView, userTextView);
  if (manualMode && listening) {
    print("User is speaking, postponing speech output.");
    ret;
  }
  androidUI_wait(f stopListening); // don't listen to our own speech
  history.add(s);
  mechQ(r {
    appendToMechList_noUniq("Katze | Things the phone said", "[" + localDateWithMilliseconds() + "] " + s) });
  if (!noSpeech) {
    androidUnmuteAudio();
    callOptMC('oh);
    if (fastSynthesis) {
      // Device TTS engine (faster, lower quality).
      if (german()) androidSayInGerman(s); else androidSayInEnglish(s);
    } else {
      // Remote CereProc voice rendered to MP3, then played back.
      File mp3 = cereproc_silent(german() ? "Leopold" : "Jack", s);
      androidPlayMp3(androidActivity(), mp3);
    }
    callOptMC('happy);
    if (listenAfterTalk)
      //androidUI_noWait(r startListening); // works, but freezes UI
      androidUI_noWait(r newRecognizer);
  }
  if (muteAlways)
    androidMuteAudio();
}
293 | |
294 | static void stopListening() {
|
295 | listening = false; |
296 | if (sr != null) |
297 | sr.stopListening(); |
298 | } |
299 | |
300 | svoid startListening() {
|
301 | if (listening) ret; |
302 | listening = true; |
303 | recog(); |
304 | } |
305 | |
// True if the current language tag selects German (e.g. "de-DE").
sbool german() {
  ret swic(language, "de");
}
309 | |
// Enter manual mode: recognition only starts on user click; audio unmuted.
svoid switchToManual {
  stopListening();
  manualMode = true;
  androidUnmuteAudio();
}
315 | |
// Enter auto mode: recognition restarts by itself; starts listening now.
svoid switchToAuto {
  manualMode = false;
  startListening();
  androidUnmuteAudio();
}
321 | |
// Menu selection handler — routes into the shared command dispatcher.
svoid onMenuCommand(S s) {
  //print("menu cmd: " + s);
  handleCommand(s);
}
326 | |
// spoken, typed or through menu
// Dispatches commands using JavaX pattern-if syntax:
// if "a|b" matches s against the given alternatives.
svoid handleCommand(S s) {
  if "stop listening|Switch to manual mode" switchToManual();
  if "Switch to auto mode" switchToAuto();
  if "german|deutsch" setLanguage("de-DE");
  if "english|englisch" setLanguage("en-US");
}
334 | |
// Cleanup hook: release the speech recognizer if it exists.
svoid cleanMeUp_leo {
  if (sr != null) {
    sr.destroy();
    sr = null;
  }
}
341 | |
// Idle status line: empty in auto mode, click-to-talk prompt in manual mode.
sS baseStatus() {
  ret !manualMode ? "" :
    german() ? "KLICKEN ZUM SPRECHEN" : "CLICK ON ME IF YOU WANT TO TALK";
}
346 | |
347 | sS lang(S en, S de) {
|
348 | ret german() ? de : en; |
349 | } |
350 | |
// Switch recognition + bot language, refresh the input label and
// restart the recognizer with the new language.
svoid setLanguage(S l) {
  language = l;
  setCurrentBotLanguage(l);
  aSetText(lblInputView, inputViewLabel());
  androidUI_noWait(r newRecognizer);
}
357 | |
// Show the red "please wait" border/status while the recognizer initializes.
svoid justASec {
  callOptMC('setBorderAndStatus, 0xFFFF0000,
    german() ? "BITTE WARTEN" : "JUST A SEC"); // (initializing speech recognizer)
}
362 | |
// Don't use - freezes UI
// Only sets the flag; the listener's onReadyForSpeech checks it and
// stops listening there instead of cancelling directly.
svoid _cancelRecognition {
  //print("Cancelling recognition " + listening + " " + (sr != null));
  recognitionCancelled = true;
  //stopListening();
  //listening = false;
  //if (sr != null) sr.cancel();
  //callOptMC('setBorderAndStatus, aWhite(), baseStatus());
}
372 | |
// Print a line to the console log (on-screen text overlay is disabled).
static void showText(S text) {
  print(text);
  /*if (neq(onScreenText, text) && lastEmotion != null) {
    onScreenText = text;
    emo_show();
  }*/
}
380 | |
// Show the named emotion image. Unknown names fall back to the last
// entry of the emotions list ("wide mouth").
static void emo(S emotion) {
  if (!showPicture) return;
  int i;
  // emotions is a flat list of (image snippet, name) pairs; scan names.
  for (i = 0; i < emotions.size(); i += 2)
    if (emotions.get(i+1).equalsIgnoreCase(emotion))
      break;
  if (i >= emotions.size()) {
    print("Emotion not found: " + emotion);
    // use the last one from the list as default
    i -= 2;
  }
  lastEmotionImage = emotions.get(i);
  lastEmotion = emotions.get(i+1);
  emo_show();
}
396 | |
397 | static void emo_show() {
|
398 | if (!showPicture) return; |
399 | |
400 | androidUI {
|
401 | Runnable onClick = r {
|
402 | if (!manualMode) ret; |
403 | |
404 | //if (listening) |
405 | if (borderColor != -1) {
|
406 | //androidLater(500, r { stopListening(); });
|
407 | //showText("stop");
|
408 | stopListening(); |
409 | } else {
|
410 | //showText ("start");
|
411 | newRecognizer(); |
412 | } |
413 | }; |
414 | |
415 | if (statusView == null) {
|
416 | // init UI |
417 | |
418 | statusView = aFontSize(statusFontSize, aSetForeground(aBlack(), aCenteredTextView())); |
419 | inputView = aSingleLineEditText(); |
420 | aOnEnter(inputView, r { onInput(aGetText(inputView), true) });
|
421 | //aOnChange(inputView, f cancelRecognition); // freezes!? |
422 | //aOnChange(inputView, r { listening = false });
|
423 | //aOnChange(inputView, f cancelRecognition); |
424 | lblInputView = aFontSize(20, aBottomTextView(inputViewLabel())); |
425 | userTextView = aFontSize(userFontSize, aSetForeground(0xFF000055, aTextViewWithWindowFocusChangeNotify(voidfunc(Bool windowFocused) {
|
426 | if (windowFocused && recogOnActivate) newRecognizer(); |
427 | }))); |
428 | myTextView = aFontSize(myFontSize, aSetForeground(0xFF005500, aRightAlignedTextView()); |
429 | androidShow(aVerticalLinearLayout( |
430 | statusView, |
431 | emoView = androidClickableImage(lastEmotionImage, |
432 | aWhite(), onClick), |
433 | userTextView, |
434 | myTextView, |
435 | androidPrintLogScrollView(), |
436 | aWestAndCenter(lblInputView, inputView))); |
437 | } |
438 | |
439 | if (statusText == null) statusText = baseStatus(); |
440 | aSetText(statusView, statusText); |
441 | aSetImageFromSnippet(emoView, lastEmotionImage); |
442 | aSetBackground(emoView, borderColor); |
443 | |
444 | /*doEvery(1000, new Runnable {
|
445 | S text = ""; |
446 | |
447 | public void run() {
|
448 | S s = aGetText(inputView); |
449 | if (eq(s, text)) ret; |
450 | text = s; |
451 | cancelRecognition(); |
452 | } |
453 | });*/ |
454 | } |
455 | } |
456 | |
// Update border color + status text together; redraws only on change
// and only once an emotion has been shown.
static void setBorderAndStatus(int color, S status) {
  if (color != borderColor || neq(status, statusText)) {
    borderColor = color;
    statusText = status;
    if (lastEmotion != null)
      emo(lastEmotion);
  }
}
465 | |
// Update only the border color; redraws the current emotion if changed.
static void setBorderColor(int color) {
  if (color != borderColor) {
    borderColor = color;
    if (lastEmotion != null)
      emo(lastEmotion);
  }
}
473 | |
// Convenience wrappers for each entry in the emotions list;
// invoked reflectively via callOptMC('happy) etc.
static void happy() { emo("happy"); }
static void unhappy() { emo("unhappy"); }
static void oh() { emo("oh"); }
static void bored() { emo("bored"); }
static void wideMouth() { emo("wide mouth"); }
|
479 | |
// Goodbye animation: smile, flash white full-screen, then exit the process.
static void disappear() {
  if (!showPicture) ret;
  happy();
  androidLater(1000, r {
    androidShowFullScreenColor(0xFFFFFFFF);
    androidLater(1000, r {
      System.exit(0); // totally unrecommended but works
    });
  });
}
490 | |
// Label for the typed-input field, in the current language.
sS inputViewLabel() {
  ret lang("Or type here:", "Oder hier tippen:");
}
Began life as a copy of #1001279
download show line numbers debug dex old transpilations
Travelled to 7 computer(s): bhatertpkbcr, cfunsshuasjs, mqqgnosmbjvj, pyentgdyhuwx, pzhvpgtvlbxg, tvejysmllsmz, vouqrxazstgt
No comments. add comment
| Snippet ID: | #1023267 |
| Snippet name: | Android Cat Main (include, v2, old) |
| Eternal ID of this version: | #1023267/6 |
| Text MD5: | 9a5847b67144bb3cd9d5083dac66abf5 |
| Author: | stefan |
| Category: | javax / a.i. |
| Type: | JavaX fragment (include) |
| Public (visible to everyone): | Yes |
| Archived (hidden from active list): | No |
| Created/modified: | 2019-05-22 14:12:02 |
| Source code size: | 13629 bytes / 493 lines |
| Pitched / IR pitched: | No / No |
| Views / Downloads: | 511 / 645 |
| Version history: | 5 change(s) |
| Referenced in: | [show references] |