1 | set flag NoAWT. set flag Android. |
2 | |
3 | import android.app.*; |
4 | import android.content.*; |
5 | import android.os.Bundle; |
6 | import android.view.View; |
7 | import android.view.View.OnClickListener; |
8 | import android.speech.RecognitionListener; |
9 | import android.speech.RecognizerIntent; |
10 | import android.speech.SpeechRecognizer; |
11 | import android.widget.Button; |
12 | import android.widget.TextView; |
13 | import android.util.Log; |
14 | import android.media.*; |
15 | |
// Base URL of the "ear" server used for long-polling remote commands
// and posting recognized user input.
sS earURL = "http://botcompany.de:8082";

static SpeechRecognizer sr; // Android recognizer; recreated after each error (see listener.onError)
static final String TAG = "MyStt3Activity";
static S language = "en-US"; // current recognition + synthesis language code
static int extraResults = 1; // value for RecognizerIntent.EXTRA_MAX_RESULTS

static bool fastSynthesis = false; // use device voice (faster)
static bool noSpeech = false; // disable speech output

static bool manualMode = false; // Start recognition on click only
sbool muteAlways, printErrors;
sbool listenAfterTalk = true; // it works now

static volatile bool listening, recognitionCancelled;

static new L<S> history; // dialog history fed to the answer method
static Lock speechLock = lock(); // serializes speech output (see say)

static boolean showPicture = true; // global switch. if false, you get the normal text display instead of the smiley

// Flat pair list: image snippet ID followed by emotion name (consumed by emo()).
static L<S> emotions = litlist(
  "#1001283", "happy",
  "#1001284", "unhappy",
  "#1001285", "oh",
  "#1001286", "bored",
  "#1001287", "wide mouth");

static float statusFontSize = /*25*/17;
static float userFontSize = 25;
static float myFontSize = 25;
static int borderColor = 0xFFFFFFFF; // white border = idle
static int waitColor = /*0xFFFF0000*/ 0xFFFFFF00; // yellow
static S lastEmotionImage, lastEmotion /*, onScreenText = ""*/;
static ImageView emoView;
static TextView statusView, userTextView, myTextView, lblInputView;
static LinearLayout leftFlag, rightFlag;
sS statusText;
static EditText inputView;
sbool recogOnActivate = true; // start listening when app activated
sbool hideTitleBar;
57 | |
58 | svoid androidCatMain { |
59 | if (androidIsAdminMode()) { |
60 | print("Going admin."); |
61 | androidUnsetAdminMode(); |
62 | removeMyselfAsInjection(); |
63 | aShowStartScreen(); |
64 | ret; |
65 | } |
66 | |
67 | if (hideTitleBar) aHideTitleBar(); |
68 | |
69 | androidSay_keepEngine = true; |
70 | |
71 | if (muteAlways) androidMuteAudio(); |
72 | |
73 | S hello = null; |
74 | |
75 | /*if (args.length != 0 && args[0].equals("nogfx")) |
76 | setOpt(getMainClass(), "showPicture", false);*/ |
77 | |
78 | try { |
79 | history.add("*"); |
80 | //hello = callStaticAnswerMethod("*", history); |
81 | if (hello == null) hello = german() ? "hallo" : "hello"; |
82 | } catch (Throwable e) { |
83 | e.printStackTrace(); |
84 | return; |
85 | } |
86 | |
87 | if (!androidIsAdminMode()) |
88 | aClearConsole(); |
89 | |
90 | listening = true; // so user can cancel early |
91 | //if (!noSpeech) say(hello); |
92 | justASec(); // show interface |
93 | callOptMC('happy); |
94 | |
95 | thread "Long-Poll" { |
96 | repeat with sleep 1 { |
97 | // TODO: interrupt connection on cleanup |
98 | // TODO: make it work for other users! |
99 | S action = postPageSilently(earURL + "/poll", muricaCredentials()); |
100 | if (nempty(action)) |
101 | for (S s : lines(action)) say(s); |
102 | } |
103 | } |
104 | |
105 | // setLanguage(language); |
106 | |
107 | aAddMenuItems("Switch to manual mode", "Switch to auto mode"); |
108 | |
109 | // init engine? |
110 | if (german()) androidSayInGerman(""); else androidSayInEnglish(""); |
111 | |
112 | if (recognitionCancelled) recognitionCancelled = false; |
113 | else |
114 | androidUI(f newRecognizer); |
115 | |
116 | noMainDone(); |
117 | } |
118 | |
119 | static void newRecognizer() { |
120 | //print("listening"); |
121 | listening = true; |
122 | sr = SpeechRecognizer.createSpeechRecognizer(androidActivity()); |
123 | sr.setRecognitionListener(new listener()); |
124 | recog(); |
125 | } |
126 | |
// Recognition callbacks: drive the UI border/status colors and
// feed final results into onInput().
static class listener implements RecognitionListener {
  public void onReadyForSpeech(Bundle params) {
    if (recognitionCancelled) {
      recognitionCancelled = false;
      sr.stopListening();
      ret;
    }
    callOptMC('setBorderAndStatus, 0xFF66FF66,
      german() ? "JETZT SPRECHEN!" : "TALK NOW!");
    callOptMC('oh);
    //showText(german() ? "SAG WAS." : "TALK NOW.");
  }

  public void onBeginningOfSpeech() {
    //showText("User talks");
    //callOptMC('oh);
    if (!manualMode && !muteAlways)
      androidMuteAudio(); // Mute now, so we don't hear the end-of-speech sound
  }

  public void onRmsChanged(float rmsdB) {}
  public void onBufferReceived(byte[] buffer) {}

  public void onEndOfSpeech() {
    ping();
    //showText("onEndOfSpeech");
    callOptMC('setBorderAndStatus, aWhite(), baseStatus());
  }

  public void onError(int error) {
    ping();
    listening = false;
    // Braces added: the original nested ifs relied on dangling-else binding,
    // which is fragile to edit; behavior is unchanged.
    if (printErrors) {
      if (error == 6) // timeout
        print("speech timeout");
      else
        print("error " + error); // screw the errors!
    }
    // A recognizer that reported an error is unusable; always destroy it.
    try {
      sr.destroy();
    } catch (Throwable e) {
      print(e);
    }
    if (!manualMode)
      newRecognizer(); // auto mode: restart with a fresh recognizer
    else
      callOptMC('setBorderAndStatus, aWhite(), baseStatus());
    callOpt(getMainClass(), "happy");
  }

  public void onResults(Bundle results) {
    ping();
    listening = false;
    //showText("onResults");
    ArrayList<S> data = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    fS s = data.get(0); // take the top hypothesis only
    onInput(s, false);
  }

  public void onPartialResults(Bundle partialResults) {
    print("onPartialResults");
  }

  public void onEvent(int eventType, Bundle params) {
    // Fixed log message: was "onEvent" + eventType (missing separator).
    print("onEvent " + eventType);
  }
}
193 | |
194 | svoid onInput(S _s, final bool typed) { |
195 | fS s = trim(_s); if (empty(s)) ret; |
196 | thread "onInput" { |
197 | postPage(earURL, paramsPlus(muricaCredentials(), input := s)); |
198 | aSetText(userTextView, lang("You: ", "Du: ") + s); |
199 | aPutViewBehindOtherView(userTextView, myTextView); |
200 | |
201 | showText( |
202 | (typed |
203 | ? lang("You typed: ", "Du schrubst: ") |
204 | : lang("I heard: ", "Ich habe geh\u00f6rt: ")) + quote(s)); |
205 | |
206 | // TODO: fix the java strings umlaut problem |
207 | |
208 | final boolean goodbye = match3("goodbye", s) || match3("bye", s) || match3("tsch\u00fcss", s) || match3("tsch\u00fcss ...", s); |
209 | |
210 | // get answer |
211 | |
212 | history.add(s); |
213 | |
214 | handleCommand(s); |
215 | |
216 | S answer; |
217 | try { |
218 | answer = goodbye ? "tsch\u00fcss" : callStaticAnswerMethod(s, history); |
219 | } catch (Throwable e) { |
220 | e.printStackTrace(); |
221 | appendToFile(getProgramFile("errors.txt"), getTheStackTrace(e)); |
222 | answer = "Fehler"; |
223 | } |
224 | |
225 | if (answer != null) |
226 | print(answer); |
227 | |
228 | androidUI(r { |
229 | if (goodbye) { |
230 | print("\nGOODBYE!"); |
231 | sr.destroy(); |
232 | |
233 | callOpt(getMainClass(), "disappear"); |
234 | } else { |
235 | sr.stopListening(); |
236 | listening = false; |
237 | //newRecognizer(); // always make a new one - gives endless errors |
238 | if (!manualMode) |
239 | recog(); |
240 | } |
241 | }); |
242 | } // end of thread |
243 | } |
244 | |
245 | svoid recog() { |
246 | if (sr == null) ret with newRecognizer(); |
247 | print("recog"); |
248 | listening = true; |
249 | justASec(); |
250 | Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH); |
251 | intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,RecognizerIntent.LANGUAGE_MODEL_FREE_FORM); |
252 | intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language); |
253 | intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE,"voice.recognition.test"); |
254 | |
255 | intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, extraResults); |
256 | |
257 | // customize recognition time settings |
258 | callOpt(mc(), "recogTimeSettings", intent); |
259 | |
260 | sr.startListening(intent); |
261 | print("started listening"); |
262 | } |
263 | |
264 | svoid say(fS s) { |
265 | ping(); |
266 | lock speechLock; |
267 | showText(">> " + s); |
268 | aSetText(myTextView, lang("Me: ", "Ich: ") + s); |
269 | aPutViewBehindOtherView(myTextView, userTextView); |
270 | if (manualMode && listening) { |
271 | print("User is speaking, postponing speech output."); |
272 | ret; |
273 | } |
274 | androidUI_wait(f stopListening); |
275 | history.add(s); |
276 | mechQ(r { |
277 | appendToMechList_noUniq("Katze | Things the phone said", "[" + localDateWithMilliseconds() + "] " + s) }); |
278 | if (!noSpeech) { |
279 | androidUnmuteAudio(); |
280 | callOptMC('oh); |
281 | if (fastSynthesis) { |
282 | if (german()) androidSayInGerman(s); else androidSayInEnglish(s); |
283 | } else { |
284 | File mp3 = cereproc_silent(german() ? "Leopold" : "Jack", s); |
285 | androidPlayMp3(androidActivity(), mp3); |
286 | } |
287 | callOptMC('happy); |
288 | if (listenAfterTalk) |
289 | //androidUI_noWait(r startListening); // works, but freezes UI |
290 | androidUI_noWait(r newRecognizer); |
291 | } |
292 | if (muteAlways) |
293 | androidMuteAudio(); |
294 | } |
295 | |
296 | static void stopListening() { |
297 | listening = false; |
298 | if (sr != null) |
299 | sr.stopListening(); |
300 | } |
301 | |
302 | svoid startListening() { |
303 | if (listening) ret; |
304 | listening = true; |
305 | recog(); |
306 | } |
307 | |
308 | sbool german() { |
309 | ret swic(language, "de"); |
310 | } |
311 | |
// Manual mode: recognition starts only when the user clicks the picture.
svoid switchToManual {
  stopListening();
  manualMode = true;
  androidUnmuteAudio();
}
317 | |
// Auto mode: listen continuously, restarting recognition after each round.
svoid switchToAuto {
  manualMode = false;
  startListening();
  androidUnmuteAudio();
}
323 | |
324 | svoid onMenuCommand(S s) { |
325 | //print("menu cmd: " + s); |
326 | handleCommand(s); |
327 | } |
328 | |
329 | // spoken, typed or through menu |
330 | svoid handleCommand(S s) { |
331 | if "stop listening|Switch to manual mode" switchToManual(); |
332 | if "Switch to auto mode" switchToAuto(); |
333 | if "german|deutsch" setLanguage("de-DE"); |
334 | if "english|englisch" setLanguage("en-US"); |
335 | } |
336 | |
// Cleanup hook: releases the speech recognizer on shutdown.
svoid cleanMeUp_leo {
  if (sr != null) {
    sr.destroy();
    sr = null;
  }
}
343 | |
344 | sS baseStatus() { |
345 | ret !manualMode ? "" : |
346 | german() ? "KLICKEN ZUM SPRECHEN" : "CLICK ON ME IF YOU WANT TO TALK"; |
347 | } |
348 | |
349 | sS lang(S en, S de) { |
350 | ret german() ? de : en; |
351 | } |
352 | |
353 | svoid setLanguage(S l) { |
354 | language = l; |
355 | setCurrentBotLanguage(l); |
356 | aSetText(lblInputView, inputViewLabel()); |
357 | androidUI_noWait(r newRecognizer); |
358 | } |
359 | |
// Shows the yellow "please wait" border/status while the recognizer initializes.
svoid justASec {
  callOptMC('setBorderAndStatus, waitColor,
    german() ? "BITTE WARTEN" : "JUST A SEC"); // (initializing speech recognizer)
}
364 | |
// Don't use - freezes UI
svoid _cancelRecognition {
  //print("Cancelling recognition " + listening + " " + (sr != null));
  recognitionCancelled = true; // checked in onReadyForSpeech and androidCatMain
  //stopListening();
  //listening = false;
  //if (sr != null) sr.cancel();
  //callOptMC('setBorderAndStatus, aWhite(), baseStatus());
}
374 | |
375 | static void showText(S text) { |
376 | print(text); |
377 | /*if (neq(onScreenText, text) && lastEmotion != null) { |
378 | onScreenText = text; |
379 | emo_show(); |
380 | }*/ |
381 | } |
382 | |
383 | static void emo(S emotion) { |
384 | if (!showPicture) return; |
385 | int i; |
386 | for (i = 0; i < emotions.size(); i += 2) |
387 | if (emotions.get(i+1).equalsIgnoreCase(emotion)) |
388 | break; |
389 | if (i >= emotions.size()) { |
390 | print("Emotion not found: " + emotion); |
391 | // use the last one from the list as default |
392 | i -= 2; |
393 | } |
394 | lastEmotionImage = emotions.get(i); |
395 | lastEmotion = emotions.get(i+1); |
396 | emo_show(); |
397 | } |
398 | |
399 | static void emo_show() { |
400 | if (!showPicture) return; |
401 | |
402 | androidUI { |
403 | Runnable onClick = r { |
404 | if (!manualMode) ret; |
405 | |
406 | //if (listening) |
407 | if (borderColor != -1) { |
408 | //androidLater(500, r { stopListening(); }); |
409 | //showText("stop"); |
410 | stopListening(); |
411 | } else { |
412 | //showText ("start"); |
413 | newRecognizer(); |
414 | } |
415 | }; |
416 | |
417 | if (statusView == null) { |
418 | // init UI |
419 | |
420 | statusView = aFontSize(statusFontSize, aSetForeground(aBlack(), aCenteredTextView())); |
421 | inputView = aSingleLineEditText(); |
422 | aOnEnter(inputView, r { onInput(aGetText(inputView), true) }); |
423 | //aOnChange(inputView, f cancelRecognition); // freezes!? |
424 | //aOnChange(inputView, r { listening = false }); |
425 | //aOnChange(inputView, f cancelRecognition); |
426 | lblInputView = aFontSize(20, aBottomTextView(inputViewLabel())); |
427 | userTextView = aFontSize(userFontSize, aSetForeground(0xFF000055, aTextViewWithWindowFocusChangeNotify(voidfunc(Bool windowFocused) { |
428 | if (windowFocused && recogOnActivate) newRecognizer(); |
429 | }))); |
430 | myTextView = aFontSize(myFontSize, aSetForeground(0xFF005500, aRightAlignedTextView()); |
431 | androidShow(aVerticalLinearLayout( |
432 | statusView, |
433 | aWestCenterAndEast/*_alignTop2*/( |
434 | leftFlag = aVerticalLinearLayout(androidClickableImage(#1101639, 0 /*transparent*/, r { setLanguage("en-US") })), |
435 | emoView = androidClickableImage(lastEmotionImage, aWhite(), onClick), |
436 | rightFlag = aVerticalLinearLayout(androidClickableImage(#1101638, 0 /*transparent*/, r { setLanguage("de-DE") })) |
437 | ), |
438 | userTextView, |
439 | myTextView, |
440 | androidPrintLogScrollView(), |
441 | aWestAndCenter(lblInputView, inputView))); |
442 | } |
443 | |
444 | if (statusText == null) statusText = baseStatus(); |
445 | aSetText(statusView, statusText); |
446 | aSetImageFromSnippet(emoView, lastEmotionImage); |
447 | aSetBackground(emoView, borderColor); |
448 | aSetBackground(eq(language, "en-US") ? borderColor : 0xFFCCCCCC, leftFlag); |
449 | aSetBackground(eq(language, "de-DE") ? borderColor : 0xFFCCCCCC, rightFlag); |
450 | |
451 | |
452 | /*doEvery(1000, new Runnable { |
453 | S text = ""; |
454 | |
455 | public void run() { |
456 | S s = aGetText(inputView); |
457 | if (eq(s, text)) ret; |
458 | text = s; |
459 | cancelRecognition(); |
460 | } |
461 | });*/ |
462 | } |
463 | } |
464 | |
465 | static void setBorderAndStatus(int color, S status) { |
466 | if (color != borderColor || neq(status, statusText)) { |
467 | borderColor = color; |
468 | statusText = status; |
469 | if (lastEmotion != null) |
470 | emo(lastEmotion); |
471 | } |
472 | } |
473 | |
474 | static void setBorderColor(int color) { |
475 | if (color != borderColor) { |
476 | borderColor = color; |
477 | if (lastEmotion != null) |
478 | emo(lastEmotion); |
479 | } |
480 | } |
481 | |
482 | static void happy() { emo("happy"); } |
483 | static void unhappy() { emo("unhappy"); } |
484 | static void oh() { emo("oh"); } |
485 | static void bored() { emo("bored"); } |
486 | static void wideMouth() { emo("wide mouth"); } |
487 | |
488 | static void disappear() { |
489 | if (!showPicture) ret; |
490 | happy(); |
491 | androidLater(1000, r { |
492 | androidShowFullScreenColor(0xFFFFFFFF); |
493 | androidLater(1000, r { |
494 | System.exit(0); // totally unrecommended but works |
495 | }); |
496 | }); |
497 | } |
498 | |
499 | sS inputViewLabel() { |
500 | ret lang("Or type here:", "Oder hier tippen:"); |
501 | } |
Began life as a copy of #1023282
download show line numbers debug dex old transpilations
Travelled to 7 computer(s): bhatertpkbcr, cfunsshuasjs, mqqgnosmbjvj, pyentgdyhuwx, pzhvpgtvlbxg, tvejysmllsmz, vouqrxazstgt
No comments. add comment
Snippet ID: | #1023361 |
Snippet name: | Android Cat Main (include, v3, OLD, single-user) |
Eternal ID of this version: | #1023361/4 |
Text MD5: | ebd5c5e98ad6033b9d588e0292c1b7b1 |
Author: | stefan |
Category: | javax / a.i. |
Type: | JavaX fragment (include) |
Public (visible to everyone): | Yes |
Archived (hidden from active list): | No |
Created/modified: | 2019-06-13 12:45:18 |
Source code size: | 14049 bytes / 501 lines |
Pitched / IR pitched: | No / No |
Views / Downloads: | 244 / 366 |
Version history: | 3 change(s) |
Referenced in: | [show references] |