Libraryless. Click here for Pure Java version (6660L/47K/162K).
!7

// Main program block: configures runtime flags, then hands off to androidCatMain().
p {
  language = currentBotLanguage(); // pick up bot's configured language (e.g. "en-US"/"de-DE")
  muteAlways = false;              // audio output stays audible
  fastSynthesis = true;            // use on-device TTS voice instead of server-side synthesis
  manualMode = false;              // start recognition automatically, not only on click
  halfAutoMode = false; // listen again when something was heard
  androidCatMain();
}
11 | |
12 | set flag NoAWT. set flag Android. |
13 | |
14 | import android.app.*; |
15 | import android.content.*; |
16 | import android.os.Bundle; |
17 | import android.view.View; |
18 | import android.view.View.OnClickListener; |
19 | import android.speech.RecognitionListener; |
20 | import android.speech.RecognizerIntent; |
21 | import android.speech.SpeechRecognizer; |
22 | import android.widget.Button; |
23 | import android.widget.TextView; |
24 | import android.util.Log; |
25 | import android.media.*; |
26 | |
// Server endpoint that relays speech input/output ("ear") to the bot backend.
sS earURL = "http://botcompany.de:8083";

static SpeechRecognizer sr;                    // Android speech recognizer; recreated after each error
static final String TAG = "MyStt3Activity";
static S language = "en-US";                   // language tag used for both recognition and synthesis
static int extraResults = 1;                   // max result hypotheses requested from the recognizer

static bool fastSynthesis = false; // use device voice (faster)
static bool noSpeech = false; // disable speech output

static bool manualMode = false; // Start recognition on click only
sbool halfAutoMode;             // in manual mode, listen again after something was heard
sbool muteAlways, printErrors;  // muteAlways: keep audio muted; printErrors: log recognizer errors
sbool listenAfterTalk = true; // it works now

// listening: a recognition session is in progress; recognitionCancelled: flag polled by callbacks
static volatile bool listening, recognitionCancelled;

static new L<S> history;             // dialog history (user and bot lines)
static Lock speechLock = lock();     // serializes speech output in say()

static boolean showPicture = true; // global switch. if false, you get the normal text display instead of the smiley

// Flat pair list: (image snippet ID, emotion name), scanned pairwise in emo()
static L<S> emotions = litlist(
  "#1001283", "happy",
  "#1001284", "unhappy",
  "#1001285", "oh",
  "#1001286", "bored",
  "#1001287", "wide mouth");

static float statusFontSize = /*25*/17;
static float userFontSize = 25;
static float myFontSize = 25;
static int borderColor = 0xFFFFFFFF;                 // white = idle; green/yellow set elsewhere
static int waitColor = /*0xFFFF0000*/ 0xFFFFFF00; // yellow
static S lastEmotionImage, lastEmotion /*, onScreenText = ""*/;
static ImageView emoView;                            // the smiley image
static TextView statusView, userTextView, myTextView, lblInputView;
static LinearLayout leftFlag, rightFlag;             // clickable language-flag containers
sS statusText;                                       // current status line (null → baseStatus())
static EditText inputView;                           // typed-input field
sbool recogOnActivate = true; // start listening when app activated
sbool hideTitleBar;
static CatConnector connector;                       // link to the server backend
static LS postponed = syncList(); // output lines postponed because user is speaking
71 | |
// Entry point for the assistant UI. In admin mode only shows the start screen;
// otherwise initializes audio/TTS, greets the user, connects to the server "ear"
// and kicks off the first speech recognizer. Order of calls matters (UI must be
// visible before recognition starts).
svoid androidCatMain {
  if (androidIsAdminMode()) {
    print("Going admin.");
    androidUnsetAdminMode();
    removeMyselfAsInjection();
    aShowStartScreen();
    ret;
  }

  if (hideTitleBar) aHideTitleBar();

  androidSay_keepEngine = true; // keep the TTS engine alive between utterances

  if (muteAlways) androidMuteAudio();

  S hello = null;

  /*if (args.length != 0 && args[0].equals("nogfx"))
    setOpt(getMainClass(), "showPicture", false);*/

  try {
    history.add("*"); // "*" marks conversation start in the history
    //hello = callStaticAnswerMethod("*", history);
    if (hello == null) hello = german() ? "hallo" : "hello";
  } catch (Throwable e) {
    e.printStackTrace();
    return;
  }

  if (!androidIsAdminMode())
    aClearConsole();

  listening = true; // so user can cancel early
  //if (!noSpeech) say(hello);
  justASec(); // show interface
  callOptMC('happy);

  // Connect to the backend; every line it sends is spoken via say()
  connector = new CatConnectorImpl(androidAssistant_token());
  connector.startEar(vf<S> say);

  // setLanguage(language);

  aAddMenuItems("Switch to manual mode", "Switch to auto mode");

  // init engine? (empty utterance warms up the TTS engine)
  if (german()) androidSayInGerman(""); else androidSayInEnglish("");

  // If the user cancelled while we were starting up, skip the first recognizer
  if (recognitionCancelled) recognitionCancelled = false;
  else
    androidUI(f newRecognizer);

  noMainDone();
}
125 | |
// Creates a fresh SpeechRecognizer (the old one, if any, is abandoned —
// after errors a new instance is required) and starts listening.
// Must run on the UI thread.
static void newRecognizer() {
  //print("listening");
  listening = true;
  sr = SpeechRecognizer.createSpeechRecognizer(androidActivity());
  sr.setRecognitionListener(new listener());
  recog();
}
133 | |
// RecognitionListener driving the recognize → handle → re-listen loop.
// On error the recognizer is destroyed and (in auto mode) recreated.
static class listener implements RecognitionListener {
  public void onReadyForSpeech(Bundle params) {
    // Honor a cancel request that arrived while the recognizer was starting
    if (recognitionCancelled) {
      recognitionCancelled = false;
      sr.stopListening();
      ret;
    }
    // Green border + prompt: user may talk now
    callOptMC('setBorderAndStatus, 0xFF66FF66,
      german() ? "JETZT SPRECHEN!" : "TALK NOW!");
    callOptMC('oh);
    //showText(german() ? "SAG WAS." : "TALK NOW.");
  }

  public void onBeginningOfSpeech() {
    //showText("User talks");
    //callOptMC('oh);
    if (!manualMode && !muteAlways)
      androidMuteAudio(); // Mute now, so we don't hear the end-of-speech sound
  }

  public void onRmsChanged(float rmsdB) {}
  public void onBufferReceived(byte[] buffer) {}

  public void onEndOfSpeech() {
    ping();
    //showText("onEndOfSpeech");
    callOptMC('setBorderAndStatus, aWhite(), baseStatus());
  }

  public void onError(int error) {
    ping();
    listening = false;
    // Braces added for clarity; semantics unchanged (else binds to inner if)
    if (printErrors) {
      if (error == 6) // timeout
        print("speech timeout");
      else
        print("error " + error); // screw the errors!
    }
    try {
      sr.destroy(); // a recognizer that errored must be destroyed and recreated
    } catch (Throwable e) {
      print(e);
    }
    if (!manualMode)
      newRecognizer(); // auto mode: immediately listen again
    else
      callOptMC('setBorderAndStatus, aWhite(), baseStatus());
    callOpt(getMainClass(), "happy");
  }

  public void onResults(Bundle results) {
    ping();
    listening = false;
    //showText("onResults");
    ArrayList<S> data = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    // FIX: guard against null/empty result list (recognizer may deliver no hypotheses)
    if (data == null || data.isEmpty()) ret;
    fS s = data.get(0);
    onInput(s, false);
  }

  public void onPartialResults(Bundle partialResults) {
    print("onPartialResults");
  }

  public void onEvent(int eventType, Bundle params) {
    print("onEvent" + eventType);
  }
}
200 | |
// Handles one line of user input (spoken or typed): forwards it to the server,
// updates the UI, runs local commands, computes an answer and restarts
// recognition unless the user said goodbye. Heavy work happens on a
// background thread; UI mutations are marshalled via androidUI.
svoid onInput(S _s, final bool typed) {
  fS s = trim(_s); if (empty(s)) ret;
  thread "onInput" {
    connector.sendLine(s, typed); // tell the backend what was heard/typed
    aSetText(userTextView, lang("You: ", "Du: ") + s);
    aPutViewBehindOtherView(userTextView, myTextView);

    showText(
      (typed
        ? lang("You typed: ", "Du schrubst: ")
        : lang("I heard: ", "Ich habe geh\u00f6rt: ")) + quote(s));

    // TODO: fix the java strings umlaut problem

    final boolean goodbye = match3("goodbye", s) || match3("bye", s) || match3("tsch\u00fcss", s) || match3("tsch\u00fcss ...", s);

    // get answer

    history.add(s);

    handleCommand(s); // menu-style commands (language switch, manual/auto mode)

    S answer;
    try {
      answer = goodbye ? "tsch\u00fcss" : callStaticAnswerMethod(s, history);
    } catch (Throwable e) {
      e.printStackTrace();
      appendToFile(getProgramFile("errors.txt"), getTheStackTrace(e));
      answer = "Fehler";
    }

    if (answer != null)
      print(answer);

    androidUI(r {
      if (goodbye) {
        print("\nGOODBYE!");
        sr.destroy();

        callOpt(getMainClass(), "disappear");
      } else {
        print("No goodbye");
        sr.stopListening();
        listening = false;
        //newRecognizer(); // always make a new one - gives endless errors
        if (!manualMode || halfAutoMode)
          recog(); // keep the conversation going
      }
    });
  } // end of thread
}
252 | |
// Starts a recognition session on the existing recognizer (creating one via
// newRecognizer() if needed). Builds the RecognizerIntent with the current
// language and result-count settings.
svoid recog() {
  if (sr == null) ret with newRecognizer(); // no recognizer yet → create one (which calls back here)
  print("recog");
  listening = true;
  justASec(); // show "please wait" while the recognizer spins up
  Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
  intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
  intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
  intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE,"voice.recognition.test");

  intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, extraResults);

  // customize recognition time settings
  callOpt(mc(), "recogTimeSettings", intent);

  sr.startListening(intent);
  print("started listening");
}
271 | |
// Speaks one line of bot output. Serialized by speechLock. If the user is
// currently talking (manual mode), the line is postponed and flushed later by
// setBorderAndStatus(). Chooses device TTS or server-side synthesis depending
// on fastSynthesis, then optionally starts listening again.
svoid say(fS s) {
  ping();
  lock speechLock;
  showText(">> " + s);
  aSetText(myTextView, lang("Me: ", "Ich: ") + s);
  aPutViewBehindOtherView(myTextView, userTextView);
  if (manualMode && listening) {
    print("User is speaking, postponing speech output.");
    postponed.add(s);
    ret;
  }
  androidUI_wait(f stopListening); // don't listen to our own voice
  history.add(s);
  if (!noSpeech) {
    androidUnmuteAudio();
    callOptMC('oh); // open-mouth emotion while talking
    if (fastSynthesis) {
      if (german()) androidSayInGerman(s); else androidSayInEnglish(s);
    } else {
      // server-side synthesis: fetch an mp3 and play it
      File mp3 = cereproc_silent(german() ? "Leopold" : "Jack", s);
      androidPlayMp3(androidActivity(), mp3);
    }
    callOptMC('happy);
    if (listenAfterTalk)
      //androidUI_noWait(r startListening); // works, but freezes UI
      androidUI_noWait(r newRecognizer);
  }
  if (muteAlways)
    androidMuteAudio();
}
302 | |
303 | static void stopListening() { |
304 | listening = false; |
305 | if (sr != null) |
306 | sr.stopListening(); |
307 | } |
308 | |
309 | svoid startListening() { |
310 | if (listening) ret; |
311 | listening = true; |
312 | recog(); |
313 | } |
314 | |
// True when the current language tag starts with "de" (case-insensitive),
// e.g. "de-DE" — used to pick German vs. English strings and voices.
sbool german() {
  ret swic(language, "de");
}
318 | |
// Switches to click-to-talk mode: stop recognition and restore audio.
svoid switchToManual {
  stopListening();
  manualMode = true;
  androidUnmuteAudio();
}
324 | |
// Switches to hands-free mode: resume continuous recognition and restore audio.
svoid switchToAuto {
  manualMode = false;
  startListening();
  androidUnmuteAudio();
}
330 | |
// Menu selections share the command handler with spoken/typed input.
svoid onMenuCommand(S s) {
  //print("menu cmd: " + s);
  handleCommand(s);
}
335 | |
// spoken, typed or through menu
// JavaX pattern-if syntax: if "a|b" matches s against the alternatives.
svoid handleCommand(S s) {
  if "stop listening|Switch to manual mode" switchToManual();
  if "Switch to auto mode" switchToAuto();
  if "german|deutsch" setLanguage("de-DE");
  if "english|englisch" setLanguage("en-US");
}
343 | |
// Cleanup hook: release the speech recognizer, if any.
svoid cleanMeUp_leo {
  if (sr == null) ret;
  sr.destroy();
  sr = null;
}
350 | |
351 | sS baseStatus() { |
352 | ret !manualMode ? "" : |
353 | german() ? "KLICKEN ZUM SPRECHEN" : "CLICK ON ME IF YOU WANT TO TALK"; |
354 | } |
355 | |
356 | sS lang(S en, S de) { |
357 | ret german() ? de : en; |
358 | } |
359 | |
// Changes recognition/synthesis language, propagates it to the bot,
// relabels the input field and restarts the recognizer.
svoid setLanguage(S l) {
  language = l;
  setCurrentBotLanguage(l);
  aSetText(lblInputView, inputViewLabel());
  androidUI_noWait(r newRecognizer);
}
366 | |
// Shows the yellow "please wait" border/status while the recognizer initializes.
svoid justASec {
  callOptMC('setBorderAndStatus, waitColor,
    german() ? "BITTE WARTEN" : "JUST A SEC"); // (initializing speech recognizer)
}
371 | |
// Don't use - freezes UI
// Only sets the cancel flag; the recognizer callbacks poll it.
svoid _cancelRecognition {
  //print("Cancelling recognition " + listening + " " + (sr != null));
  recognitionCancelled = true;
  //stopListening();
  //listening = false;
  //if (sr != null) sr.cancel();
  //callOptMC('setBorderAndStatus, aWhite(), baseStatus());
}
381 | |
// Currently just logs; on-screen overlay rendering is disabled.
static void showText(S text) {
  print(text);
  /*if (neq(onScreenText, text) && lastEmotion != null) {
    onScreenText = text;
    emo_show();
  }*/
}
389 | |
390 | static void emo(S emotion) { |
391 | if (!showPicture) return; |
392 | int i; |
393 | for (i = 0; i < emotions.size(); i += 2) |
394 | if (emotions.get(i+1).equalsIgnoreCase(emotion)) |
395 | break; |
396 | if (i >= emotions.size()) { |
397 | print("Emotion not found: " + emotion); |
398 | // use the last one from the list as default |
399 | i -= 2; |
400 | } |
401 | lastEmotionImage = emotions.get(i); |
402 | lastEmotion = emotions.get(i+1); |
403 | emo_show(); |
404 | } |
405 | |
406 | static void emo_show() { |
407 | if (!showPicture) return; |
408 | |
409 | androidUI { |
410 | Runnable onClick = r { |
411 | if (!manualMode) ret; |
412 | |
413 | //if (listening) |
414 | if (borderColor != -1) { |
415 | //androidLater(500, r { stopListening(); }); |
416 | //showText("stop"); |
417 | stopListening(); |
418 | } else { |
419 | //showText ("start"); |
420 | newRecognizer(); |
421 | } |
422 | }; |
423 | |
424 | if (statusView == null) { |
425 | // init UI |
426 | |
427 | statusView = aFontSize(statusFontSize, aSetForeground(aBlack(), aCenteredTextView())); |
428 | inputView = aSingleLineEditText(); |
429 | aOnEnter(inputView, r { onInput(aGetText(inputView), true) }); |
430 | //aOnChange(inputView, f cancelRecognition); // freezes!? |
431 | //aOnChange(inputView, r { listening = false }); |
432 | //aOnChange(inputView, f cancelRecognition); |
433 | lblInputView = aFontSize(20, aBottomTextView(inputViewLabel())); |
434 | userTextView = aFontSize(userFontSize, aSetForeground(0xFF000055, aTextViewWithWindowFocusChangeNotify(voidfunc(Bool windowFocused) { |
435 | print("windowFocusChange " + windowFocused); |
436 | if (windowFocused) { |
437 | //if (recogOnActivate) newRecognizer(); // let the server do it now |
438 | thread { connector.sendEvent('windowFocused); } |
439 | } |
440 | }))); |
441 | myTextView = aFontSize(myFontSize, aSetForeground(0xFF005500, aRightAlignedTextView()); |
442 | androidShow(aVerticalLinearLayout( |
443 | statusView, |
444 | aWestCenterAndEast/*_alignTop2*/( |
445 | leftFlag = aVerticalLinearLayout(androidClickableImage(#1101639, 0 /*transparent*/, r { setLanguage("en-US") })), |
446 | emoView = androidClickableImage(lastEmotionImage, aWhite(), onClick), |
447 | rightFlag = aVerticalLinearLayout(androidClickableImage(#1101638, 0 /*transparent*/, r { setLanguage("de-DE") })) |
448 | ), |
449 | userTextView, |
450 | myTextView, |
451 | androidPrintLogScrollView(), |
452 | aWestAndCenter(lblInputView, inputView))); |
453 | } |
454 | |
455 | if (statusText == null) statusText = baseStatus(); |
456 | aSetText(statusView, statusText); |
457 | aSetImageFromSnippet(emoView, lastEmotionImage); |
458 | aSetBackground(emoView, borderColor); |
459 | aSetBackground(eq(language, "en-US") ? borderColor : 0xFFCCCCCC, leftFlag); |
460 | aSetBackground(eq(language, "de-DE") ? borderColor : 0xFFCCCCCC, rightFlag); |
461 | |
462 | |
463 | /*doEvery(1000, new Runnable { |
464 | S text = ""; |
465 | |
466 | public void run() { |
467 | S s = aGetText(inputView); |
468 | if (eq(s, text)) ret; |
469 | text = s; |
470 | cancelRecognition(); |
471 | } |
472 | });*/ |
473 | } |
474 | } |
475 | |
// Updates border color + status line (redrawing only on change) and flushes
// any speech output postponed while the user was talking.
static void setBorderAndStatus(int color, S status) {
  if (color != borderColor || neq(status, statusText)) {
    borderColor = color;
    statusText = status;
    if (lastEmotion != null)
      emo(lastEmotion); // re-render with the new border/status
    if (!(manualMode && listening) && nempty(postponed))
      for (S s : getAndClearList(postponed))
        say(s);
  }
}
487 | |
488 | static void setBorderColor(int color) { |
489 | if (color != borderColor) { |
490 | borderColor = color; |
491 | if (lastEmotion != null) |
492 | emo(lastEmotion); |
493 | } |
494 | } |
495 | |
// Named shortcuts for the emotions in the `emotions` list (called via callOptMC).
static void happy() { emo("happy"); }
static void unhappy() { emo("unhappy"); }
static void oh() { emo("oh"); }
static void bored() { emo("bored"); }
static void wideMouth() { emo("wide mouth"); }
501 | |
// Goodbye animation: smile, fade to white, then exit the process.
static void disappear() {
  if (!showPicture) ret;
  happy();
  androidLater(1000, r {
    androidShowFullScreenColor(0xFFFFFFFF);
    androidLater(1000, r {
      System.exit(0); // totally unrecommended but works
    });
  });
}
512 | |
// Label for the typed-input field, localized to the current language.
sS inputViewLabel() {
  ret lang("Or type here:", "Oder hier tippen:");
}
516 | |
// Abstraction over the server link: send user input/events upstream,
// receive bot output lines via startEar's callback.
sinterface CatConnector {
  void sendLine(S line, bool typed);   // one line of user input (spoken or typed)
  void sendEvent(S event);             // UI event notification (e.g. window focused)
  AutoCloseable startEar(VF1<S> onLine); // begin receiving bot output lines
}
522 | |
// HTTP-backed connector: POSTs input/events to earURL and long-polls
// /poll for lines the bot wants spoken.
sclass CatConnectorImpl implements CatConnector {
  S token; // auth token identifying this assistant instance

  *() {}
  *(S *token) {} // JavaX shorthand: constructor assigning the `token` field

  public void sendLine(S line, bool typed) {
    // `+name` posts the variable under its own name; `:=` is a named parameter
    postPage(earURL + "/heard", +token, +line, typed := typed ? "1" : null, +language);
  }

  public void sendEvent(S event) {
    postPage(earURL + "/event", +token, what := event, +language);
  }

  // Starts a background long-poll loop; each received line goes to onLine.
  // Always returns null — the loop cannot currently be closed (see TODO).
  public AutoCloseable startEar(final VF1<S> onLine) {
    thread "Long-Poll" {
      repeat with sleep 1 {
        pcall {
          // TODO: interrupt connection on cleanup
          S action = postPageSilently(earURL + "/poll", +token);
          for (S s : lines(action))
            pcallF(onLine, s);
        }
      }
    }
    null;
  }
}
Began life as a copy of #1014854
download show line numbers debug dex old transpilations
Travelled to 11 computer(s): bhatertpkbcr, bzsirmaerhyh, cfunsshuasjs, mqmicjfqzzot, mqqgnosmbjvj, odkhaoglnmdk, pyentgdyhuwx, pzhvpgtvlbxg, tvejysmllsmz, vouqrxazstgt, xrpafgyirdlv
No comments. add comment
Snippet ID: | #1023372 |
Snippet name: | Android Cat (Assistant, multi-user, LIVE) |
Eternal ID of this version: | #1023372/17 |
Text MD5: | 0c336c007e45cc1ecc57d206e3a64077 |
Transpilation MD5: | eb5ae562a3d9803df3746b3ca334fcb0 |
Author: | stefan |
Category: | javax / android / a.i. |
Type: | JavaX source code (Android) |
Public (visible to everyone): | Yes |
Archived (hidden from active list): | No |
Created/modified: | 2020-02-17 13:46:44 |
Source code size: | 15252 bytes / 550 lines |
Pitched / IR pitched: | No / No |
Views / Downloads: | 305 / 761 |
Version history: | 16 change(s) |
Referenced in: | [show references] |