
BotCompany Repo | #1023371 // Android Cat Include (v5, multi-user, not used anymore)

JavaX fragment (include)

set flag NoAWT. set flag Android.

import android.app.*;
import android.content.*;
import android.os.Bundle;
import android.view.View;
import android.view.View.OnClickListener;
import android.speech.RecognitionListener;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;
import android.widget.Button;
import android.widget.TextView;
import android.util.Log;
import android.media.*;
sS earURL = "http://botcompany.de:8083";

static SpeechRecognizer sr;
static final String TAG = "MyStt3Activity";
static S language = "en-US";
static int extraResults = 1;

static bool fastSynthesis = false; // use device voice (faster)
static bool noSpeech = false; // disable speech output

static bool manualMode = false; // Start recognition on click only
sbool muteAlways, printErrors;
sbool listenAfterTalk = true; // it works now

static volatile bool listening, recognitionCancelled;

static new L<S> history;
static Lock speechLock = lock();

static boolean showPicture = true; // global switch. if false, you get the normal text display instead of the smiley
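// Flat list of (image snippet ID, emotion name) pairs; emo() below scans it in steps of two.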
static L<S> emotions = litlist(
  "#1001283", "happy",
  "#1001284", "unhappy",
  "#1001285", "oh",
  "#1001286", "bored",
  "#1001287", "wide mouth");

static float statusFontSize = /*25*/17;
static float userFontSize = 25;
static float myFontSize = 25;
static int borderColor = 0xFFFFFFFF;
static int waitColor = /*0xFFFF0000*/ 0xFFFFFF00; // yellow
static S lastEmotionImage, lastEmotion /*, onScreenText = ""*/;
static ImageView emoView;
static TextView statusView, userTextView, myTextView, lblInputView;
static LinearLayout leftFlag, rightFlag;
sS statusText;
static EditText inputView;
sbool recogOnActivate = true; // start listening when app activated
sbool hideTitleBar;
static CatConnector connector;
static LS postponed = syncList(); // output lines postponed because user is speaking
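// Entry point: prepares the UI, connects to the server "ear" (CatConnectorImpl), initializes the TTS engine and starts speech recognition.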
svoid androidCatMain {
  if (androidIsAdminMode()) {
    print("Going admin.");
    androidUnsetAdminMode();
    removeMyselfAsInjection();
    aShowStartScreen();
    ret;
  }

  if (hideTitleBar) aHideTitleBar();

  androidSay_keepEngine = true;

  if (muteAlways) androidMuteAudio();

  S hello = null;

  /*if (args.length != 0 && args[0].equals("nogfx"))
    setOpt(getMainClass(), "showPicture", false);*/

  try {
    history.add("*");
    //hello = callStaticAnswerMethod("*", history);
    if (hello == null) hello = german() ? "hallo" : "hello";
  } catch (Throwable e) {
    e.printStackTrace();
    return;
  }

  if (!androidIsAdminMode())
    aClearConsole();

  listening = true; // so user can cancel early
  //if (!noSpeech) say(hello);
  justASec(); // show interface
  callOptMC('happy);

  connector = new CatConnectorImpl(androidAssistant_token());
  connector.startEar(vf<S> say);

  // setLanguage(language);

  aAddMenuItems("Switch to manual mode", "Switch to auto mode");

  // init engine?
  if (german()) androidSayInGerman(""); else androidSayInEnglish("");

  if (recognitionCancelled) recognitionCancelled = false;
  else
    androidUI(f newRecognizer);

  noMainDone();
}

static void newRecognizer() {
  //print("listening");
  listening = true;
  sr = SpeechRecognizer.createSpeechRecognizer(androidActivity());
  sr.setRecognitionListener(new listener());
  recog();
}
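
// Android RecognitionListener: shows "TALK NOW" when ready, forwards recognition results to onInput(),
// and in auto mode restarts the recognizer after errors.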
static class listener implements RecognitionListener {
  public void onReadyForSpeech(Bundle params) {
    if (recognitionCancelled) {
      recognitionCancelled = false;
      sr.stopListening();
      ret;
    }
    callOptMC('setBorderAndStatus, 0xFF66FF66,
      german() ? "JETZT SPRECHEN!" : "TALK NOW!");
    callOptMC('oh);
    //showText(german() ? "SAG WAS." : "TALK NOW.");
  }

  public void onBeginningOfSpeech() {
    //showText("User talks");
    //callOptMC('oh);
    if (!manualMode && !muteAlways)
      androidMuteAudio(); // Mute now, so we don't hear the end-of-speech sound
  }

  public void onRmsChanged(float rmsdB) {}
  public void onBufferReceived(byte[] buffer) {}

  public void onEndOfSpeech() {
    ping();
    //showText("onEndOfSpeech");
    callOptMC('setBorderAndStatus, aWhite(), baseStatus());
  }

  public void onError(int error) {
    ping();
    listening = false;
    if (printErrors)
      if (error == 6) // timeout
        print("speech timeout");
      else
        print("error " + error); // screw the errors!
    try {
      sr.destroy();
    } catch (Throwable e) {
      print(e);
    }
    if (!manualMode)
      newRecognizer();
    else
      callOptMC('setBorderAndStatus, aWhite(), baseStatus());
    callOpt(getMainClass(), "happy");
  }

  public void onResults(Bundle results) {
    ping();
    listening = false;
    //showText("onResults");
    ArrayList<S> data = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    fS s = data.get(0);
    onInput(s, false);
  }

  public void onPartialResults(Bundle partialResults) {
    print("onPartialResults");
  }

  public void onEvent(int eventType, Bundle params) {
    print("onEvent" + eventType);
  }
}

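// Handles one user utterance (spoken or typed): sends it to the server, displays it, runs local commands,
// gets an answer via callStaticAnswerMethod() and, in auto mode, restarts recognition (or shuts down on goodbye).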
svoid onInput(S _s, final bool typed) {
  fS s = trim(_s); if (empty(s)) ret;
  thread "onInput" {
    connector.sendLine(s, typed);
    aSetText(userTextView, lang("You: ", "Du: ") + s);
    aPutViewBehindOtherView(userTextView, myTextView);

    showText(
      (typed
        ? lang("You typed: ", "Du schrubst: ")
        : lang("I heard: ", "Ich habe geh\u00f6rt: ")) + quote(s));

    // TODO: fix the java strings umlaut problem

    final boolean goodbye = match3("goodbye", s) || match3("bye", s) || match3("tsch\u00fcss", s) || match3("tsch\u00fcss ...", s);

    // get answer

    history.add(s);

    handleCommand(s);

    S answer;
    try {
      answer = goodbye ? "tsch\u00fcss" : callStaticAnswerMethod(s, history);
    } catch (Throwable e) {
      e.printStackTrace();
      appendToFile(getProgramFile("errors.txt"), getTheStackTrace(e));
      answer = "Fehler";
    }

    if (answer != null)
      print(answer);

    androidUI(r {
      if (goodbye) {
        print("\nGOODBYE!");
        sr.destroy();

        callOpt(getMainClass(), "disappear");
      } else {
        sr.stopListening();
        listening = false;
        //newRecognizer(); // always make a new one - gives endless errors
        if (!manualMode)
          recog();
      }
    });
  } // end of thread
}

svoid recog() {
  if (sr == null) ret with newRecognizer();
  print("recog");
  listening = true;
  justASec();
  Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
  intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
  intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
  intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, "voice.recognition.test");
  intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, extraResults);

  // customize recognition time settings
  callOpt(mc(), "recogTimeSettings", intent);

  sr.startListening(intent);
  print("started listening");
}

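// Speech output: shows the line, postpones it while the user is talking (manual mode), otherwise speaks it
// via device TTS (fastSynthesis) or a CereProc MP3, then optionally reopens the recognizer (listenAfterTalk).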
svoid say(fS s) {
  ping();
  lock speechLock;
  showText(">> " + s);
  aSetText(myTextView, lang("Me: ", "Ich: ") + s);
  aPutViewBehindOtherView(myTextView, userTextView);
  if (manualMode && listening) {
    print("User is speaking, postponing speech output.");
    postponed.add(s);
    ret;
  }
  androidUI_wait(f stopListening);
  history.add(s);
  if (!noSpeech) {
    androidUnmuteAudio();
    callOptMC('oh);
    if (fastSynthesis) {
      if (german()) androidSayInGerman(s); else androidSayInEnglish(s);
    } else {
      File mp3 = cereproc_silent(german() ? "Leopold" : "Jack", s);
      androidPlayMp3(androidActivity(), mp3);
    }
    callOptMC('happy);
    if (listenAfterTalk)
      //androidUI_noWait(r startListening); // works, but freezes UI
      androidUI_noWait(r newRecognizer);
  }
  if (muteAlways)
    androidMuteAudio();
}

static void stopListening() {
  listening = false;
  if (sr != null)
    sr.stopListening();
}

svoid startListening() {
  if (listening) ret;
  listening = true;
  recog();
}

sbool german() {
  ret swic(language, "de");
}

svoid switchToManual {
  stopListening();
  manualMode = true;
  androidUnmuteAudio();
}

svoid switchToAuto {
  manualMode = false;
  startListening();
  androidUnmuteAudio();
}

svoid onMenuCommand(S s) {
  //print("menu cmd: " + s);
  handleCommand(s);
}

// spoken, typed or through menu
svoid handleCommand(S s) {
  if "stop listening|Switch to manual mode" switchToManual();
  if "Switch to auto mode" switchToAuto();
  if "german|deutsch" setLanguage("de-DE");
  if "english|englisch" setLanguage("en-US");
}

svoid cleanMeUp_leo {
  if (sr != null) {
    sr.destroy();
    sr = null;
  }
}

sS baseStatus() {
  ret !manualMode ? "" :
    german() ? "KLICKEN ZUM SPRECHEN" : "CLICK ON ME IF YOU WANT TO TALK";
}

sS lang(S en, S de) {
  ret german() ? de : en;
}

svoid setLanguage(S l) {
  language = l;
  setCurrentBotLanguage(l);
  aSetText(lblInputView, inputViewLabel());
  androidUI_noWait(r newRecognizer);
}

svoid justASec {
  callOptMC('setBorderAndStatus, waitColor,
    german() ? "BITTE WARTEN" : "JUST A SEC"); // (initializing speech recognizer)
}

// Don't use - freezes UI
svoid _cancelRecognition {
  //print("Cancelling recognition " + listening + " " + (sr != null));
  recognitionCancelled = true;
  //stopListening();
  //listening = false;
  //if (sr != null) sr.cancel();
  //callOptMC('setBorderAndStatus, aWhite(), baseStatus());
}

static void showText(S text) {
  print(text);
  /*if (neq(onScreenText, text) && lastEmotion != null) {
    onScreenText = text;
    emo_show();
  }*/
}

static void emo(S emotion) {
  if (!showPicture) return;
  int i;
  for (i = 0; i < emotions.size(); i += 2)
    if (emotions.get(i+1).equalsIgnoreCase(emotion))
      break;
  if (i >= emotions.size()) {
    print("Emotion not found: " + emotion);
    // use the last one from the list as default
    i -= 2;
  }
  lastEmotionImage = emotions.get(i);
  lastEmotion = emotions.get(i+1);
  emo_show();
}

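// Builds the UI on first call (status line, language flags, smiley, text views, input field),
// then refreshes the emotion image, border color and flag highlighting.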
static void emo_show() {
  if (!showPicture) return;

  androidUI {
    Runnable onClick = r {
      if (!manualMode) ret;

      //if (listening)
      if (borderColor != -1) {
        //androidLater(500, r { stopListening(); });
        //showText("stop");
        stopListening();
      } else {
        //showText ("start");
        newRecognizer();
      }
    };

    if (statusView == null) {
      // init UI

      statusView = aFontSize(statusFontSize, aSetForeground(aBlack(), aCenteredTextView()));
      inputView = aSingleLineEditText();
      aOnEnter(inputView, r { onInput(aGetText(inputView), true) });
      //aOnChange(inputView, f cancelRecognition); // freezes!?
      //aOnChange(inputView, r { listening = false });
      //aOnChange(inputView, f cancelRecognition);
      lblInputView = aFontSize(20, aBottomTextView(inputViewLabel()));
      userTextView = aFontSize(userFontSize, aSetForeground(0xFF000055, aTextViewWithWindowFocusChangeNotify(voidfunc(Bool windowFocused) {
        if (windowFocused && recogOnActivate) newRecognizer();
      })));
      myTextView = aFontSize(myFontSize, aSetForeground(0xFF005500, aRightAlignedTextView()));
      androidShow(aVerticalLinearLayout(
        statusView,
        aWestCenterAndEast/*_alignTop2*/(
          leftFlag = aVerticalLinearLayout(androidClickableImage(#1101639, 0 /*transparent*/, r { setLanguage("en-US") })),
          emoView = androidClickableImage(lastEmotionImage, aWhite(), onClick),
          rightFlag = aVerticalLinearLayout(androidClickableImage(#1101638, 0 /*transparent*/, r { setLanguage("de-DE") }))
        ),
        userTextView,
        myTextView,
        androidPrintLogScrollView(),
        aWestAndCenter(lblInputView, inputView)));
    }

    if (statusText == null) statusText = baseStatus();
    aSetText(statusView, statusText);
    aSetImageFromSnippet(emoView, lastEmotionImage);
    aSetBackground(emoView, borderColor);
    aSetBackground(eq(language, "en-US") ? borderColor : 0xFFCCCCCC, leftFlag);
    aSetBackground(eq(language, "de-DE") ? borderColor : 0xFFCCCCCC, rightFlag);

    /*doEvery(1000, new Runnable {
      S text = "";

      public void run() {
        S s = aGetText(inputView);
        if (eq(s, text)) ret;
        text = s;
        cancelRecognition();
      }
    });*/
  }
}

static void setBorderAndStatus(int color, S status) {
  if (color != borderColor || neq(status, statusText)) {
    borderColor = color;
    statusText = status;
    if (lastEmotion != null)
      emo(lastEmotion);
    if (!(manualMode && listening) && nempty(postponed))
      for (S s : getAndClearList(postponed))
        say(s);
  }
}

static void setBorderColor(int color) {
  if (color != borderColor) {
    borderColor = color;
    if (lastEmotion != null)
      emo(lastEmotion);
  }
}

static void happy() { emo("happy"); }
static void unhappy() { emo("unhappy"); }
static void oh() { emo("oh"); }
static void bored() { emo("bored"); }
static void wideMouth() { emo("wide mouth"); }

static void disappear() {
  if (!showPicture) ret;
  happy();
  androidLater(1000, r {
    androidShowFullScreenColor(0xFFFFFFFF);
    androidLater(1000, r {
      System.exit(0); // totally unrecommended but works
    });
  });
}

sS inputViewLabel() {
  ret lang("Or type here:", "Oder hier tippen:");
}

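// Server protocol: sendLine() POSTs the user's line to earURL + "/heard" (with token, typed flag and language);
// startEar() long-polls earURL + "/poll" and passes each returned line to onLine (here: say).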
sinterface CatConnector {
  void sendLine(S line, bool typed);
  AutoCloseable startEar(VF1<S> onLine);
}

sclass CatConnectorImpl implements CatConnector {
  S token;

  *() {}
  *(S *token) {}

  public void sendLine(S line, bool typed) {
    postPage(earURL + "/heard", +token, +line, typed := typed ? "1" : null, +language);
  }

  public AutoCloseable startEar(final VF1<S> onLine) {
    thread "Long-Poll" {
      repeat with sleep 1 {
        pcall {
          // TODO: interrupt connection on cleanup
          S action = postPageSilently(earURL + "/poll", +token);
          for (S s : lines(action))
            pcallF(onLine, s);
        }
      }
    }
    ret null;
  }
}

Author comment

Began life as a copy of #1023361




Snippet ID: #1023371
Snippet name: Android Cat Include (v5, multi-user, not used anymore)
Eternal ID of this version: #1023371/17
Author: stefan
Category: javax / a.i.
Type: JavaX fragment (include)
Public (visible to everyone): Yes
Archived (hidden from active list): No
Created/modified: 2019-06-13 12:46:32