
BotCompany Repo | #1023282 // Android Cat Main (include, v4, dev., broken?)

JavaX fragment (include) [tags: use-pretranspiled]

set flag NoAWT. set flag Android.

import android.app.*;
import android.content.*;
import android.os.Bundle;
import android.view.View;
import android.view.View.OnClickListener;
import android.speech.RecognitionListener;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;
import android.widget.Button;
import android.widget.TextView;
import android.widget.ImageView;    // used by emoView below
import android.widget.LinearLayout; // used by leftFlag/rightFlag below
import android.widget.EditText;     // used by inputView below
import android.util.Log;
import android.media.*;

sS earURL = "http://botcompany.de:8082";

static SpeechRecognizer sr;
static final String TAG = "MyStt3Activity";
static S language = "en-US";
static int extraResults = 1;

static bool fastSynthesis = false; // use device voice (faster)
static bool noSpeech = false; // disable speech output

static bool manualMode = false; // Start recognition on click only
sbool muteAlways, printErrors;
sbool listenAfterTalk = true; // listen after assistant talked. works now
sbool listenAfterRecognized = false; // listen again after some speech was recognized. implementing

static volatile bool listening, recognitionCancelled;

static new L<S> history;
static Lock speechLock = lock();

static boolean showPicture = true; // global switch. if false, you get the normal text display instead of the smiley

static L<S> emotions = litlist(
  "#1001283", "happy",
  "#1001284", "unhappy",
  "#1001285", "oh",
  "#1001286", "bored",
  "#1001287", "wide mouth");

static float statusFontSize = /*25*/17;
static float userFontSize = 25;
static float myFontSize = 25;
static int borderColor = 0xFFFFFFFF;
static int waitColor = /*0xFFFF0000*/ 0xFFFFFF00; // yellow
static S lastEmotionImage, lastEmotion /*, onScreenText = ""*/;
static ImageView emoView;
static TextView statusView, userTextView, myTextView, lblInputView;
static LinearLayout leftFlag, rightFlag;
sS statusText;
static EditText inputView;
sbool recogOnActivate = true; // start listening when app activated
sbool hideTitleBar;
static Q handleInputQ;

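// Main entry point of this include. Passes through quickly in admin mode,
// applies the audio/title-bar flags, shows the interface, starts a
// "Long-Poll" thread that fetches commands from earURL and speaks them,
// and finally starts speech recognition (unless it was cancelled already).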
svoid androidCatMain {
  if (androidIsAdminMode()) {
    print("Going admin.");
    androidUnsetAdminMode();
    removeMyselfAsInjection();
    aShowStartScreen();
    ret;
  }

  if (hideTitleBar) aHideTitleBar();

  androidSay_keepEngine = true;

  if (muteAlways) androidMuteAudio();

  S hello = null;

  /*if (args.length != 0 && args[0].equals("nogfx"))
    setOpt(getMainClass(), "showPicture", false);*/

  try {
    history.add("*");
    //hello = callStaticAnswerMethod("*", history);
    if (hello == null) hello = german() ? "hallo" : "hello";
  } catch (Throwable e) {
    e.printStackTrace();
    return;
  }

  if (!androidIsAdminMode())
    aClearConsole();

  listening = true; // so user can cancel early
  //if (!noSpeech) say(hello);
  justASec(); // show interface
  callOptMC('happy);

  thread "Long-Poll" {
    repeat with sleep 1 {
      // TODO: interrupt connection on cleanup
      // TODO: make it work for other users!
      S action = postPageSilently(earURL + "/poll", muricaCredentials());
      if (nempty(action))
        for (S s : lines(action)) say(s);
    }
  }

  // setLanguage(language);

  aAddMenuItems("Switch to manual mode", "Switch to auto mode");

  // init engine?
  if (german()) androidSayInGerman(""); else androidSayInEnglish("");

  if (recognitionCancelled) recognitionCancelled = false;
  else
    androidUI(f newRecognizer);

  noMainDone();
}

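// Creates a fresh SpeechRecognizer bound to the current activity, attaches
// the RecognitionListener defined below and immediately starts listening.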
static void newRecognizer() {
  //print("listening");
  listening = true;
  sr = SpeechRecognizer.createSpeechRecognizer(androidActivity());
  sr.setRecognitionListener(new listener());
  recog();
}

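// Callbacks for one recognition session. onReadyForSpeech turns the border
// green and shows "TALK NOW!", onResults hands the top transcription to
// onInput(), and onError tears the recognizer down and, in auto mode,
// immediately builds a new one so listening continues.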
static class listener implements RecognitionListener {
  public void onReadyForSpeech(Bundle params) {
    if (recognitionCancelled) {
      recognitionCancelled = false;
      sr.stopListening();
      ret;
    }
    callOptMC('setBorderAndStatus, 0xFF66FF66,
      german() ? "JETZT SPRECHEN!" : "TALK NOW!");
    callOptMC('oh);
    //showText(german() ? "SAG WAS." : "TALK NOW.");
  }

  public void onBeginningOfSpeech() {
    //showText("User talks");
    //callOptMC('oh);
    if (!manualMode && !muteAlways)
      androidMuteAudio(); // Mute now, so we don't hear the end-of-speech sound
  }

  public void onRmsChanged(float rmsdB) {}
  public void onBufferReceived(byte[] buffer) {}

  public void onEndOfSpeech() {
    ping();
    //showText("onEndOfSpeech");
    callOptMC('setBorderAndStatus, aWhite(), baseStatus());
  }

  public void onError(int error) {
    ping();
    listening = false;
    if (printErrors)
      if (error == 6) // timeout
        print("speech timeout");
      else
        print("error " + error); // screw the errors!
    try {
      sr.destroy();
    } catch (Throwable e) {
      print(e);
    }
    if (!manualMode)
      newRecognizer();
    else
      callOptMC('setBorderAndStatus, aWhite(), baseStatus());
    callOpt(getMainClass(), "happy");
  }

  public void onResults(Bundle results) {
    ping();
    listening = false;
    //showText("onResults");
    ArrayList<S> data = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    fS s = data.get(0);
    onInput(s, false);
  }

  public void onPartialResults(Bundle partialResults) {
    print("onPartialResults");
  }

  public void onEvent(int eventType, Bundle params) {
    print("onEvent " + eventType);
  }
}

196  
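// Processes one utterance (spoken or typed). All work runs on handleInputQ:
// show the text, post it to the ear server, check for built-in commands,
// compute an answer via callStaticAnswerMethod and print it, then either
// restart recognition or (on goodbye) shut down via disappear.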
svoid onInput(S _s, final bool typed) {
  fS s = trim(_s); if (empty(s)) ret;

  if (listenAfterRecognized && !typed)
    androidUI_noWait(r newRecognizer);

  handleInputQ.add(r {
    aSetText(userTextView, lang("You: ", "Du: ") + s);
    aPutViewBehindOtherView(userTextView, myTextView);

    showText(
      (typed
        ? lang("You typed: ", "Du schrubst: ")
        : lang("I heard: ", "Ich habe geh\u00f6rt: ")) + quote(s));

    postPage(earURL, paramsPlus(muricaCredentials(), input := s));

    // TODO: fix the java strings umlaut problem

    final boolean goodbye = match3("goodbye", s) || match3("bye", s) || match3("tsch\u00fcss", s) || match3("tsch\u00fcss ...", s);

    // get answer

    history.add(s);

    handleCommand(s);

    S answer;
    try {
      answer = goodbye ? "tsch\u00fcss" : callStaticAnswerMethod(s, history);
    } catch (Throwable e) {
      e.printStackTrace();
      appendToFile(getProgramFile("errors.txt"), getTheStackTrace(e));
      answer = "Fehler";
    }

    if (answer != null)
      print(answer);

    androidUI(r {
      if (goodbye) {
        print("\nGOODBYE!");
        sr.destroy();

        callOpt(getMainClass(), "disappear");
      } else {
        sr.stopListening();
        listening = false;
        //newRecognizer(); // always make a new one - gives endless errors
        if (!manualMode)
          recog();
      }
    });
  }); // end of input q
}

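// Starts a recognition pass on the existing SpeechRecognizer (creating one
// first if needed): free-form language model, current language and
// extraResults as the maximum number of results.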
svoid recog() {
  if (sr == null) ret with newRecognizer();
  print("recog");
  listening = true;
  justASec();
  Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
  intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
  intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
  intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, "voice.recognition.test");
  intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, extraResults);

  // customize recognition time settings
  callOpt(mc(), "recogTimeSettings", intent);

  sr.startListening(intent);
  print("started listening");
}

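// Speech output: shows and logs the text, then speaks it either with the
// device TTS engine (fastSynthesis) or by playing a CereProc-rendered MP3,
// and optionally restarts recognition afterwards (listenAfterTalk).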
svoid say(fS s) {
  ping();
  lock speechLock;
  showText(">> " + s);
  aSetText(myTextView, lang("Me: ", "Ich: ") + s);
  aPutViewBehindOtherView(myTextView, userTextView);
  if (manualMode && listening) {
    print("User is speaking, postponing speech output.");
    ret;
  }
  androidUI_wait(f stopListening);
  history.add(s);
  mechQ(r {
    appendToMechList_noUniq("Katze | Things the phone said", "[" + localDateWithMilliseconds() + "] " + s) });
  if (!noSpeech) {
    androidUnmuteAudio();
    callOptMC('oh);
    if (fastSynthesis) {
      if (german()) androidSayInGerman(s); else androidSayInEnglish(s);
    } else {
      File mp3 = cereproc_silent(german() ? "Leopold" : "Jack", s);
      androidPlayMp3(androidActivity(), mp3);
    }
    callOptMC('happy);
    if (listenAfterTalk)
      //androidUI_noWait(r startListening); // works, but freezes UI
      androidUI_noWait(r newRecognizer);
  }
  if (muteAlways)
    androidMuteAudio();
}

static void stopListening() {
  listening = false;
  if (sr != null)
    sr.stopListening();
}

svoid startListening() {
  if (listening) ret;
  listening = true;
  recog();
}

sbool german() {
  ret swic(language, "de");
}

svoid switchToManual {
  stopListening();
  manualMode = true;
  androidUnmuteAudio();
}

svoid switchToAuto {
  manualMode = false;
  startListening();
  androidUnmuteAudio();
}

svoid onMenuCommand(S s) {
  //print("menu cmd: " + s);
  handleCommand(s);
}

// spoken, typed or through menu
svoid handleCommand(S s) {
  if "stop listening|Switch to manual mode" switchToManual();
  if "Switch to auto mode" switchToAuto();
  if "german|deutsch" setLanguage("de-DE");
  if "english|englisch" setLanguage("en-US");
}

svoid cleanMeUp_leo {
  if (sr != null) {
    sr.destroy();
    sr = null;
  }
}

sS baseStatus() {
  ret !manualMode ? "" :
    german() ? "KLICKEN ZUM SPRECHEN" : "CLICK ON ME IF YOU WANT TO TALK";
}

sS lang(S en, S de) {
  ret german() ? de : en;
}

svoid setLanguage(S l) {
  language = l;
  setCurrentBotLanguage(l);
  aSetText(lblInputView, inputViewLabel());
  androidUI_noWait(r newRecognizer);
}

svoid justASec {
  callOptMC('setBorderAndStatus, waitColor,
    german() ? "BITTE WARTEN" : "JUST A SEC"); // (initializing speech recognizer)
}

// Don't use - freezes UI
svoid _cancelRecognition {
  //print("Cancelling recognition " + listening + " " + (sr != null));
  recognitionCancelled = true;
  //stopListening();
  //listening = false;
  //if (sr != null) sr.cancel();
  //callOptMC('setBorderAndStatus, aWhite(), baseStatus());
}

static void showText(S text) {
  print(text);
  /*if (neq(onScreenText, text) && lastEmotion != null) {
    onScreenText = text;
    emo_show();
  }*/
}

static void emo(S emotion) {
  if (!showPicture) return;
  int i;
  for (i = 0; i < emotions.size(); i += 2)
    if (emotions.get(i+1).equalsIgnoreCase(emotion))
      break;
  if (i >= emotions.size()) {
    print("Emotion not found: " + emotion);
    // use the last one from the list as default
    i -= 2;
  }
  lastEmotionImage = emotions.get(i);
  lastEmotion = emotions.get(i+1);
  emo_show();
}

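// Builds the UI on first call (status line, language flags, clickable
// emotion image, chat text views, log scroll view, typed-input row) and on
// every call refreshes the status text, emotion image and border/flag colors.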
static void emo_show() {
  if (!showPicture) return;

  androidUI {
    Runnable onClick = r {
      if (!manualMode) ret;

      //if (listening)
      if (borderColor != -1) {
        //androidLater(500, r { stopListening(); });
        //showText("stop");
        stopListening();
      } else {
        //showText("start");
        newRecognizer();
      }
    };

    if (statusView == null) {
      // init UI

      statusView = aFontSize(statusFontSize, aSetForeground(aBlack(), aCenteredTextView()));
      inputView = aSingleLineEditText();
      aOnEnter(inputView, r { onInput(aGetText(inputView), true) });
      //aOnChange(inputView, f cancelRecognition); // freezes!?
      //aOnChange(inputView, r { listening = false });
      //aOnChange(inputView, f cancelRecognition);
      lblInputView = aFontSize(20, aBottomTextView(inputViewLabel()));
      userTextView = aFontSize(userFontSize, aSetForeground(0xFF000055, aTextViewWithWindowFocusChangeNotify(voidfunc(Bool windowFocused) {
        if (windowFocused && recogOnActivate) newRecognizer();
      })));
      myTextView = aFontSize(myFontSize, aSetForeground(0xFF005500, aRightAlignedTextView()));
      androidShow(aVerticalLinearLayout(
        statusView,
        aWestCenterAndEast/*_alignTop2*/(
          leftFlag = aVerticalLinearLayout(androidClickableImage(#1101639, 0 /*transparent*/, r { setLanguage("en-US") })),
          emoView = androidClickableImage(lastEmotionImage, aWhite(), onClick),
          rightFlag = aVerticalLinearLayout(androidClickableImage(#1101638, 0 /*transparent*/, r { setLanguage("de-DE") }))
        ),
        userTextView,
        myTextView,
        androidPrintLogScrollView(),
        aWestAndCenter(lblInputView, inputView)));
    }

    if (statusText == null) statusText = baseStatus();
    aSetText(statusView, statusText);
    aSetImageFromSnippet(emoView, lastEmotionImage);
    aSetBackground(emoView, borderColor);
    aSetBackground(eq(language, "en-US") ? borderColor : 0xFFCCCCCC, leftFlag);
    aSetBackground(eq(language, "de-DE") ? borderColor : 0xFFCCCCCC, rightFlag);

    /*doEvery(1000, new Runnable {
      S text = "";

      public void run() {
        S s = aGetText(inputView);
        if (eq(s, text)) ret;
        text = s;
        cancelRecognition();
      }
    });*/
  }
}

473  
  if (color != borderColor || neq(status, statusText)) {
474  
    borderColor = color;
475  
    statusText = status;
476  
    if (lastEmotion != null)
477  
      emo(lastEmotion);
478  
  }
479  
}
480  
481  
static void setBorderColor(int color) {
482  
  if (color != borderColor) {
483  
    borderColor = color;
484  
    if (lastEmotion != null)
485  
      emo(lastEmotion);
486  
  }
487  
}
488  
    
489  
static void happy() { emo("happy"); }
490  
static void unhappy() { emo("unhappy"); }
491  
static void oh() { emo("oh"); }
492  
static void bored() { emo("bored"); }
493  
static void wideMouth() { emo("wide mouth"); }
494  
495  
static void disappear() {
496  
  if (!showPicture) ret;
497  
  happy();
498  
  androidLater(1000, r {
499  
    androidShowFullScreenColor(0xFFFFFFFF);
500  
    androidLater(1000, r {
501  
      System.exit(0); // totally unrecommended but works
502  
    });
503  
  });
504  
}
505  
506  
sS inputViewLabel() {
507  
  ret lang("Or type here:", "Oder hier tippen:");
508  
}
509  
510  
svoid _onLoad_handleInputQ {
511  
  handleInputQ = startQ();
512  
}

Author comment

Began life as a copy of #1023267


Travelled to 7 computer(s): bhatertpkbcr, cfunsshuasjs, mqqgnosmbjvj, pyentgdyhuwx, pzhvpgtvlbxg, tvejysmllsmz, vouqrxazstgt

No comments.

Snippet ID: #1023282
Snippet name: Android Cat Main (include, v4, dev., broken?)
Eternal ID of this version: #1023282/23
Text MD5: 12910a3bc2d1db7889a04edba7b25f07
Author: stefan
Category: javax / a.i.
Type: JavaX fragment (include)
Public (visible to everyone): Yes
Archived (hidden from active list): No
Created/modified: 2019-06-10 14:50:43
Source code size: 14366 bytes / 512 lines
Pitched / IR pitched: No / No
Views / Downloads: 200 / 303
Version history: 22 change(s)