Not logged in.  Login/Logout/Register | List snippets | | Create snippet | Upload image | Upload data

550
LINES

< > BotCompany Repo | #1023372 // Android Cat (Assistant, multi-user, LIVE)

JavaX source code (Android) [tags: use-pretranspiled] - run with: the app

Libraryless. Click here for Pure Java version (6660L/47K/162K).

!7

// Program entry point (JavaX "p" main block): set the global behavior
// switches, then hand control to androidCatMain().
p {
  language = currentBotLanguage(); // pick up current bot language (e.g. "en-US" / "de-DE")
  muteAlways = false;              // audio output stays enabled
  fastSynthesis = true;            // use the on-device TTS voice (faster than server synthesis)
  manualMode = false;              // auto mode: recognition restarts by itself
  halfAutoMode = false; // listen again when something was heard
  androidCatMain();
}

// JavaX transpiler flags: build without AWT, target Android.
set flag NoAWT. set flag Android.

import android.app.*;
import android.content.*;
import android.os.Bundle;
import android.view.View;
import android.view.View.OnClickListener;
import android.speech.RecognitionListener;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;
import android.widget.Button;
import android.widget.TextView;
import android.util.Log;
import android.media.*;

// Base URL of the server-side "ear" (speech backend) — see CatConnectorImpl.
sS earURL = "http://botcompany.de:8083";

// Current Android speech recognizer; destroyed and recreated frequently
// (see onError / newRecognizer).
static SpeechRecognizer sr;
static final String TAG = "MyStt3Activity";
static S language = "en-US"; // active recognition/output language tag
static int extraResults = 1; // EXTRA_MAX_RESULTS passed to the recognizer

static bool fastSynthesis = false; // use device voice (faster)
static bool noSpeech = false; // disable speech output

static bool manualMode = false; // Start recognition on click only
sbool halfAutoMode;
sbool muteAlways, printErrors;
sbool listenAfterTalk = true; // it works now

// listening: a recognition round is active; recognitionCancelled: flag read
// by onReadyForSpeech to abort a round that was cancelled before it started.
static volatile bool listening, recognitionCancelled;

// Conversation history (user + bot lines) passed to the answer method.
static new L<S> history;
static Lock speechLock = lock(); // serializes say()

static boolean showPicture = true; // global switch. if false, you get the normal text display instead of the smiley
  
// Flat pair list: snippet ID of the image followed by the emotion name.
static L<S> emotions = litlist(
  "#1001283", "happy",
  "#1001284", "unhappy",
  "#1001285", "oh",
  "#1001286", "bored",
  "#1001287", "wide mouth");

// UI appearance + view handles (built lazily in emo_show).
static float statusFontSize = /*25*/17;
static float userFontSize = 25;
static float myFontSize = 25;
static int borderColor = 0xFFFFFFFF;
static int waitColor = /*0xFFFF0000*/ 0xFFFFFF00; // yellow
static S lastEmotionImage, lastEmotion /*, onScreenText = ""*/;
static ImageView emoView;
static TextView statusView, userTextView, myTextView, lblInputView;
static LinearLayout leftFlag, rightFlag;
sS statusText;
static EditText inputView;
sbool recogOnActivate = true; // start listening when app activated
sbool hideTitleBar;
static CatConnector connector; // link to the server-side ear
static LS postponed = syncList(); // output lines postponed because user is speaking

// Main startup: handles admin mode, initializes audio/TTS, greets, connects
// to the server-side ear and starts the first speech recognizer.
// NOTE(review): startup ordering (greeting, connector, recognizer) looks
// deliberate — kept as-is.
svoid androidCatMain {
  if (androidIsAdminMode()) {
    // Admin/injection mode: drop privileges and show the start screen instead.
    print("Going admin.");
    androidUnsetAdminMode();
    removeMyselfAsInjection();
    aShowStartScreen();
    ret;
  }
    
  if (hideTitleBar) aHideTitleBar();
  
  androidSay_keepEngine = true; // keep the TTS engine alive between utterances
  
  if (muteAlways) androidMuteAudio();
  
  S hello = null;
  
  /*if (args.length != 0 && args[0].equals("nogfx"))
    setOpt(getMainClass(), "showPicture", false);*/
  
  try {
    history.add("*"); // conversation-start marker
    //hello = callStaticAnswerMethod("*", history);
    if (hello == null) hello = german() ? "hallo" : "hello";
  } catch (Throwable e) {
    e.printStackTrace();
    return;
  }
  
  if (!androidIsAdminMode())
    aClearConsole();

  listening = true; // so user can cancel early
  //if (!noSpeech) say(hello);
  justASec(); // show interface
  callOptMC('happy); // show the happy cat image if available
  
  connector = new CatConnectorImpl(androidAssistant_token()); 
  connector.startEar(vf<S> say); // server-pushed lines are spoken via say()

  // setLanguage(language);
  
  aAddMenuItems("Switch to manual mode", "Switch to auto mode");
  
  // init engine?
  if (german()) androidSayInGerman(""); else androidSayInEnglish("");
  
  // A cancel may have arrived while initializing; honor it instead of listening.
  if (recognitionCancelled) recognitionCancelled = false;
  else
    androidUI(f newRecognizer);

  noMainDone();
}

// Create a fresh SpeechRecognizer bound to the current activity, attach our
// RecognitionListener and immediately start a recognition round.
// Must run on the UI thread (callers wrap it in androidUI).
static void newRecognizer() {
  //print("listening");
  listening = true;
  sr = SpeechRecognizer.createSpeechRecognizer(androidActivity());
  sr.setRecognitionListener(new listener());        
  recog();
}

// RecognitionListener driving the listen/answer loop: updates the border and
// status UI, restarts recognition after errors (auto mode), and forwards
// final results to onInput().
static class listener implements RecognitionListener {
  public void onReadyForSpeech(Bundle params) {
    // Honor a cancellation that happened before the recognizer became ready.
    if (recognitionCancelled) {
      recognitionCancelled = false;
      sr.stopListening();
      ret;
    }
    // Green border = microphone is live.
    callOptMC('setBorderAndStatus, 0xFF66FF66,
      german() ? "JETZT SPRECHEN!" : "TALK NOW!");
    callOptMC('oh);
    //showText(german() ? "SAG WAS." : "TALK NOW.");
  }
  
  public void onBeginningOfSpeech() {
    //showText("User talks");
    //callOptMC('oh);
    if (!manualMode && !muteAlways)
      androidMuteAudio(); // Mute now, so we don't hear the end-of-speech sound
  }
  
  public void onRmsChanged(float rmsdB) {}
  public void onBufferReceived(byte[] buffer) {}
  
  public void onEndOfSpeech() {
    ping();
    //showText("onEndOfSpeech");
    callOptMC('setBorderAndStatus, aWhite(), baseStatus());
  }
  
  public void onError(int error) {
     ping();
     listening = false;
     if (printErrors)
       if (error == 6) // timeout
         print("speech timeout");
       else
         print("error " +  error); // screw the errors!
     // Recognizer is unusable after an error; destroy and (in auto mode)
     // immediately build a new one.
     try {
       sr.destroy();
     } catch (Throwable e) {
       print(e);
     }
     if (!manualMode)
       newRecognizer();
     else
      callOptMC('setBorderAndStatus, aWhite(), baseStatus());
     callOpt(getMainClass(), "happy");
  }
  
  public void onResults(Bundle results) {
    ping();
    listening = false;
    //showText("onResults");
    // Only the top hypothesis is used (extraResults defaults to 1).
    ArrayList<S> data = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    fS s = data.get(0);
    onInput(s, false);
  }

  public void onPartialResults(Bundle partialResults) {
    print("onPartialResults");
  }
  
  public void onEvent(int eventType, Bundle params) {
    print("onEvent" + eventType);
  }
}
 
// Handle one line of user input (spoken or typed): send it to the server,
// show it, run local command handling, compute an answer and then either
// say goodbye + disappear or restart recognition.
svoid onInput(S _s, final bool typed) {
  fS s = trim(_s); if (empty(s)) ret; // ignore empty input
  thread "onInput" {
    connector.sendLine(s, typed); // report to the server-side ear
    aSetText(userTextView, lang("You: ", "Du: ") + s);
    aPutViewBehindOtherView(userTextView, myTextView);
  
    showText(
      (typed 
        ? lang("You typed: ", "Du schrubst: ")
        : lang("I heard: ", "Ich habe geh\u00f6rt: ")) + quote(s));
  
    // TODO: fix the java strings umlaut problem
     
    final boolean goodbye = match3("goodbye", s) || match3("bye", s) || match3("tsch\u00fcss", s) || match3("tsch\u00fcss ...", s);
  
    // get answer
    
    history.add(s);
    
    handleCommand(s); // mode/language switches, spoken or typed
    
    S answer;
    try {
      answer = goodbye ? "tsch\u00fcss" : callStaticAnswerMethod(s, history);
    } catch (Throwable e) {
      e.printStackTrace();
      appendToFile(getProgramFile("errors.txt"), getTheStackTrace(e));
      answer = "Fehler";
    }
    
    if (answer != null)
      print(answer);
      
    androidUI(r {
      if (goodbye) {
        print("\nGOODBYE!");
        sr.destroy();
        
        callOpt(getMainClass(), "disappear");
      } else {
        print("No goodbye");
        sr.stopListening();
        listening = false;
        //newRecognizer(); // always make a new one - gives endless errors
        if (!manualMode || halfAutoMode)
          recog();
      }
    });
  } // end of thread
}

// Start one recognition round on the existing recognizer (creating one first
// if necessary), using a free-form language model in the current language.
svoid recog() {
  if (sr == null) ret with newRecognizer(); // newRecognizer() calls back into recog()
  print("recog");
  listening = true;
  justASec(); // yellow "please wait" border while the recognizer spins up
  Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);        
  intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
  intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
  intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE,"voice.recognition.test");

  intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, extraResults); 
  
  // customize recognition time settings
  callOpt(mc(), "recogTimeSettings", intent);
  
  sr.startListening(intent);
  print("started listening");
}

// Speak one line of bot output: display it, add it to history and synthesize
// it (device TTS or server-side Cereproc). If the user is currently talking
// in manual mode, the line is postponed instead (flushed later by
// setBorderAndStatus). Serialized via speechLock.
svoid say(fS s) {
  ping();
  lock speechLock;
  showText(">> " + s);
  aSetText(myTextView, lang("Me: ", "Ich: ") + s);
  aPutViewBehindOtherView(myTextView, userTextView);
  if (manualMode && listening) {
    print("User is speaking, postponing speech output.");
    postponed.add(s);
    ret;
  }
  androidUI_wait(f stopListening); // don't listen to our own voice
  history.add(s);
  if (!noSpeech) {
    androidUnmuteAudio();
    callOptMC('oh); // open-mouth image while speaking
    if (fastSynthesis) {
      if (german()) androidSayInGerman(s); else androidSayInEnglish(s);
    } else {
      // server-side synthesis: fetch an MP3 and play it
      File mp3 = cereproc_silent(german() ? "Leopold" : "Jack", s);
      androidPlayMp3(androidActivity(), mp3);
    }
    callOptMC('happy);
    if (listenAfterTalk)
      //androidUI_noWait(r startListening); // works, but freezes UI
      androidUI_noWait(r newRecognizer);
  }
  if (muteAlways)
    androidMuteAudio();
}

static void stopListening() {
  listening = false;
  if (sr != null)
    sr.stopListening();
}

svoid startListening() {
  if (listening) ret;
  listening = true;
  recog();
}
  
// True when the current language is a German locale (e.g. "de-DE").
// swic = starts-with-ignore-case (JavaX helper).
sbool german() {
  ret swic(language, "de");
}

// Enter manual (click-to-talk) mode: stop the current round first so the
// mode flag takes effect for the next one, and restore audio output.
svoid switchToManual {
  stopListening();
  manualMode = true;
  androidUnmuteAudio();
}

// Enter auto mode: recognition restarts continuously. Audio is unmuted so
// the bot can be heard again.
svoid switchToAuto {
  manualMode = false;
  startListening();
  androidUnmuteAudio();
}

// Menu items share the same command strings as spoken/typed input.
svoid onMenuCommand(S s) {
  //print("menu cmd: " + s);
  handleCommand(s);
}

// spoken, typed or through menu
svoid handleCommand(S s) {
  if "stop listening|Switch to manual mode" switchToManual();
  if "Switch to auto mode" switchToAuto();
  if "german|deutsch" setLanguage("de-DE");
  if "english|englisch" setLanguage("en-US");
}

// Cleanup hook: release the speech recognizer so the next run can create
// a fresh one.
svoid cleanMeUp_leo {
  if (sr != null) {
    sr.destroy();
    sr = null;
  }
}

// Default status-bar text: empty in auto mode, otherwise a localized
// "click to talk" prompt.
sS baseStatus() {
  if (!manualMode) ret "";
  ret german() ? "KLICKEN ZUM SPRECHEN" : "CLICK ON ME IF YOU WANT TO TALK";
}

sS lang(S en, S de) {
  ret german() ? de : en;
}

// Switch recognition/output language ("en-US"/"de-DE"): propagate to the
// bot, refresh the input label and restart the recognizer on the UI thread.
svoid setLanguage(S l) {
  language = l;
  setCurrentBotLanguage(l);
  aSetText(lblInputView, inputViewLabel());
  androidUI_noWait(r newRecognizer);
}

// Show the yellow "please wait" border/status while something initializes.
svoid justASec {
  callOptMC('setBorderAndStatus, waitColor,
    german() ? "BITTE WARTEN" : "JUST A SEC"); // (initializing speech recognizer)
}

// Don't use - freezes UI
// Only sets a flag; onReadyForSpeech checks it and stops listening there.
// Direct sr.cancel()/stopListening() variants are kept below, disabled.
svoid _cancelRecognition {
  //print("Cancelling recognition " + listening + " " + (sr != null));
  recognitionCancelled = true;
  //stopListening();
  //listening = false;
  //if (sr != null) sr.cancel();
  //callOptMC('setBorderAndStatus, aWhite(), baseStatus());
}

// Print a line to the console view; the graphical on-image text overlay
// variant is disabled.
static void showText(S text) {
  print(text);
  /*if (neq(onScreenText, text) && lastEmotion != null) {
    onScreenText = text;
    emo_show();
  }*/
}
  
// Select the emotion image by name (case-insensitive) from the flat
// (snippetID, name) pair list and display it. Unknown names fall back to
// the last pair in the list, with a console note.
static void emo(S emotion) {
  if (!showPicture) return;
  int idx = -1;
  for (int j = 0; j < emotions.size(); j += 2)
    if (emotions.get(j+1).equalsIgnoreCase(emotion)) {
      idx = j;
      break;
    }
  if (idx < 0) {
    print("Emotion not found: " + emotion);
    idx = emotions.size() - 2; // default to the final pair
  }
  lastEmotionImage = emotions.get(idx);
  lastEmotion = emotions.get(idx+1);
  emo_show();
}

// Build (on first call) and refresh the emotion UI: status line, language
// flags, the clickable cat image with a colored border, user/bot text views,
// log scroller and the typed-input row. Runs on the Android UI thread.
// FIX: the myTextView line was missing a closing parenthesis.
static void emo_show() {
  if (!showPicture) return;
  
  androidUI {
    // In manual mode a click on the cat toggles listening: a non-white
    // border means a round is active (stop it), else start a new recognizer.
    Runnable onClick = r {
      if (!manualMode) ret;
      
      //if (listening)
      if (borderColor != -1) {
        //androidLater(500, r { stopListening(); });
        //showText("stop");
        stopListening();
      } else {
        //showText ("start");
        newRecognizer();
      }
    };
  
    if (statusView == null) {
      // init UI (first call only)
      
      statusView = aFontSize(statusFontSize, aSetForeground(aBlack(), aCenteredTextView()));
      inputView = aSingleLineEditText();
      aOnEnter(inputView, r { onInput(aGetText(inputView), true) });
      //aOnChange(inputView, f cancelRecognition); // freezes!?
      //aOnChange(inputView, r { listening = false });
      //aOnChange(inputView, f cancelRecognition);
      lblInputView = aFontSize(20, aBottomTextView(inputViewLabel()));
      userTextView = aFontSize(userFontSize, aSetForeground(0xFF000055, aTextViewWithWindowFocusChangeNotify(voidfunc(Bool windowFocused) {
        print("windowFocusChange " + windowFocused);
        if (windowFocused) {
          //if (recogOnActivate) newRecognizer(); // let the server do it now
          thread { connector.sendEvent('windowFocused); }
        }
      })));
      myTextView = aFontSize(myFontSize, aSetForeground(0xFF005500, aRightAlignedTextView())); // fixed: added missing ")"
      androidShow(aVerticalLinearLayout(
        statusView,
        aWestCenterAndEast/*_alignTop2*/(
          leftFlag = aVerticalLinearLayout(androidClickableImage(#1101639, 0 /*transparent*/, r { setLanguage("en-US") })),
          emoView = androidClickableImage(lastEmotionImage, aWhite(), onClick),
          rightFlag = aVerticalLinearLayout(androidClickableImage(#1101638, 0 /*transparent*/, r { setLanguage("de-DE") }))
        ),
        userTextView,
        myTextView,
        androidPrintLogScrollView(),
        aWestAndCenter(lblInputView, inputView)));
    }
    
    if (statusText == null) statusText = baseStatus();
    aSetText(statusView, statusText);
    aSetImageFromSnippet(emoView, lastEmotionImage);
    aSetBackground(emoView, borderColor);
    // Highlight the active language's flag with the current border color.
    aSetBackground(eq(language, "en-US") ? borderColor : 0xFFCCCCCC, leftFlag);
    aSetBackground(eq(language, "de-DE") ? borderColor : 0xFFCCCCCC, rightFlag);

    
    /*doEvery(1000, new Runnable {
      S text = "";
      
      public void run() {
        S s = aGetText(inputView);
        if (eq(s, text)) ret;
        text = s;
        cancelRecognition();
      }
    });*/
  }
}

// Update border color and status line; when something changed, re-render
// the emotion image and flush any postponed bot lines once the user is no
// longer talking (manual mode + listening).
static void setBorderAndStatus(int color, S status) {
  if (color != borderColor || neq(status, statusText)) {
    borderColor = color;
    statusText = status;
    if (lastEmotion != null)
      emo(lastEmotion); // redraw with the new border/status
    if (!(manualMode && listening) && nempty(postponed))
      for (S s : getAndClearList(postponed))
        say(s);
  }
}

// Change the border color and, when it actually differs, re-render the
// current emotion image so the new border becomes visible.
static void setBorderColor(int color) {
  if (color == borderColor) return;
  borderColor = color;
  if (lastEmotion != null)
    emo(lastEmotion);
}
    
// Convenience wrappers — callable by name via callOpt/callOptMC.
static void happy() { emo("happy"); }
static void unhappy() { emo("unhappy"); }
static void oh() { emo("oh"); }
static void bored() { emo("bored"); }
static void wideMouth() { emo("wide mouth"); }

// Goodbye animation: smile, fade to a white full screen after 1 s, then
// hard-exit another second later.
static void disappear() {
  if (!showPicture) ret;
  happy();
  androidLater(1000, r {
    androidShowFullScreenColor(0xFFFFFFFF);
    androidLater(1000, r {
      System.exit(0); // totally unrecommended but works
    });
  });
}

// Localized label shown next to the typed-input field.
sS inputViewLabel() {
  ret lang("Or type here:", "Oder hier tippen:");
}

// Connection to the server-side "ear": sends user input/events upstream,
// receives bot lines via long-polling.
sinterface CatConnector {
  void sendLine(S line, bool typed);      // one user line; typed=true if entered via keyboard
  void sendEvent(S event);                // UI event notification (e.g. window focus)
  AutoCloseable startEar(VF1<S> onLine);  // start polling; onLine gets each received line
}

// HTTP implementation of CatConnector against earURL.
// FIX: startEar ended with a bare "null;" statement where its AutoCloseable
// return value is expected — made the return explicit.
sclass CatConnectorImpl implements CatConnector {
  S token; // auth token identifying this client to the server
  
  *() {}
  *(S *token) {}
  
  public void sendLine(S line, bool typed) {
    // typed is sent as "1" or omitted entirely (null parameter)
    postPage(earURL + "/heard", +token, +line, typed := typed ? "1" : null, +language);
  }
  
  public void sendEvent(S event) {
    postPage(earURL + "/event", +token, what := event, +language);
  }
  
  public AutoCloseable startEar(final VF1<S> onLine) {
    // Background long-poll loop; each response line is handed to onLine.
    thread "Long-Poll" {
      repeat with sleep 1 {
        pcall {
          // TODO: interrupt connection on cleanup
          S action = postPageSilently(earURL + "/poll", +token);
          for (S s : lines(action))
            pcallF(onLine, s);
        }
      }
    }
    ret null; // no close handle yet (see TODO above)
  }
}

Author comment

Began life as a copy of #1014854

download  show line numbers  debug dex  old transpilations   

Travelled to 11 computer(s): bhatertpkbcr, bzsirmaerhyh, cfunsshuasjs, mqmicjfqzzot, mqqgnosmbjvj, odkhaoglnmdk, pyentgdyhuwx, pzhvpgtvlbxg, tvejysmllsmz, vouqrxazstgt, xrpafgyirdlv

No comments. add comment

Snippet ID: #1023372
Snippet name: Android Cat (Assistant, multi-user, LIVE)
Eternal ID of this version: #1023372/17
Text MD5: 0c336c007e45cc1ecc57d206e3a64077
Transpilation MD5: eb5ae562a3d9803df3746b3ca334fcb0
Author: stefan
Category: javax / android / a.i.
Type: JavaX source code (Android)
Public (visible to everyone): Yes
Archived (hidden from active list): No
Created/modified: 2020-02-17 13:46:44
Source code size: 15252 bytes / 550 lines
Pitched / IR pitched: No / No
Views / Downloads: 304 / 760
Version history: 16 change(s)
Referenced in: [show references]