BotCompany Repo | #1023361 // Android Cat Main (include, v3, OLD, single-user)

JavaX fragment (include)

set flag NoAWT. set flag Android.

import android.app.*;
import android.content.*;
import android.os.Bundle;
import android.view.View;
import android.view.View.OnClickListener;
import android.speech.RecognitionListener;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;
import android.widget.Button;
import android.widget.TextView;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.util.Log;
import android.media.*;

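// URL of the relay ("ear") server: recognized or typed user input is POSTed here,
// and the "Long-Poll" thread below polls earURL + "/poll" for lines the cat should speak.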
sS earURL = "http://botcompany.de:8082";

static SpeechRecognizer sr;
static final String TAG = "MyStt3Activity";
static S language = "en-US";
static int extraResults = 1;

static bool fastSynthesis = false; // use device voice (faster)
static bool noSpeech = false; // disable speech output

static bool manualMode = false; // Start recognition on click only
sbool muteAlways, printErrors;
sbool listenAfterTalk = true; // it works now

static volatile bool listening, recognitionCancelled;

static new L<S> history;
static Lock speechLock = lock();

static boolean showPicture = true; // global switch. if false, you get the normal text display instead of the smiley
  
static L<S> emotions = litlist(
  "#1001283", "happy",
  "#1001284", "unhappy",
  "#1001285", "oh",
  "#1001286", "bored",
  "#1001287", "wide mouth");

static float statusFontSize = /*25*/17;
static float userFontSize = 25;
static float myFontSize = 25;
static int borderColor = 0xFFFFFFFF;
static int waitColor = /*0xFFFF0000*/ 0xFFFFFF00; // yellow
static S lastEmotionImage, lastEmotion /*, onScreenText = ""*/;
static ImageView emoView;
static TextView statusView, userTextView, myTextView, lblInputView;
static LinearLayout leftFlag, rightFlag;
sS statusText;
static EditText inputView;
sbool recogOnActivate = true; // start listening when app activated
sbool hideTitleBar;

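// Main entry point of this include. Handles the admin-mode bootstrap, optionally hides
// the title bar, shows the face UI, starts the "Long-Poll" thread that fetches remote
// "say" commands from the ear server, and (unless cancelled) creates the first recognizer.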
svoid androidCatMain {
  if (androidIsAdminMode()) {
    print("Going admin.");
    androidUnsetAdminMode();
    removeMyselfAsInjection();
    aShowStartScreen();
    ret;
  }
    
  if (hideTitleBar) aHideTitleBar();
  
  androidSay_keepEngine = true;
  
  if (muteAlways) androidMuteAudio();
  
  S hello = null;
  
  /*if (args.length != 0 && args[0].equals("nogfx"))
    setOpt(getMainClass(), "showPicture", false);*/
  
  try {
    history.add("*");
    //hello = callStaticAnswerMethod("*", history);
    if (hello == null) hello = german() ? "hallo" : "hello";
  } catch (Throwable e) {
    e.printStackTrace();
    return;
  }
  
  if (!androidIsAdminMode())
    aClearConsole();

  listening = true; // so user can cancel early
  //if (!noSpeech) say(hello);
  justASec(); // show interface
  callOptMC('happy);
    
  thread "Long-Poll" {
    repeat with sleep 1 {
      // TODO: interrupt connection on cleanup
      // TODO: make it work for other users!
      S action = postPageSilently(earURL + "/poll", muricaCredentials());
      if (nempty(action))
        for (S s : lines(action)) say(s);
    }
  }

  // setLanguage(language);
  
  aAddMenuItems("Switch to manual mode", "Switch to auto mode");
  
  // init engine?
  if (german()) androidSayInGerman(""); else androidSayInEnglish("");
  
  if (recognitionCancelled) recognitionCancelled = false;
  else
    androidUI(f newRecognizer);

  noMainDone();
}

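// Creates a fresh SpeechRecognizer bound to the current activity, attaches the
// listener below and immediately starts a recognition round via recog().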
static void newRecognizer() {
  //print("listening");
  listening = true;
  sr = SpeechRecognizer.createSpeechRecognizer(androidActivity());
  sr.setRecognitionListener(new listener());        
  recog();
}

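// RecognitionListener implementation: updates the face's border color and status text
// while listening, mutes audio during user speech, restarts recognition after errors
// (unless in manual mode) and forwards final results to onInput().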
static class listener implements RecognitionListener {
  public void onReadyForSpeech(Bundle params) {
    if (recognitionCancelled) {
      recognitionCancelled = false;
      sr.stopListening();
      ret;
    }
    callOptMC('setBorderAndStatus, 0xFF66FF66,
      german() ? "JETZT SPRECHEN!" : "TALK NOW!");
    callOptMC('oh);
    //showText(german() ? "SAG WAS." : "TALK NOW.");
  }
  
  public void onBeginningOfSpeech() {
    //showText("User talks");
    //callOptMC('oh);
    if (!manualMode && !muteAlways)
      androidMuteAudio(); // Mute now, so we don't hear the end-of-speech sound
  }
  
  public void onRmsChanged(float rmsdB) {}
  public void onBufferReceived(byte[] buffer) {}
  
  public void onEndOfSpeech() {
    ping();
    //showText("onEndOfSpeech");
    callOptMC('setBorderAndStatus, aWhite(), baseStatus());
  }
  
  public void onError(int error) {
    ping();
    listening = false;
    if (printErrors)
      if (error == 6) // timeout
        print("speech timeout");
      else
        print("error " + error); // screw the errors!
    try {
      sr.destroy();
    } catch (Throwable e) {
      print(e);
    }
    if (!manualMode)
      newRecognizer();
    else
      callOptMC('setBorderAndStatus, aWhite(), baseStatus());
    callOpt(getMainClass(), "happy");
  }
  
  public void onResults(Bundle results) {
    ping();
    listening = false;
    //showText("onResults");
    ArrayList<S> data = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    fS s = data.get(0);
    onInput(s, false);
  }

  public void onPartialResults(Bundle partialResults) {
    print("onPartialResults");
  }
  
  public void onEvent(int eventType, Bundle params) {
    print("onEvent" + eventType);
  }
}
 
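// Handles one piece of user input, whether spoken or typed. Runs in its own thread:
// posts the input to the ear server, updates the on-screen transcript, computes an
// answer via callStaticAnswerMethod and prints it, then either shuts down (on goodbye)
// or restarts recognition in auto mode.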
svoid onInput(S _s, final bool typed) {
  fS s = trim(_s); if (empty(s)) ret;
  thread "onInput" {
    postPage(earURL, paramsPlus(muricaCredentials(), input := s));
    aSetText(userTextView, lang("You: ", "Du: ") + s);
    aPutViewBehindOtherView(userTextView, myTextView);
  
    showText(
      (typed 
        ? lang("You typed: ", "Du schrubst: ")
        : lang("I heard: ", "Ich habe geh\u00f6rt: ")) + quote(s));
  
    // TODO: fix the java strings umlaut problem
     
    final boolean goodbye = match3("goodbye", s) || match3("bye", s) || match3("tsch\u00fcss", s) || match3("tsch\u00fcss ...", s);
  
    // get answer
    
    history.add(s);
    
    handleCommand(s);
    
    S answer;
    try {
      answer = goodbye ? "tsch\u00fcss" : callStaticAnswerMethod(s, history);
    } catch (Throwable e) {
      e.printStackTrace();
      appendToFile(getProgramFile("errors.txt"), getTheStackTrace(e));
      answer = "Fehler";
    }
    
    if (answer != null)
      print(answer);
      
    androidUI(r {
      if (goodbye) {
        print("\nGOODBYE!");
        sr.destroy();
        
        callOpt(getMainClass(), "disappear");
      } else {
        sr.stopListening();
        listening = false;
        //newRecognizer(); // always make a new one - gives endless errors
        if (!manualMode)
          recog();
      }
    });
  } // end of thread
}

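// Starts one round of speech recognition on the existing SpeechRecognizer (creating
// one first if needed), using a free-form RecognizerIntent in the current language.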
svoid recog() {
  if (sr == null) ret with newRecognizer();
  print("recog");
  listening = true;
  justASec();
  Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
  intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
  intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
  intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, "voice.recognition.test");
  intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, extraResults);
  
  // customize recognition time settings
  callOpt(mc(), "recogTimeSettings", intent);
  
  sr.startListening(intent);
  print("started listening");
}

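// Speech output path: shows the text, logs it to the mech list, then speaks it either
// with the device TTS engine (fastSynthesis) or via a CereProc-generated MP3, and
// optionally resumes listening afterwards. Serialized through speechLock.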
svoid say(fS s) {
  ping();
  lock speechLock;
  showText(">> " + s);
  aSetText(myTextView, lang("Me: ", "Ich: ") + s);
  aPutViewBehindOtherView(myTextView, userTextView);
  if (manualMode && listening) {
    print("User is speaking, postponing speech output.");
    ret;
  }
  androidUI_wait(f stopListening);
  history.add(s);
  mechQ(r {
    appendToMechList_noUniq("Katze | Things the phone said", "[" + localDateWithMilliseconds() + "] " + s) });
  if (!noSpeech) {
    androidUnmuteAudio();
    callOptMC('oh);
    if (fastSynthesis) {
      if (german()) androidSayInGerman(s); else androidSayInEnglish(s);
    } else {
      File mp3 = cereproc_silent(german() ? "Leopold" : "Jack", s);
      androidPlayMp3(androidActivity(), mp3);
    }
    callOptMC('happy);
    if (listenAfterTalk)
      //androidUI_noWait(r startListening); // works, but freezes UI
      androidUI_noWait(r newRecognizer);
  }
  if (muteAlways)
    androidMuteAudio();
}

static void stopListening() {
  listening = false;
  if (sr != null)
    sr.stopListening();
}

svoid startListening() {
  if (listening) ret;
  listening = true;
  recog();
}
  
sbool german() {
  ret swic(language, "de");
}

svoid switchToManual {
  stopListening();
  manualMode = true;
  androidUnmuteAudio();
}

svoid switchToAuto {
  manualMode = false;
  startListening();
  androidUnmuteAudio();
}

svoid onMenuCommand(S s) {
  //print("menu cmd: " + s);
  handleCommand(s);
}

// spoken, typed or through menu
svoid handleCommand(S s) {
  if "stop listening|Switch to manual mode" switchToManual();
  if "Switch to auto mode" switchToAuto();
  if "german|deutsch" setLanguage("de-DE");
  if "english|englisch" setLanguage("en-US");
}

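// Cleanup hook (cleanMeUp_* functions are run when the program is cleaned up):
// releases the SpeechRecognizer.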
svoid cleanMeUp_leo {
  if (sr != null) {
    sr.destroy();
    sr = null;
  }
}

sS baseStatus() {
  ret !manualMode ? "" :
    german() ? "KLICKEN ZUM SPRECHEN" : "CLICK ON ME IF YOU WANT TO TALK";
}

sS lang(S en, S de) {
  ret german() ? de : en;
}

svoid setLanguage(S l) {
  language = l;
  setCurrentBotLanguage(l);
  aSetText(lblInputView, inputViewLabel());
  androidUI_noWait(r newRecognizer);
}

svoid justASec {
  callOptMC('setBorderAndStatus, waitColor,
    german() ? "BITTE WARTEN" : "JUST A SEC"); // (initializing speech recognizer)
}

// Don't use - freezes UI
svoid _cancelRecognition {
  //print("Cancelling recognition " + listening + " " + (sr != null));
  recognitionCancelled = true;
  //stopListening();
  //listening = false;
  //if (sr != null) sr.cancel();
  //callOptMC('setBorderAndStatus, aWhite(), baseStatus());
}

static void showText(S text) {
  print(text);
  /*if (neq(onScreenText, text) && lastEmotion != null) {
    onScreenText = text;
    emo_show();
  }*/
}
  
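// Switches the displayed emotion. The emotions list holds (image snippet ID, name)
// pairs; unknown names fall back to the last entry ("wide mouth").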
static void emo(S emotion) {
  if (!showPicture) return;
  int i;
  for (i = 0; i < emotions.size(); i += 2)
    if (emotions.get(i+1).equalsIgnoreCase(emotion))
      break;
  if (i >= emotions.size()) {
    print("Emotion not found: " + emotion);
    // use the last one from the list as default
    i -= 2;
  }
  lastEmotionImage = emotions.get(i);
  lastEmotion = emotions.get(i+1);
  emo_show();
}

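// Renders the current emotion. On first call this lazily builds the whole UI (status
// line, language flags, clickable face, transcript views, console, input line);
// afterwards it only updates status text, face image, border and flag colors.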
static void emo_show() {
  if (!showPicture) return;
  
  androidUI {
    Runnable onClick = r {
      if (!manualMode) ret;
      
      //if (listening)
      if (borderColor != -1) {
        //androidLater(500, r { stopListening(); });
        //showText("stop");
        stopListening();
      } else {
        //showText ("start");
        newRecognizer();
      }
    };
  
    if (statusView == null) {
      // init UI
      
      statusView = aFontSize(statusFontSize, aSetForeground(aBlack(), aCenteredTextView()));
      inputView = aSingleLineEditText();
      aOnEnter(inputView, r { onInput(aGetText(inputView), true) });
      //aOnChange(inputView, f cancelRecognition); // freezes!?
      //aOnChange(inputView, r { listening = false });
      //aOnChange(inputView, f cancelRecognition);
      lblInputView = aFontSize(20, aBottomTextView(inputViewLabel()));
      userTextView = aFontSize(userFontSize, aSetForeground(0xFF000055, aTextViewWithWindowFocusChangeNotify(voidfunc(Bool windowFocused) {
        if (windowFocused && recogOnActivate) newRecognizer();
      })));
      myTextView = aFontSize(myFontSize, aSetForeground(0xFF005500, aRightAlignedTextView()));
      androidShow(aVerticalLinearLayout(
        statusView,
        aWestCenterAndEast/*_alignTop2*/(
          leftFlag = aVerticalLinearLayout(androidClickableImage(#1101639, 0 /*transparent*/, r { setLanguage("en-US") })),
          emoView = androidClickableImage(lastEmotionImage, aWhite(), onClick),
          rightFlag = aVerticalLinearLayout(androidClickableImage(#1101638, 0 /*transparent*/, r { setLanguage("de-DE") }))
        ),
        userTextView,
        myTextView,
        androidPrintLogScrollView(),
        aWestAndCenter(lblInputView, inputView)));
    }
    
    if (statusText == null) statusText = baseStatus();
    aSetText(statusView, statusText);
    aSetImageFromSnippet(emoView, lastEmotionImage);
    aSetBackground(emoView, borderColor);
    aSetBackground(eq(language, "en-US") ? borderColor : 0xFFCCCCCC, leftFlag);
    aSetBackground(eq(language, "de-DE") ? borderColor : 0xFFCCCCCC, rightFlag);

    
    /*doEvery(1000, new Runnable {
      S text = "";
      
      public void run() {
        S s = aGetText(inputView);
        if (eq(s, text)) ret;
        text = s;
        cancelRecognition();
      }
    });*/
  }
}

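// Called (via callOptMC) by the recognition listener to change the border color and
// status line; re-renders the current emotion so the change becomes visible.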
static void setBorderAndStatus(int color, S status) {
  if (color != borderColor || neq(status, statusText)) {
    borderColor = color;
    statusText = status;
    if (lastEmotion != null)
      emo(lastEmotion);
  }
}

static void setBorderColor(int color) {
  if (color != borderColor) {
    borderColor = color;
    if (lastEmotion != null)
      emo(lastEmotion);
  }
}
    
static void happy() { emo("happy"); }
static void unhappy() { emo("unhappy"); }
static void oh() { emo("oh"); }
static void bored() { emo("bored"); }
static void wideMouth() { emo("wide mouth"); }

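// Goodbye sequence: smile, fade to a white full screen after a second, then exit the
// process (as the comment below notes, not recommended but it works).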
static void disappear() {
  if (!showPicture) ret;
  happy();
  androidLater(1000, r {
    androidShowFullScreenColor(0xFFFFFFFF);
    androidLater(1000, r {
      System.exit(0); // totally unrecommended but works
    });
  });
}

sS inputViewLabel() {
  ret lang("Or type here:", "Oder hier tippen:");
}

Author comment

Began life as a copy of #1023282




Snippet ID: #1023361
Snippet name: Android Cat Main (include, v3, OLD, single-user)
Eternal ID of this version: #1023361/4
Text MD5: ebd5c5e98ad6033b9d588e0292c1b7b1
Author: stefan
Category: javax / a.i.
Type: JavaX fragment (include)
Public (visible to everyone): Yes
Archived (hidden from active list): No
Created/modified: 2019-06-13 12:45:18
Source code size: 14049 bytes / 501 lines
Pitched / IR pitched: No / No
Views / Downloads: 167 / 289
Version history: 3 change(s)