BotCompany Repo | #1014135 // VisualKatze 5

JavaX source code (desktop) [tags: use-pretranspiled] - run with: x30.jar

Uses 4487K of libraries. A pure Java version (16782 lines / 133K) is also available.

!7

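// State: l is the big display label; consequentsMap/precedentsMap are filled from the
// speech recognition log by the receivers set up in main; ignoreSet holds words that
// prepLine drops instead of treating as input.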
static JLabel l;
static Map<S, S> consequentsMap = synchroCIMap(), precedentsMap = synchroCIMap();
static Set<S> ignoreSet = asCISet(splitAtVerticalBar("english|englisch|german|deutsch|foreground|fairground|background|Vordergrund|Hintergrund"));
sS lastUtterance, answer;
static L<VF1<S>> receivers;
static JTextField tfInput, tfSay, tfNote;
static JButton btnSay, btnStartStop;
static TailFile logTail;
static JComponent catComponent, otherComponent;
static SingleComponentPanel scp;
sS currentList;
sbool listening;
static TrayIcon trayIcon; // the "start listening" icon

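// Main program (no console, auto-update): starts the Katze answer bot in a separate VM,
// builds the cat-image GUI and wires up text fields, buttons and the speech log tail.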
p-noconsole-autoupdate {
  printAndConsoleSize(64K);
  startBotInSeparateVM("Katze.", #1013936);
  hideVMRunningBot("Katze.");
  framesBot();
  
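  // Cat view: centered background image #1101166 with a big centered label (font size 60)
  // on top, wrapped in a vertically extending scroll pane.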
  catComponent = jscroll_verticalExtend_trackWidth(
    jCenteredBackgroundImage(#1101166, setBackground(awtColor("e4e4e4"), setForeground(Color.black,
    setFontSize(60, l = jcenteredlabel_noToolTip())))));
  
  scp = singleComponentPanel(catComponent);
  final JFrame frame = frameIcon(#1101166, showMaximizedFrame(scp));
  addToWindowWithMargin(frame, jcenteredline(
    btnStartStop = jbutton("", f startOrStop),
    btnSay = jbutton("Say", f sayIt)));
    
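  // Poll the "Chrome Speech." bot every 2 seconds so the button reflects whether
  // recognition is actually on.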
  awtCalcEveryAndNow(btnStartStop, 2000, r {
    setListening(isYes(sendOpt("Chrome Speech.", "is on")));
  });
  
  // added bottom to top (the first field added ends up lowest in the window)
  addToWindowNorth(frame, withMargin(10, 1, withLabel("Add a note:", tfNote = jcenteredtextfield())));
  addToWindowNorth(frame, withMargin(10, 1, withLabel("Make me say something:", tfSay = jcenteredtextfield())));
  addToWindowNorth(frame, withMargin(10, 1, withLabel("Type here instead of speaking:", tfInput = jcenteredtextfield())));
  
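  // Wire the three text fields: typed input goes to katze_userTyped, the "say" field is
  // spoken aloud, and notes are appended to the "Today's Notes" mech list.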
  onEnter(tfInput, r-thread { katze_userTyped(getText(tfInput)) });
  onEnter(tfSay, r-thread { speakInRecognizerLanguage(getTextTrim(tfSay)) });
  onEnter(tfNote, r {
    mechAppendQueue().add(r {
      appendToMechList_noUniq("Today's Notes", "[" + localDateWithMilliseconds() + "] " + getTextTrim(tfNote)); });
  });
  focusOnFrameActivationAndNow(frame, btnSay);
    
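  // Replay the existing speech log (all but the last line) through the receivers so the
  // consequents/precedents maps start out filled.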
  receivers = ll(
    consequentsMap_receiver(consequentsMap), precedentsMap_receiver(precedentsMap));
  time "Loading" { loading {
    mapVoidMulti(receivers, notNulls(map prepLine(dropLast(linesOfFile(speechRecognitionLog())))));
  } }
  
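  // Then tail the log live; every new line goes to onUtteranceFromLog.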
  logTail = tailFileLinewisePlusLastLine(speechRecognitionLog(), 100, vf1(f onUtteranceFromLog));
  //makeAndCall_warmUp("mL_anInstanceOfA");
}

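// Clean up a raw log line, hand it to the answer method once live tailing has started,
// and return null for lines in the ignore set.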
sS prepLine(S s) {
  s = unquoteAndDropAngleBracketStuff(s);
  
  // don't react on older messages
  if (logTail != null && logTail.started())
    printIndentUnlessEmpty(">> ", pcallAnswerMethod(s));
    
  ret nullIfInSet(ignoreSet, s);
}

svoid onUtteranceFromLog(S utterance) {
  if (empty(utterance = prepLine(trimLastLine(utterance)))) ret;
  print(">>  " + utterance);
  onUtterance(utterance);
}

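// Central utterance handler: feed the receivers, guess an answer, enable/disable the Say
// button and show utterance plus answer in the big label and the frame title.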
svoid onUtterance(S utterance) {
  lastUtterance = utterance;
  mapVoidMulti(receivers, ll(utterance));
  answer = guessAnswer(utterance);
  S both = utterance;
  setTextAndEnabled(btnSay, empty(answer) ? pair("Say", false) : pair("Say " + quote(answer), true));
  if (nempty(answer)) {
    both += "\n\n" + answer;
    logQuotedWithTime("guessLog", both);
  }
  
  setFrameTitle(l, both.replaceAll("\n+", " > "));
  setText(l, jlabel_textAsHTML_center(both));
}

svoid say(S s) { speakInRecognizerLanguage(s); }

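// Voice commands; prepLine dispatches into this via pcallAnswerMethod. A "*" in a pattern
// captures the matched text, available as $1.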
answer {
  s = lookupOrKeep(mechMapCI("Mishearing corrections"), s);
  
  if (matchStartOneOf(s, m, "phone say"))
    catSayOnPhone($1);
  else if (matchStartOneOf(s, m, "sag", "say", "please say"))
    thread { say($1); }
  if "thanks"
    say("sure");
  if "higher"
    infoBox("Get higher?");
  if "font size *"
    ret "OK" with revalidate(setFontSize(parseInt($1), l));
  if "foreground|fairground|vordergrund"
    ret "OK" with activateFrame(l);
  if "background|hintergrund"
    ret "OK" with minimizeFrame(l);
  if "standard function|standard functions|steroid function|standard fashion" // Google keeps misunderstanding
    ret "OK" with sfBot_doIt();
  if "move mouse away"
    ret "OK" with moveMouseToTopRightCorner();
  if "mouse to center|mouse to centre|most two centre"
    ret "OK" with moveMouseToScreenCenter();
  if "split ..."
    ok split();
  if "unsplit ..."
    ok scp.setComponent(catComponent);
  if "show list of ..."
    ok openList($1);
  if "add to list"
    addToList(lastUtterance);
}

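// Speak the currently guessed answer aloud and feed it back in as an utterance.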
svoid sayIt {
  fS a = answer;
  if (empty(a)) ret;
  print("Saying: " + a);
  thread { speakInRecognizerLanguage(a); }
  onUtterance(a);
}

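// Answer sources, tried in order: German self-introductions, the "Witty Answers" list,
// general code-fragment answers, then the learned consequents/precedents maps.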
sS guessAnswer(S s) {
  new Matches m;
  if (find3plusRestsOneOf(s, m, "Ich heiße", "Ich bin", "Mein Name ist", "Man nennt mich", "Ich werde * genannt", "nennen mich", "nennt mich", "nenne mich"))
    if (eqic($2, "Stefan"))
      ret gudrun_later("Stefan ist ein Programmierer");
    else
      ret gudrun_later("Hallo " + $2);

  try answer mL_answerWithCodeFragments("Witty Answers", s);
  try answer mL_answerWithCodeFragments(s);
  try answer consequentsMap.get(s);
  try answer precedentsMap.get(s);
  null;
}

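// Show an extra component next to the cat view in a 50/50 horizontal split.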
svoid split(JComponent c) {
  otherComponent = c;
  split();
}

svoid split {
  if (otherComponent == null) otherComponent = jpanel();
  scp.setComponent(setSplitPaneDelayed(0.5, jhsplit(catComponent, otherComponent)));
  activateFrame(scp);
}

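// Open a mech list in the split view and remember its name as the current list.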
static JTextArea openList(S name) {
  currentList = name;
  JTextArea ta;
  split(jsection(name, ta = jtextarea(lines(mLOrCreate(name)))));
  ret ta;
}

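// Append an utterance to the current list, then reopen the list and scroll it to the bottom.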
svoid addToList(fS s) {
  if (empty(s) || empty(currentList)) ret;
  mechAppendQueue(r {
    appendToMechList(currentList, s);
    final JTextArea ta = openList(currentList);
    if (ta != null) awt {
      int h = ta.getHeight();
      ta.scrollRectToVisible(new Rectangle(0, h-1, 1, 1));
    }
  });
}

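// Listening control: toggles recognition in the "Chrome Speech." bot; while not listening,
// a tray icon (#1101203) offers "Start Listening" and "Exit".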
svoid startListening {
  setListening_active(true);
}

svoid startOrStop {
  setListening_active(!listening);
}

svoid setListening_active(bool b) {
  setListening(b);
  sendInNewThread("Chrome Speech.", b ? "start recognition" : "stop recognition");
}

svoid setListening(final bool b) {
  swing {
    listening = b;
    setText(btnStartStop, listening ? "Stop listening" : "Start listening");
    if (b) {
      removeTrayIcon(trayIcon);
      trayIcon = null;
    } else if (trayIcon == null)
      trayIcon = installTrayIcon(#1101203, "Start listening", r { startListening() },
        menuItem("Start Listening", f startListening),
        menuItem("Exit", f stopKatze));
  }
}

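// Kill the separate Katze bot VM (#1013936), then exit this VM cleanly.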
svoid stopKatze {
  killProgram(#1013936);
  cleanKillVM();
}

Author comment

Began life as a copy of #1014131


Travelled to 18 computer(s): anzfkgltbrzg, aoiabmzegqzx, bhatertpkbcr, cbybwowwnfue, cfunsshuasjs, gwrvuhgaqvyk, ishqpsrjomds, lpdgvwnxivlt, mqqgnosmbjvj, nruopgnwybjz, oikenkqtkxpn, onxytkatvevr, pyentgdyhuwx, pzhvpgtvlbxg, tslmcundralx, tvejysmllsmz, vouqrxazstgt, vpdwwinrgdga


Snippet ID: #1014135
Snippet name: VisualKatze 5
Eternal ID of this version: #1014135/121
Text MD5: c96728a4fc7765755e502a46a920f3de
Transpilation MD5: 0bed2db11fffb9d730e298b0fe93bcce
Author: stefan
Category: javax / gui
Type: JavaX source code (desktop)
Public (visible to everyone): Yes
Archived (hidden from active list): No
Created/modified: 2018-05-21 10:02:36
Source code size: 6667 bytes / 209 lines
Pitched / IR pitched: No / No
Views / Downloads: 563 / 62316
Version history: 120 change(s)