BotCompany Repo | #1009886 // Continuous Recognition Test [WORKS except for proper stop button, standalone]

JavaX source code (desktop) [tags: use-pretranspiled] - run with: x30.jar

Uses 3874K of libraries. A pure Java version (12658L/92K) is also available.

!7
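
// Continuous speech recognition using Chrome's webkitSpeechRecognition:
// a popup page in Chrome does the recognizing, connects back over a WebSocket,
// and every recognized utterance is forwarded to other JavaX bots.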

sbool autoClose = true;
static int initialDelay = 0;
sbool infoBox, repeatEverything;

static int port;
static O onUtterance; // voidfunc(S)
static L<S> botsToSendUtterancesTo = ll(); // ll("Voice Actions.", "Show Item Page.");
static L<WebSocket> webSockets = synchroList();
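// set when a start is requested before any recognizer page has connected; "start" is sent once a WebSocket arrives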
sbool startRecognition;
static java.util.Timer stopper;
sS language = "en-US";
sS myURL;
static JButton btn;
sbool hadAnyWebSockets; // Does Chrome work at all?

p-subst {
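  // start the mouth bots (Andy/Gudrun), serve HTTP + WebSockets, open the Chrome
  // recognizer popup and register the "Chrome Speech." bot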
  startBotHere("Andy", #1009593);
  startBotHere("Gudrun", #1008549);
  botsToSendUtterancesTo = persistentList("Bots to send utterances to", botsToSendUtterancesTo);
  load('language);
  if (isMainProgram()) {
    infoBox = true;
    repeatEverything = true;
  }
  
  NanoHTTPD.SOCKET_READ_TIMEOUT = 24*3600*1000; // not long enough - TODO: Fix in NanoHTTPD
  
  port = serveHttpWithWebSockets(9999, func(NanoHTTPD.IHTTPSession handshake) {
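    // one WebSocket per recognizer page: we send "start"/"stop"/"language ..." commands
    // and receive the recognized utterances as text frames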
    WebSocket ws = new WebSocket(handshake) {
      protected void onPong(WebSocketFrame pongFrame) { print("pong"); }
      protected void onMessage(WebSocketFrame messageFrame) ctex {
        cancelTimeout();
        fS s = messageFrame.getTextPayload();
        if (repeatEverything) {
          //send("stop");
          sendOpt(mouth(), infoBoxAndReturn(/*switcheroo*/(s)));
          send("start");
        } else infoBoxOrPrint(/*"User said: " +*/ s, infoBox);
        thread {
          pcallF(onUtterance, s);
          for (S bot : cloneList(botsToSendUtterancesTo)) {
            print("Sending to bot " + bot);
            sendOptInNewThread(bot, "User said: *", s);
          }
        }
        hotCommands(s);
      }
      protected void onClose(WebSocketFrame.CloseCode code, String reason, boolean initiatedByRemote) { webSockets.remove(this); }
      protected void onException(IOException e) { printStackTrace(e); }
    };
    if (startRecognition) {
      startRecognition = false;
      ws.send("start");
    }
    
    // close any other recognizers
    for (WebSocket ws2 : cloneList(webSockets)) {
      pcall { ws2.close(WebSocketFrame.CloseCode.NormalClosure, ""); }
      webSockets.remove(ws2);
    }
    
    hadAnyWebSockets = true;
    
    ret addAndReturn(webSockets, ws);
  });
  myURL = print("http://localhost:" + port + "/popup");
  startChromeApp(myURL);
  makeBot("Chrome Speech.");
  
  showControls(jcenteredline(btn = jbutton("Open Speech Recognizer", r {
    startChromeApp(myURL)
  })));
  awtEvery(btn, 500, r { setEnabled(btn, empty(webSockets)) });
  
  /*thread "Chrome Re-Starter" {
    sleepSeconds(20);
    repeat with sleep 5 {
      if (hadAnyWebSockets && empty(webSockets)) {
        startChromeApp(myURL);
        sleepSeconds(15);
      }
    }
  });*/
}

html {
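  // anything but /popup serves a stub that opens the recognizer popup window;
  // /popup is the recognizer page itself (jQuery + webkitSpeechRecognition)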
  if (neq(uri, "/popup"))
    ret hbody("Opening popup..." + hjavascript([[
      window.open('/popup', 'speech_recognizer', 'width=300,height=300,location=no');
      setTimeout(function() { window.close(); }, 10000);
    ]]));

  ret hhtml(hhead(htitle("Speech Recognizer")) + hbody(div(
    h3("Speech Recognizer")
    + loadJQuery()
    + hdiv("Language: " + language, id := 'lang, style := "font-size: 10px")
    + hdiv("Results come here", id := 'results, style := "margin: 10px")
  + hjavascript([[
    var websocket;
    var stopUntil = 0;
    
    function stopRecognition() {
      recognition.stop();
      started = false;
      $("#btn").html("Start recognition");
      document.title = "Speech Recognizer";
    }
    
    function startRecognition() {
      if (Date.now() < stopUntil) return;
      recognition.start();
      started = true;
      $("#btn").html("Stop recognition");
      document.title = "Listening - Speech Recognizer";
    }
    
    function restartRecognition() {
      stopRecognition();
      startRecognition();
    }
    
    function openWebSocket() {
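      // connect back to the JavaX process, which pushes "start"/"stop"/"language ..."
      // commands and receives the transcripts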
      websocket = new WebSocket("ws://localhost:#PORT#/");
      websocket.onopen = function(event) {
        $("#btn").prop('disabled', false);
        $("#results").html("Speak now...");
        startRecognition();
      };
    
      websocket.onmessage = function(event) {
        if (event.data == 'start' && !started) startOrStop();
        if (event.data == 'stop' && started) { stopUntil = Date.now()+200; stopRecognition(); }
        if (event.data.substring(0, 9) == 'language ') {
          var l = event.data.substring(9);
          recognition.lang = l;
          $("#lang").html("Language: " + l);
        }
      };
    
      websocket.onclose = function(event) {
        $("#results").html("WebSocket closed");
        if (#AUTOCLOSE#) window.close();
      };
    }
    
    setTimeout(openWebSocket, #INITIALDELAY#);
      
    var recognition = new webkitSpeechRecognition();
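    // Chrome's recognizer stops after every utterance; the onend handler below
    // restarts it, which is what makes recognition continuous. stopUntil briefly
    // blocks that automatic restart after a deliberate stop or a delivered result.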
    recognition.lang = "#LANGUAGE#";
    
    recognition.onerror = function(event) { 
      var s = "&nbsp;";
      if (event.error != "no-speech") s = "Error: " + event.error;
      $("#results").html(s);
      stopRecognition();
      //setTimeout(startRecognition, 1000); // safety delay
    }
    
    recognition.onresult = function(event) { 
      var result = event.results[0];
      var transcript = result[0].transcript;
      $("#results").html("Transcript: " + transcript);
      websocket.send(transcript);
      stopUntil = Date.now()+200;
      //stopRecognition(); setTimeout(startRecognition, 100);
    }
    
    recognition.onnomatch = function(event) { 
      $("#results").html("-");
      //stopRecognition(); setTimeout(startRecognition, 100);
    }
    
    recognition.onend = function(event) { 
      //$("#results").html("-end-");
      stopRecognition();
      setTimeout(startRecognition, 100);
    }
    
    var started = false;
    
    function startOrStop() {
      if (started) {
        stopUntil = Date.now()+1000; // block restart by onend handler
        stopRecognition();
      } else {
        stopRecognition(); startRecognition();
      }
    }
    
    window.resizeTo(300, 300);
  ]]).replace("#PORT#", str(port)).replace("#AUTOCLOSE#", autoClose ? "true" : "false").replace("#INITIALDELAY#", str(initialDelay)).replace("#LANGUAGE#", language)
    + tag('button, "Start recognition", onclick := "startOrStop()", type := 'button, id := 'btn, disabled := 'disabled)
    //+ p(ahref("#", "Popup", onClick := "window.open('/', 'speech_recognizer', 'width=300,height=300,location=no'); return false;"));
  , style := "text-align: center"));
}

svoid startRecognition {
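  // tell the connected recognizer page to start listening; if none is connected yet, remember the request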
  L<WebSocket> l = cloneList(webSockets);
  if (empty(l)) startRecognition = true;
  else {
    //print("Starting recognition." + (l(l) > 1 ? "Weird: Have " + l(l) + " websockets" : ""));
    pcall {
      first(l).send("start");
    }
  }
}

svoid stopRecognition {
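  // clear any pending start request and tell the recognizer page to stop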
  if (startRecognition) startRecognition = false;
  if (nempty(webSockets)) pcall {
    first(webSockets).send("stop");
  }
}

sS hotCommands(S s) {
  if "english|englisch" ret answer("language " + quote("en-US"));
  if "german|deutsch" ret answer("language " + quote("de-DE"));
  if "stop recognition" { stopRecognition(); ret "OK"; }
  null;
}

answer {
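  // bot interface: commands other programs can send to the "Chrome Speech." bot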
  try answer hotCommands(s);
  if "start recognition timeout *" {
    final int seconds = parseInt($1);
    startRecognition();
    stopper = timerOnce(toMS(seconds), f stopRecognition);
    ret "OK";
  }
  if "start recognition" { startRecognition(); ret "OK"; }
  if "send to bot *" { setAdd(botsToSendUtterancesTo, $1); ret "OK"; }
  if "what bots are you sending to" ret sfu(botsToSendUtterancesTo);
  if "language *" {
    setAndSave('language, $1);
    pcall { if (nempty(webSockets)) first(webSockets).send("language " + $1); }
    stopRecognition();
    sleep(500);
    startRecognition();
    ret "OK";
  }
}

svoid cancelTimeout {
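  // cancel a pending auto-stop timer (set by "start recognition timeout *")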
  if (stopper != null) { stopper.cancel(); stopper = null; }
}

sS mouth {
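  // pick the speech-output bot matching the current recognition language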
  ret eq(language, "de-DE") ? "Gudrun" : "Andy";
}
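
Usage sketch (not part of the snippet): another JavaX program can drive this recognizer through the "Chrome Speech." bot it registers. The calls below follow the patterns handled in answer/hotCommands above; "My Bot." is a hypothetical bot name used only for illustration.

  !7

  p {
    sendOpt("Chrome Speech.", "language " + quote("de-DE"));      // switch recognition to German
    sendOpt("Chrome Speech.", "send to bot " + quote("My Bot.")); // forward utterances to another bot
    sendOpt("Chrome Speech.", "start recognition");               // begin listening
    sendOpt("Chrome Speech.", "stop recognition");                // stop again
  }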


Travelled to 13 computer(s): aoiabmzegqzx, bhatertpkbcr, cbybwowwnfue, cfunsshuasjs, gwrvuhgaqvyk, ishqpsrjomds, lpdgvwnxivlt, mqqgnosmbjvj, pyentgdyhuwx, pzhvpgtvlbxg, tslmcundralx, tvejysmllsmz, vouqrxazstgt


Snippet ID: #1009886
Snippet name: Continuous Recognition Test [WORKS except for proper stop button, standalone]
Eternal ID of this version: #1009886/1
Text MD5: 3348f7098ef2384521191e8006b30e5e
Transpilation MD5: 4f0386260a3b53964ab478ec81b8b94f
Author: stefan
Category: javax / speech
Type: JavaX source code (desktop)
Public (visible to everyone): Yes
Archived (hidden from active list): No
Created/modified: 2017-08-23 18:21:59
Source code size: 8056 bytes / 251 lines
Pitched / IR pitched: No / No
Views / Downloads: 436 / 851