!7

// Chrome Speech recognizer bot ("Chrome Speech."): serves a small web page that uses
// Chrome's webkitSpeechRecognition, receives transcripts back over a WebSocket, and
// forwards them to a callback and/or another bot.
// NOTE(review): original source arrived with all newlines collapsed; line structure
// below is restored from the obvious comment/statement boundaries — tokens unchanged.

sbool autoClose;               // if true, the popup window closes itself when its websocket closes (see #AUTOCLOSE# in the JS below)
static int initialDelay = 0;   // ms to wait before the page opens its websocket (see #INITIALDELAY# in the JS below)
sbool infoBox, repeatEverything; // both set to isMainProgram() in main; control how utterances are displayed/echoed
static int port;               // actual HTTP port chosen by serveHttpWithWebSockets
static O onUtterance; // voidfunc(S)
sS botToSendUtterancesTo;      // persisted bot name; each utterance is forwarded to it via sendOpt (loaded in main, saved in answer)
static L webSockets = synchroList(); // currently connected recognizer pages
sbool startRecognition;        // pending-start flag: set when startRecognition() is called with no client yet; consumed when a websocket connects
                               // NOTE(review): field shares its name with the svoid startRecognition below — presumably legal in JavaX; verify

p {
  load('botToSendUtterancesTo);
  infoBox = repeatEverything = isMainProgram();
  NanoHTTPD.SOCKET_READ_TIMEOUT = 24*3600*1000; // not long enough - TODO: Fix in NanoHTTPD
  // Serve HTTP + websockets; each websocket connection is one recognizer page.
  port = serveHttpWithWebSockets(9999, func(NanoHTTPD.IHTTPSession handshake) {
    WebSocket ws = new WebSocket(handshake) {
      protected void onPong(WebSocketFrame pongFrame) { print("pong"); }
      // A text frame from the page is one recognized utterance (the transcript).
      protected void onMessage(WebSocketFrame messageFrame) {
        fS s = messageFrame.getTextPayload();
        if (repeatEverything)
          sendOpt("Kevin", infoBoxAndReturn(switcheroo(s)));
        else
          infoBoxOrPrint("User said: " + s, infoBox);
        // Forward asynchronously so the websocket read loop isn't blocked.
        thread {
          pcallF(onUtterance, s);
          if (nempty(botToSendUtterancesTo))
            sendOpt(botToSendUtterancesTo, "User said: *", s);
        }
      }
      protected void onClose(WebSocketFrame.CloseCode code, String reason, boolean initiatedByRemote) {
        webSockets.remove(this);
      }
      protected void onException(IOException e) { printStackTrace(e); }
    };
    // If a start was requested before any client connected, trigger it now.
    if (startRecognition) { startRecognition = false; ws.send("start"); }
    ret addAndReturn(webSockets, ws);
  });
  S url = print("http://localhost:" + port);
  //openPlatformBrowser(url);
  // Open the recognizer page as a chromium app window (webkitSpeechRecognition needs Chrome).
  nohup("chromium-browser --app=" + url + "/popup");
  makeBot("Chrome Speech.");
}

// HTTP handler. "/" serves a tiny page that opens "/popup" and closes itself;
// "/popup" serves the actual recognizer UI (jQuery + webkitSpeechRecognition + websocket).
html {
  if (neq(uri, "/popup"))
    ret hbody("Opening popup..."
      + hjavascript([[ window.open('/popup', 'speech_recognizer', 'width=300,height=300,location=no'); setTimeout(function() { window.close(); }, 10000); ]]));
  // Recognizer page: the JS connects back to this server, obeys 'start'/'stop'
  // messages, and sends each transcript over the websocket.
  // #PORT#/#AUTOCLOSE#/#INITIALDELAY# are substituted below via .replace().
  ret hhtml(hhead(htitle("Speech Recognizer")) + hbody(div(
    h3("Speech Recognizer")
    + loadJQuery()
    + hdiv("Results come here", id := 'results, style := "margin: 10px")
    + hjavascript([[ var websocket; function openWebSocket() { websocket = new WebSocket("ws://localhost:#PORT#/"); websocket.onopen = function(event) { $("#btn").prop('disabled', false); }; websocket.onmessage = function(event) { if (event.data == 'start' && !started) startOrStop(); if (event.data == 'stop' && started) startOrStop(); }; if (#AUTOCLOSE#) websocket.onclose = function(event) { window.close(); }; } setTimeout(openWebSocket, #INITIALDELAY#); var recognition = new webkitSpeechRecognition(); recognition.lang = "en-US"; recognition.onerror = function(event) { $("#results").html("Error: " + event.error); started = false; $("#btn").html("Start recognition"); } recognition.onresult = function(event) { var result = event.results[0]; var transcript = result[0].transcript; $("#results").html("Transcript: " + transcript); websocket.send(transcript); started = false; $("#btn").html("Start recognition"); } var started = false; function startOrStop() { if (started) { recognition.stop(); started = false; $("#btn").html("Start recognition"); } else { recognition.start(); started = true; $("#btn").html("Stop recognition"); } } window.resizeTo(300, 300); ]]).replace("#PORT#", str(port)).replace("#AUTOCLOSE#", autoClose ? "true" : "false").replace("#INITIALDELAY#", str(initialDelay))
    + tag('button, "Start recognition", onclick := "startOrStop()", type := 'button, id := 'btn, disabled := 'disabled)
    //+ p(ahref("#", "Popup", onClick := "window.open('/', 'speech_recognizer', 'width=300,height=300,location=no'); return false;"));
    , style := "text-align: center"));
}

// Ask the connected page to start recognition; if no page is connected yet,
// remember the request in the startRecognition flag (consumed on connect in main).
svoid startRecognition {
  L l = cloneList(webSockets);
  if (empty(l))
    startRecognition = true;
  else {
    //print("Starting recognition." + (l(l) > 1 ? "Weird: Have " + l(l) + " websockets" : ""));
    pcall { first(l).send("start"); }
  }
}

// Cancel a pending start and/or tell the connected page to stop recognition.
svoid stopRecognition {
  if (startRecognition) startRecognition = false;
  if (nempty(webSockets))
    pcall { first(webSockets).send("stop"); }
}

// Bot command interface; "*" patterns bind to $1 etc.
answer {
  if "start recognition" { startRecognition(); ret "OK"; }
  if "stop recognition" { stopRecognition(); ret "OK"; }
  if "send to bot *" { setAndSave('botToSendUtterancesTo, $1); ret "OK"; }
  if "what bot are you sending to" ret botToSendUtterancesTo;
}