Download Jar. Uses 3874K of libraries. Click here for Pure Java version (12658L/92K).
!7

// JavaX shorthands used below: sbool = static boolean, sS = static String,
// O = Object, L<S> = List<String>, S = String.

// When true, the recognizer popup closes itself after its WebSocket drops
// (substituted into the page script as #AUTOCLOSE#).
sbool autoClose = true;
// Milliseconds the page waits before opening its WebSocket (#INITIALDELAY#).
static int initialDelay = 0;
// infoBox: show each utterance in an info box; repeatEverything: speak every
// utterance back through the mouth bot. Both default on when run standalone.
sbool infoBox, repeatEverything;

// Actual port chosen by serveHttpWithWebSockets (requested: 9999).
static int port;
static O onUtterance; // voidfunc(S)
static L<S> botsToSendUtterancesTo = ll(); // ll("Voice Actions.", "Show Item Page.");
// Currently connected recognizer pages; kept to at most one (see connect handler).
static L<WebSocket> webSockets = synchroList();
// Pending request: send "start" as soon as the next WebSocket connects.
sbool startRecognition;
// One-shot auto-stop timer set by "start recognition timeout *".
static java.util.Timer stopper;
sS language = "en-US"; // recognition language; persisted via load/setAndSave
sS myURL;              // URL of the /popup recognizer page
static JButton btn;    // "Open Speech Recognizer" button in the controls window
sbool hadAnyWebSockets; // Does Chrome work at all?
// Main entry point (JavaX p-subst). Starts the two text-to-speech bots,
// serves the recognizer page plus a WebSocket endpoint, opens it as a Chrome
// app window, and wires up a small controls UI. Transcripts arrive over the
// WebSocket in onMessage below.
p-subst {
  // TTS voices: Andy (English) and Gudrun (German) — see mouth().
  startBotHere("Andy", #1009593);
  startBotHere("Gudrun", #1008549);
  botsToSendUtterancesTo = persistentList("Bots to send utterances to", botsToSendUtterancesTo);
  load('language);
  if (isMainProgram()) {
    infoBox = true;
    repeatEverything = true;
  }

  NanoHTTPD.SOCKET_READ_TIMEOUT = 24*3600*1000; // not long enough - TODO: Fix in NanoHTTPD

  // Serve HTTP + WebSockets; the func is the per-connection WebSocket factory.
  port = serveHttpWithWebSockets(9999, func(NanoHTTPD.IHTTPSession handshake) {
    WebSocket ws = new WebSocket(handshake) {
      protected void onPong(WebSocketFrame pongFrame) { print("pong"); }
      // Called with each transcript the browser page sends us. (ctex = catch
      // exceptions and rethrow unchecked, a JavaX modifier.)
      protected void onMessage(WebSocketFrame messageFrame) ctex {
        cancelTimeout();
        fS s = messageFrame.getTextPayload();
        if (repeatEverything) {
          //send("stop");
          // Speak the utterance back via the language-appropriate bot.
          sendOpt(mouth(), infoBoxAndReturn(/*switcheroo*/(s)));
          send("start");
        } else infoBoxOrPrint(/*"User said: " +*/ s, infoBox);
        // Fan the utterance out to listeners off the WebSocket thread.
        thread {
          pcallF(onUtterance, s);
          for (S bot : cloneList(botsToSendUtterancesTo)) {
            print("Sending to bot " + bot);
            sendOptInNewThread(bot, "User said: *", s);
          }
        }
        // Handle built-in voice commands (language switch, stop).
        hotCommands(s);
      }
      protected void onClose(WebSocketFrame.CloseCode code, String reason, boolean initiatedByRemote) { webSockets.remove(this); }
      protected void onException(IOException e) { printStackTrace(e); }
    };
    // A start request arrived before any page was connected — honor it now.
    if (startRecognition) {
      startRecognition = false;
      ws.send("start");
    }

    // close any other recognizers
    for (WebSocket ws2 : cloneList(webSockets)) {
      pcall { ws2.close(WebSocketFrame.CloseCode.NormalClosure, ""); }
      webSockets.remove(ws2);
    }

    hadAnyWebSockets = true;

    ret addAndReturn(webSockets, ws);
  });
  myURL = print("http://localhost:" + port + "/popup");
  startChromeApp(myURL);
  makeBot("Chrome Speech.");

  // Controls window: button re-opens the recognizer popup; it is only
  // enabled while no recognizer page is connected.
  showControls(jcenteredline(btn = jbutton("Open Speech Recognizer", r {
    startChromeApp(myURL)
  })));
  awtEvery(btn, 500, r { setEnabled(btn, empty(webSockets)) });

  /*thread "Chrome Re-Starter" {
    sleepSeconds(20);
    repeat with sleep 5 {
      if (hadAnyWebSockets && empty(webSockets)) {
        startChromeApp(myURL);
        sleepSeconds(15);
      }
    }
  });*/
}
87 | |
// HTTP page handler (JavaX html block; `uri` is the request path).
// Any path other than /popup serves a tiny page that opens /popup as a
// fixed-size popup window and then closes itself. /popup serves the actual
// recognizer page: it opens a WebSocket back to us, drives Chrome's
// webkitSpeechRecognition, and sends each transcript over the socket.
// NOTE: the [[ ]] bodies are string literals served verbatim to the browser —
// #PORT#/#AUTOCLOSE#/#INITIALDELAY#/#LANGUAGE# are substituted below.
html {
  if (neq(uri, "/popup"))
    ret hbody("Opening popup..." + hjavascript([[
      window.open('/popup', 'speech_recognizer', 'width=300,height=300,location=no');
      setTimeout(function() { window.close(); }, 10000);
    ]]));

  ret hhtml(hhead(htitle("Speech Recognizer")) + hbody(div(
    h3("Speech Recognizer")
    // jQuery is required by the page script below ($ selectors).
    + loadJQuery()
    + hdiv("Language: " + language, id := 'lang, style := "font-size: 10px")
    + hdiv("Results come here", id := 'results, style := "margin: 10px")
    + hjavascript([[
      var websocket;
      var stopUntil = 0;

      function stopRecognition() {
        recognition.stop();
        started = false;
        $("#btn").html("Start recognition");
        document.title = "Speech Recognizer";
      }

      function startRecognition() {
        if (Date.now() < stopUntil) return;
        recognition.start();
        started = true;
        $("#btn").html("Stop recognition");
        document.title = "Listening - Speech Recognizer";
      }

      function restartRecognition() {
        stopRecognition();
        startRecognition();
      }

      function openWebSocket() {
        websocket = new WebSocket("ws://localhost:#PORT#/");
        websocket.onopen = function(event) {
          $("#btn").prop('disabled', false);
          $("#results").html("Speak now...");
          startRecognition();
        };

        websocket.onmessage = function(event) {
          if (event.data == 'start' && !started) startOrStop();
          if (event.data == 'stop' && started) { stopUntil = Date.now()+200; stopRecognition(); }
          if (event.data.substring(0, 9) == 'language ') {
            var l = event.data.substring(9);
            recognition.lang = l;
            $("#lang").html("Language: " + l);
          }
        };

        websocket.onclose = function(event) {
          $("#results").html("WebSocket closed");
          if (#AUTOCLOSE#) window.close();
        };
      }

      setTimeout(openWebSocket, #INITIALDELAY#);

      var recognition = new webkitSpeechRecognition();
      recognition.lang = "#LANGUAGE#";

      recognition.onerror = function(event) {
        var s = " ";
        if (event.error != "no-speech") s = "Error: " + event.error;
        $("#results").html(s);
        stopRecognition();
        //setTimeout(startRecognition, 1000); // safety delay
      }

      recognition.onresult = function(event) {
        var result = event.results[0];
        var transcript = result[0].transcript;
        $("#results").html("Transcript: " + transcript);
        websocket.send(transcript);
        stopUntil = Date.now()+200;
        //stopRecognition(); setTimeout(startRecognition, 100);
      }

      recognition.onnomatch = function(event) {
        $("#results").html("-");
        //stopRecognition(); setTimeout(startRecognition, 100);
      }

      recognition.onend = function(event) {
        //$("#results").html("-end-");
        stopRecognition();
        setTimeout(startRecognition, 100);
      }

      var started = false;

      function startOrStop() {
        if (started) {
          stopUntil = Date.now()+1000; // block restart by onend handler
          stopRecognition();
        } else {
          stopRecognition(); startRecognition();
        }
      }

      window.resizeTo(300, 300);
    ]]).replace("#PORT#", str(port)).replace("#AUTOCLOSE#", autoClose ? "true" : "false").replace("#INITIALDELAY#", str(initialDelay)).replace("#LANGUAGE#", language)
    + tag('button, "Start recognition", onclick := "startOrStop()", type := 'button, id := 'btn, disabled := 'disabled)
    //+ p(ahref("#", "Popup", onClick := "window.open('/', 'speech_recognizer', 'width=300,height=300,location=no'); return false;"));
    , style := "text-align: center"));
}
198 | |
// Ask the connected recognizer page to start listening. If no page is
// connected yet, set the startRecognition flag instead — the WebSocket
// connect handler in the main block sends "start" on the next connection.
svoid startRecognition {
  // Snapshot to avoid racing with concurrent connects/disconnects.
  L<WebSocket> l = cloneList(webSockets);
  if (empty(l)) startRecognition = true;
  else {
    //print("Starting recognition." + (l(l) > 1 ? "Weird: Have " + l(l) + " websockets" : ""));
    // pcall = swallow-and-log exceptions; a dead socket must not crash us.
    pcall {
      first(l).send("start");
    }
  }
}
209 | |
// Stop listening: drop any pending auto-start request, then tell the
// connected recognizer page (if any) to stop. Send errors are swallowed
// and logged by pcall so a dead socket cannot crash the caller.
svoid stopRecognition {
  startRecognition = false;
  if (nempty(webSockets)) pcall {
    first(webSockets).send("stop");
  }
}
216 | |
// Built-in voice commands, checked against every utterance. Returns an
// answer string when s matched a command, otherwise null.
// (JavaX: `if "a|b"` pattern-matches the implicit parameter s; trailing
// `null;` is shorthand for `ret null;`.)
sS hotCommands(S s) {
  // Language switches delegate to the "language *" handler in answer {}.
  if "english|englisch" ret answer("language " + quote("en-US"));
  if "german|deutsch" ret answer("language " + quote("de-DE"));
  if "stop recognition" { stopRecognition(); ret "OK"; }
  null;
}
223 | |
// Bot command interface (JavaX answer block; matches the implicit message s,
// with * captures bound to $1...). Returns null for unrecognized messages.
answer {
  // Shared voice-command table first; `try answer` returns its result if non-null.
  try answer hotCommands(s);
  if "start recognition timeout *" {
    final int seconds = parseInt($1);
    startRecognition();
    // Arm a one-shot timer that stops recognition after the given seconds.
    // cancelTimeout() disarms it when an utterance arrives first.
    stopper = timerOnce(toMS(seconds), f stopRecognition);
    ret "OK";
  }
  if "start recognition" { startRecognition(); ret "OK"; }
  if "send to bot *" { setAdd(botsToSendUtterancesTo, $1); ret "OK"; }
  if "what bots are you sending to" ret sfu(botsToSendUtterancesTo);
  if "language *" {
    // Persist the new language, push it to the connected page, then
    // restart recognition so the change takes effect.
    setAndSave('language, $1);
    pcall { if (nempty(webSockets)) first(webSockets).send("language " + $1); }
    stopRecognition();
    sleep(500);
    startRecognition();
    ret "OK";
  }
}
244 | |
// Disarm the pending auto-stop timer set by "start recognition timeout *",
// if one is armed. Called whenever an utterance arrives.
svoid cancelTimeout {
  if (stopper != null) { stopper.cancel(); stopper = null; }
}
248 | |
// Pick the TTS bot matching the current recognition language:
// German gets "Gudrun", everything else gets "Andy".
sS mouth {
  if (eq(language, "de-DE")) ret "Gudrun";
  ret "Andy";
}
download show line numbers debug dex old transpilations
Travelled to 13 computer(s): aoiabmzegqzx, bhatertpkbcr, cbybwowwnfue, cfunsshuasjs, gwrvuhgaqvyk, ishqpsrjomds, lpdgvwnxivlt, mqqgnosmbjvj, pyentgdyhuwx, pzhvpgtvlbxg, tslmcundralx, tvejysmllsmz, vouqrxazstgt
No comments. add comment
Snippet ID: | #1009886 |
Snippet name: | Continuous Recognition Test [WORKS except for proper stop button, standalone] |
Eternal ID of this version: | #1009886/1 |
Text MD5: | 3348f7098ef2384521191e8006b30e5e |
Transpilation MD5: | 4f0386260a3b53964ab478ec81b8b94f |
Author: | stefan |
Category: | javax / speech |
Type: | JavaX source code (desktop) |
Public (visible to everyone): | Yes |
Archived (hidden from active list): | No |
Created/modified: | 2017-08-23 18:21:59 |
Source code size: | 8056 bytes / 251 lines |
Pitched / IR pitched: | No / No |
Views / Downloads: | 554 / 1143 |
Referenced in: | [show references] |