Uses 1113K of libraries. Click here for Pure Java version (19810L/117K).
1 | !7 |
2 | |
// Persisted concept (DB record): one script that is automatically added
// to every newly created chat frontend (see the Request constructor below,
// which does: dm_call(frontend, 'addScripts, collect scriptID(list StandardScript())) ).
3 | concept StandardScript { |
4 | S scriptID; // snippet ID of the script, e.g. "#1027704/SomeCruddieScripts/RepeatAfterMe" |
5 | } |
6 | |
// Main module: the cruddie.site chat bot server (web chat + speech recognition UI).
7 | cmodule2 Cruddie > DynPrintLogAndEnabled { |
8 | set flag NoNanoHTTPD. |
9 | !include #1029545 // API for Eleu |
10 | |
// Server secret mixed into cookie hashes to derive case IDs (see cookieToCaseID).
// Loaded from / created in secretProgramFile("salt.txt") in start {}.
11 | transient S salt; |
12 | transient WebChatBot chatBot; // the embedded web chat bot, serves under botLink() |
13 | transient CRUD<StandardScript> standardScriptsCRUD; // admin UI tab for StandardScript records |
14 | transient CRUD<Conversation> conversationsCRUD; // admin UI tab for Conversation records |
15 | |
// Client-side voice-activity-detection tuning (values are used in the generated JS below).
16 | switchable int vadUpdateInterval = 100; // ms between vadMagicUpdate() ticks |
17 | switchable double listenTime = 3.0; // listen for 3 seconds after voice activity |
18 | switchable double listenTimeAfterActualSpeech = 10.0; // listen for 10 seconds after actual speech recognized |
19 | switchable double transcriptTitleShowTime = 5.0; // how long to show recognized text in window title |
20 | switchable bool showVadStatus; // if set, show the "someone is speaking" icon in the page |
21 | switchable int defaultHumVolume = 0; // initial volume (0-100) of the "listening" hum sound |
22 | |
23 | S myLink() { ret "https://cruddie.site/"; } |
24 | S botLink() { ret "bot"; /*ret appendWithSlash(myLink(), "bot");*/ } |
25 | |
// Library IDs of the per-user modules spawned for each session (see class Request).
26 | switchable S frontendModuleLibID = "#1027675/ChatBotFrontend"; |
27 | switchable S backendModuleLibID = "#1027591/DynamicClassesMultiCRUD"; |
28 | transient S cmdsSnippetID = #1027616; // default commands imported into a fresh frontend |
29 | |
// Module startup: open the DB, build the CRUD panels, then (in a background
// thread) load/create the salt and configure + start the web chat bot.
30 | start { |
31 | db(); |
32 | standardScriptsCRUD = new CRUD(StandardScript); |
33 | conversationsCRUD = new CRUD(Conversation); |
34 | conversationsCRUD.ensureIndexed = true; |
35 | thread enter { pcall { |
36 | File saltFile = secretProgramFile("salt.txt"); |
// BUGFIX: assign the FIELD. Previously this read "S salt = ...", which
// declared a local variable shadowing the transient field "salt", so the
// field stayed null and cookieToCaseID() hashed with a null salt.
37 | salt = trimLoadTextFile(saltFile); |
38 | if (empty(salt)) { |
39 | saveTextFile(saltFile, salt = randomID()); |
40 | print("Made salt"); |
41 | } |
42 | dm_restartOnFieldChange enabled(); |
43 | if (!enabled) ret; // module disabled: don't start the bot |
44 | chatBot = new WebChatBot; |
45 | chatBot.forceCookie = true; |
// Undo Google speech recognition's censoring before the bot sees the text.
46 | chatBot.preprocess = s -> { |
47 | S s2 = googleDecensor(s); |
48 | print("Preprocessing: " + s + " => " + s2); |
49 | ret s2; |
50 | }; |
51 | chatBot.templateID = #1027690; |
52 | chatBot.baseLink = botLink(); |
53 | chatBot.thoughtBot = new ThoughtBot; |
54 | |
// Hook client-side: every new bot message is fed to window.processNewStuff
// (defined in sayBotMsgsScript inside html()) for speech synthesis.
55 | chatBot.jsOnMsgHTML = "window.processNewStuff(src);"; |
56 | |
// Attach an Awesomplete autocomplete to the chat input with example commands.
57 | chatBot.onBotShown = [[ { |
58 | var input = $("#status_message")[0]; |
59 | console.log("input: " + input); |
60 | if (input) |
61 | new Awesomplete(input, { |
62 | minChars: 1, |
63 | list: [ |
64 | "I call you Fido", |
65 | "What is your name?", |
66 | 'add script "#1027704/SomeCruddieScripts/RepeatAfterMe"', |
67 | 'add script "#1027704/SomeCruddieScripts/GoPublic"', |
68 | 'clear scripts' |
69 | ] |
70 | }); |
71 | } ]]; |
72 | |
// HTML/JS injected after the chat heading: the speech-recognition toggle
// button (only when webkitSpeechRecognition exists), status icons, and a note.
73 | chatBot.afterHeading = "` + ('webkitSpeechRecognition' in window ? ` " + tag("button", "...", onclick := lineBreaksToSpaces([[ |
74 | startOrStopSpeechRecog(); |
75 | if (bigOn) { lastHadVoice = Date.now(); startVAD(); startUpdater(); humOn(); } |
76 | else stopVAD(); |
77 | ]]), type := 'button, class := 'speechOnBtn, disabled := 'disabled, display := 'inline) |
78 | /*+ hjs([[console.log("Updating"); window.srUpdate();]])*/ + "` : ``) + `" |
79 | + hdiv(hsnippetimg(#1102938, width := 24, height := 24, title := "Streaming audio to cloud"), style := "display: inline; visibility: hidden; margin-left: 10px", class := "listenStatus") |
80 | + (!showVadStatus ? "" : hdiv(hsnippetimg(#1102908, width := 24, height := 24, title := "Someone is speaking (either me or you)"), style := "display: inline; visibility: hidden; margin-left: 10px", class := "vadStatus")) |
81 | + hdiv(small("Note: All conversations are public rn " + targetBlank("https://www.youtube.com/watch?v=StxQerL0D-o", "(why)"))); |
82 | |
83 | chatBot.moreStuff = "window.srUpdate();"; |
84 | |
85 | chatBot.start(); |
86 | }} |
87 | } |
88 | |
// Main HTTP entry point: serves the chat page, the bot sub-requests under
// /bot/, static helper files, and the frameset view. Returns either a
// string (HTML) or a server response object.
89 | O html(virtual Request request) { try { |
90 | S uri = cast get(request, 'uri); |
91 | SS params = cast get(request, 'params); |
92 | print(+params); |
93 | |
94 | //S cookie = serveHttp_cookieHandling(); |
95 | |
96 | // new-style (server-generated) cookie |
97 | S newStyleCookie = cast call(request, 'cookie); |
98 | print("cookie", newStyleCookie); |
99 | |
// Prepare a bot request bound to this client (IP + cookie) up front;
// it is used both for /bot/ sub-URIs and to render the inline bot script.
100 | WebChatBot.Request botReq = chatBot.newRequest("/", params); |
101 | botReq.clientIP = assertNotNull("client ip", (S) dm_call(request, 'clientIP)); |
102 | botReq.cookie = newStyleCookie; |
103 | |
// Delegate /bot/* to the embedded WebChatBot.
104 | new Matches m; |
105 | S uri2 = appendSlash(uri); |
106 | if (startsWith(uri2, "/bot/", m)) { |
107 | botReq.uri = "/" + m.rest(); |
108 | ret botReq.html(); |
109 | } |
// Static assets for the Awesomplete autocomplete widget.
110 | if (eq(uri, "/awesomplete.css")) ret subBot_serveWithContentType(loadSnippet(#2000595), "text/css"); |
111 | if (eq(uri, "/awesomplete.js")) ret subBot_serveText(loadSnippet(#2000594)); |
112 | if (endsWith(uri, ".map")) ret ""; // don't 404 on source-map requests |
113 | |
114 | if (eq(uri, "/frames")) |
115 | ret serveFrameSet(params); |
116 | |
// JS run when the browser's speech recognition yields a transcript:
// "stop listening" is a voice command; everything else is sent as a chat message.
117 | S jsOnSpeech = [[ |
118 | console.log("Got speech: " + transcript); |
119 | if (transcript == 'stop listening') |
120 | stopVAD(); |
121 | else |
122 | window.submitAMsg(transcript); |
123 | lastHeard = transcript; |
124 | lastHeardWhen = Date.now(); |
125 | ]]; |
126 | |
// Defines window.processNewStuff: scans incremental bot HTML for the last
// bot-utterance and speaks it, unless ?quiet=1 was passed.
127 | S sayBotMsgsScript = [[ |
128 | window.processNewStuff = function(src) { |
129 | ]] + (eq(params.get('quiet), "1") ? "" : [[ |
130 | if ($("#speechResults") == null) return; // no speech |
131 | // we assume that webkit speech synthesis is present |
132 | // when there is webkit speech recognition |
133 | if (!bigOn) return; // not enabled |
134 | console.log("Got speech"); |
135 | var match = src.match(/\d+/); |
136 | if (match == null) return; |
137 | if (src.match(/NEW DIALOG -->/)) return; |
138 | console.log("Got incremental"); |
139 | var re = /bot-utterance">(.*?)</g; |
140 | var match = re.exec(src); |
141 | var lastUtterance = null; |
142 | while (match != null) { |
143 | lastUtterance = match[1]; |
144 | match = re.exec(src); |
145 | } |
146 | // TODO: properly drop HTML tags/HTML-decode |
147 | if (lastUtterance) |
148 | say(lastUtterance); |
149 | ]]) + [[ |
150 | }; |
151 | ]]; |
152 | |
153 | // old-style cookie (client-generated) |
154 | // S botScript = (S) chatBot.html("/", litmap(), returnJS := true); |
155 | |
// JS that embeds the chat bot into this page.
156 | S botScript = cast botReq.html(returnJS := true); |
157 | |
158 | int humVolume = defaultHumVolume; |
159 | |
160 | S googleClientID = cast call(request, 'googleClientID); |
161 | |
// Assemble the full page: head (jQuery, hum sound, cookie JS, Awesomplete,
// optional Google sign-in) + body (controls, stats, speech recog, VAD logic).
162 | ret hhtml(hmobilefix() + hhead( |
163 | htitle("CRUDDIE - I manage your anything") |
164 | + hLoadJQuery2() |
165 | + hjs_humWithFade(humVolume/100.0) |
166 | + hJsMakeCookie() |
167 | + [[<link rel="stylesheet" href="awesomplete.css" /><script src="awesomplete.js"></script>]] // took out async |
168 | + googleSignIn_header("", googleClientID) |
169 | ) |
170 | + hbody(hOnBottom( |
171 | p(hsnippetimage(#1102905)) |
172 | + p("Work in progress") |
173 | + p("Hum volume (sound when listening): " + htextinput("", humVolume, style := "width: 5ch", id := "humVolumeInput", onInput := "updateHumVolume(parseInt(this.value)/100.0)")) |
174 | |
175 | + p(hcheckboxWithText("antiFeedback", "Anti-feedback [when using speakers]", true, onclick := "antiFeedback = this.checked;")) |
176 | |
177 | // make checkbox label non-bold |
178 | + hcss("label { font-weight: normal !important; }") |
179 | |
180 | + stats() |
181 | + (empty(googleClientID) ? "" : |
182 | p(googleSignIn_signInButton(myLink() + "google-verify") + " " + googleSignIn_signOutTextLink())) |
183 | + hSpeechRecognition(jsOnSpeech, true, "en-US", false, |
184 | noWebKit := p("Use Chrome if you want speech recognition")) |
// Speech synthesis helper; meSpeaking is read by the anti-feedback logic below.
185 | + hjavascript([[ |
186 | function say(text) { |
187 | console.log("Saying: " + text); |
188 | var u = new SpeechSynthesisUtterance(text); |
189 | u.lang = 'en-US'; |
190 | u.onstart = function() { console.log("speech start"); meSpeaking = true; }; |
191 | u.onend = function() { meSpeaking = false; }; |
192 | window.speechSynthesis.speak(u); |
193 | } |
194 | ]] + sayBotMsgsScript) |
195 | + hjs(botScript) |
196 | + hVAD( |
197 | [[console.log("voice start"); $(".vadStatus").css("visibility", "visible");]], |
198 | [[console.log("voice stop"); $(".vadStatus").css("visibility", "hidden");]], |
199 | false) |
200 | + hjs_setTitleStatus() |
// Client-side listen/pause state machine; $-variables are substituted
// from the module's switchable fields via replaceDollarVars below.
201 | + hjs(replaceDollarVars([[ |
202 | var updater; |
203 | var lastHadVoice = 0; |
204 | var lastHeard, lastHeardWhen = 0; |
205 | var meSpeaking = false; |
206 | var antiFeedback = true; |
207 | |
208 | //audioMeterDebug = true; |
209 | |
210 | function startUpdater() { |
211 | if (updater) return; |
212 | console.log("Starting updater"); |
213 | updater = setInterval(vadMagicUpdate, $interval); |
214 | srPause = true; |
215 | } |
216 | |
217 | function stopUpdater() { |
218 | if (!updater) return; |
219 | console.log("Stopping updater"); |
220 | clearInterval(updater); |
221 | updater = null; |
222 | window.resetTitle(); |
223 | } |
224 | |
225 | function vadMagicUpdate() { |
226 | var now = Date.now(); |
227 | var hasVoice = vadHasVoice(); |
228 | var clipping = vadHasClipping(); |
229 | if (hasVoice) lastHadVoice = now; |
230 | var shouldListen1 = bigOn && (lastHadVoice >= now-$listenTime || lastHeardWhen >= now-$listenTimeAfterActualSpeech); |
231 | var shouldListen = !(meSpeaking && antiFeedback) && shouldListen1; |
232 | var titleStatus = ""; |
233 | if (lastHeardWhen >= now-$transcriptTitleShowTime) |
234 | titleStatus = lastHeard + " |"; |
235 | else if (shouldListen) |
236 | titleStatus = $listeningSymbol; |
237 | else if (bigOn) |
238 | titleStatus = $ear; |
239 | if (clipping) |
240 | titleStatus = "! " + titleStatus; |
241 | window.setTitleStatus(titleStatus); |
242 | if (srPause != !shouldListen) { |
243 | console.log(shouldListen ? "Listening" : "Not listening"); |
244 | srPause = !shouldListen; |
245 | srUpdate(); |
246 | } |
247 | if (shouldListen1) humOn(); else humOff(); |
248 | if (!bigOn) { stopUpdater(); return; } |
249 | } |
250 | |
251 | // debug mic level |
252 | /*setInterval(function() { |
253 | if (audioMeter) |
254 | console.log("Mic level: " + audioMeter.absLevel); |
255 | }, 1000);*/ |
256 | ]], |
257 | interval := vadUpdateInterval, |
258 | listenTime := toMS(listenTime), |
259 | listenTimeAfterActualSpeech := toMS(listenTimeAfterActualSpeech), |
260 | transcriptTitleShowTime := toMS(transcriptTitleShowTime), |
261 | listeningSymbol := jsQuote(/*"[LISTENING]"*/unicode_cloud()), |
262 | ear := jsQuote(unicode_ear()))) |
263 | )/*, onLoad := "startAwesomplete()"*/)); |
264 | } catch e { printStackTrace(e); throw rethrow(e); } |
265 | } |
266 | |
// Derives the per-user case ID (used as the workspace key for the
// frontend/backend modules) by salting and hashing the session cookie.
// NOTE(review): start{} declares a LOCAL "S salt", shadowing the field,
// so the field may still be null here — verify the salt actually gets set.
267 | S cookieToCaseID(S cookie) { |
268 | ret md5(cookie + salt); |
269 | } |
270 | |
// Per-session context: maps a cookie to a case ID and spins up (or reuses)
// the per-user frontend/backend module pair, wiring them together.
271 | class Request { |
272 | S cookie, caseID; |
273 | S frontend, backend; // module IDs |
274 | |
// Constructor (JavaX syntax): *cookie auto-assigns the field.
275 | *(S *cookie) { |
276 | caseID = cookieToCaseID(cookie); |
// Create/find the two per-case modules and connect them.
277 | frontend = dm_makeModuleWithParams_systemQ(frontendModuleLibID, +caseID); |
278 | backend = dm_makeModuleWithParams_systemQ(backendModuleLibID, +caseID); |
279 | dm_call(frontend, 'connectToBackend, backend); |
// Seed a fresh frontend with default commands and all StandardScripts.
280 | dm_call(frontend, 'importCmdsFromSnippetIfEmpty, cmdsSnippetID); |
281 | dm_call(frontend, 'addScripts, collect scriptID(list StandardScript())); |
// Record the conversation for this cookie and expose its user messages
// to the frontend via a swappable function.
282 | Conversation conv = uniq Conversation(+cookie); |
283 | forwardSwappableFunctionToObject(dm_mod(frontend), |
284 | 'chatLog_userMessagesOnly, func -> LS { |
285 | map(m -> m.text, filter(conv.allMsgs(), m -> m.fromUser)) |
286 | }, 'get); |
287 | printVars(+caseID, +backend); |
288 | } |
289 | } |
290 | |
// Adapter plugged into WebChatBot (chatBot.thoughtBot): keeps the current
// per-session Request in a ThreadLocal and forwards chat traffic to the
// session's frontend/backend modules.
291 | class ThoughtBot { |
292 | new ThreadLocal<Request> request; // session context of the thread's current HTTP request |
293 | |
// Called by WebChatBot at the start of each request; builds the session context.
294 | void setSession(S cookie, SS params) { |
295 | //session.set(uniq_sync(Session, +cookie)); |
296 | request.set(new Request(cookie)); |
297 | } |
298 | |
// First message shown in a new chat: the backend's "stats" answer.
299 | S initialMessage() { |
300 | //ret "Hello from module " + request->backend; |
301 | ret (S) dm_call(request->backend, 'answer, "stats"); |
302 | } |
303 | |
// Forwards a user utterance to the session's frontend module.
304 | S answer(S s) { |
305 | ret (S) dm_call(request->frontend, 'answer, s); |
306 | } |
307 | } |
308 | |
// HTML fragment shown on the main page: server temperature plus how many
// cruddie instances exist and how many frontends are currently loaded.
309 | S stats() { |
310 | ret p(joinWithBR( |
311 | "Server temperature is " + dm_cpuTemperature(), |
312 | n2(numberOfCruddies(), "cruddie") + ", " + n2(vmBus_countResponses chatBotFrontend()) + " loaded", |
313 | )); |
314 | } |
315 | |
// Counts existing per-case frontend workspaces by counting directories in
// the frontend program's data dir (one dir per case ID).
316 | int numberOfCruddies() { |
317 | ret countDirsInDir(getProgramDir(beforeSlash(frontendModuleLibID))); |
318 | } |
319 | |
// Admin UI: the default module view plus one tab per CRUD panel,
// with record counts in the tab titles.
320 | visualize { |
321 | JComponent c = jtabs("Main", super.visualize(), |
322 | "Standard Scripts", standardScriptsCRUD.visualizeWithCountInTab(), |
323 | "Conversations", conversationsCRUD.visualizeWithCountInTab()); |
// Populate the counts after the tabs exist.
324 | standardScriptsCRUD.updateTabTitle(); |
325 | conversationsCRUD.updateTabTitle(); |
326 | ret c; |
327 | } |
328 | |
// Serves /frames: a 3-column frameset with the chat page (550px, original
// query params forwarded) in the middle and empty monitor frames either side.
329 | S serveFrameSet(SS params) { |
330 | ret hhtml(hhead_title("CRUDDIE with frames") + |
331 | tag frameset( |
332 | tag frame("", name := "leftmonitor") + |
333 | tag frame("", src := appendParamsToURL(myLink(), params)) + |
334 | tag frame("", name := "rightmonitor"), cols := "*,550,*")); |
335 | } |
336 | |
337 | // API / user-callable |
338 | |
// Deletes ALL Conversation records from the module DB. Destructive and
// irreversible; intended to be invoked manually by an operator.
339 | void deleteAllConversations { |
340 | cdelete(Conversation); |
341 | } |
342 | } |
Began life as a copy of #1027610
download show line numbers debug dex old transpilations
Travelled to 7 computer(s): bhatertpkbcr, mqqgnosmbjvj, pyentgdyhuwx, pzhvpgtvlbxg, tvejysmllsmz, vouqrxazstgt, xrpafgyirdlv
No comments. add comment
Snippet ID: | #1028961 |
Snippet name: | cruddie.botcompany.de [LIVE] |
Eternal ID of this version: | #1028961/34 |
Text MD5: | 9dfa57c83aea0e345bd2d345239a2d27 |
Transpilation MD5: | 61f0d2beb1e79c5882dc5c27c5044884 |
Author: | stefan |
Category: | javax |
Type: | JavaX source code (Dynamic Module) |
Public (visible to everyone): | Yes |
Archived (hidden from active list): | No |
Created/modified: | 2021-09-05 09:48:16 |
Source code size: | 12793 bytes / 342 lines |
Pitched / IR pitched: | No / No |
Views / Downloads: | 346 / 44222 |
Version history: | 33 change(s) |
Referenced in: | [show references] |