!7

concept StandardScript { S scriptID; }

cmodule2 Cruddie > DynPrintLogAndEnabled {
  set flag NoNanoHTTPD.
  !include #1029545 // API for Eleu

  transient S salt;
  transient WebChatBot chatBot;
  transient CRUD standardScriptsCRUD;
  transient CRUD conversationsCRUD;

  switchable int vadUpdateInterval = 100;
  switchable double listenTime = 3.0; // listen for 3 seconds after voice activity
  switchable double listenTimeAfterActualSpeech = 10.0; // listen for 10 seconds after actual speech recognized
  switchable double transcriptTitleShowTime = 5.0; // how long to show recognized text in window title
  switchable bool showVadStatus;
  switchable int defaultHumVolume = 0;

  S myLink() { ret "https://cruddie.site/"; }
  S botLink() { ret "bot"; /*ret appendWithSlash(myLink(), "bot");*/ }

  switchable S frontendModuleLibID = "#1027675/ChatBotFrontend";
  switchable S backendModuleLibID = "#1027591/DynamicClassesMultiCRUD";
  transient S cmdsSnippetID = #1027616;

  start {
    db();
    standardScriptsCRUD = new CRUD(StandardScript);
    conversationsCRUD = new CRUD(Conversation);
    conversationsCRUD.ensureIndexed = true;

    thread enter { pcall {
      // load or create the salt used by cookieToCaseID()
      File saltFile = secretProgramFile("salt.txt");
      salt = trimLoadTextFile(saltFile);
      if (empty(salt)) {
        saveTextFile(saltFile, salt = randomID());
        print("Made salt");
      }

      dm_restartOnFieldChange enabled();
      if (!enabled) ret;

      chatBot = new WebChatBot;
      chatBot.forceCookie = true;
      chatBot.preprocess = s -> {
        S s2 = googleDecensor(s);
        print("Preprocessing: " + s + " => " + s2);
        ret s2;
      };
      chatBot.templateID = #1027690;
      chatBot.baseLink = botLink();
      chatBot.thoughtBot = new ThoughtBot;
      chatBot.jsOnMsgHTML = "window.processNewStuff(src);";

      chatBot.onBotShown = [[ {
        var input = $("#status_message")[0];
        console.log("input: " + input);
        if (input)
          new Awesomplete(input, {
            minChars: 1,
            list: [
              "I call you Fido",
              "What is your name?",
              'add script "#1027704/SomeCruddieScripts/RepeatAfterMe"',
              'add script "#1027704/SomeCruddieScripts/GoPublic"',
              'clear scripts'
            ]
          });
      } ]];

      chatBot.afterHeading = "` + ('webkitSpeechRecognition' in window ? `   "
        + tag("button", "...", onclick := lineBreaksToSpaces([[
            startOrStopSpeechRecog();
            if (bigOn) {
              lastHadVoice = Date.now();
              startVAD();
              startUpdater();
              humOn();
            } else
              stopVAD();
          ]]),
          type := 'button, class := 'speechOnBtn, disabled := 'disabled, display := 'inline)
        /*+ hjs([[console.log("Updating"); window.srUpdate();]])*/
        + "` : ``) + `"
        + hdiv(hsnippetimg(#1102938, width := 24, height := 24, title := "Streaming audio to cloud"),
            style := "display: inline; visibility: hidden; margin-left: 10px", class := "listenStatus")
        + (!showVadStatus ?
"" : hdiv(hsnippetimg(#1102908, width := 24, height := 24, title := "Someone is speaking (either me or you)"), style := "display: inline; visibility: hidden; margin-left: 10px", class := "vadStatus")) + hdiv(small("Note: All conversations are public rn " + targetBlank("https://www.youtube.com/watch?v=StxQerL0D-o", "(why)"))); chatBot.moreStuff = "window.srUpdate();"; chatBot.start(); }} } O html(virtual Request request) { try { S uri = cast get(request, 'uri); SS params = cast get(request, 'params); print(+params); //S cookie = serveHttp_cookieHandling(); // new-style (server-generated) cookie S newStyleCookie = cast call(request, 'cookie); print("cookie", newStyleCookie); WebChatBot.Request botReq = chatBot.newRequest("/", params); botReq.clientIP = assertNotNull("client ip", (S) dm_call(request, 'clientIP)); botReq.cookie = newStyleCookie; new Matches m; S uri2 = appendSlash(uri); if (startsWith(uri2, "/bot/", m)) { botReq.uri = "/" + m.rest(); ret botReq.html(); } if (eq(uri, "/awesomplete.css")) ret subBot_serveWithContentType(loadSnippet(#2000595), "text/css"); if (eq(uri, "/awesomplete.js")) ret subBot_serveText(loadSnippet(#2000594)); if (endsWith(uri, ".map")) ret ""; if (eq(uri, "/frames")) ret serveFrameSet(params); S jsOnSpeech = [[ console.log("Got speech: " + transcript); if (transcript == 'stop listening') stopVAD(); else window.submitAMsg(transcript); lastHeard = transcript; lastHeardWhen = Date.now(); ]]; S sayBotMsgsScript = [[ window.processNewStuff = function(src) { ]] + (eq(params.get('quiet), "1") ? "" : [[ if ($("#speechResults") == null) return; // no speech // we assume that webkit speech synthesis is present // when there is webkit speech recognition if (!bigOn) return; // not enabled console.log("Got speech"); var match = src.match(/\d+/); if (match == null) return; if (src.match(/NEW DIALOG -->/)) return; console.log("Got incremental"); var re = /bot-utterance">(.*?)]] // took out async + googleSignIn_header("", googleClientID) ) + hbody(hOnBottom( p(hsnippetimage(#1102905)) + p("Work in progress") + p("Hum volume (sound when listening): " + htextinput("", humVolume, style := "width: 5ch", id := "humVolumeInput", onInput := "updateHumVolume(parseInt(this.value)/100.0)")) + p(hcheckboxWithText("antiFeedback", "Anti-feedback [when using speakers]", true, onclick := "antiFeedback = this.checked;")) // make checkbox label non-bold + hcss("label { font-weight: normal !important; }") + stats() + (empty(googleClientID) ? 
"" : p(googleSignIn_signInButton(myLink() + "google-verify") + " " + googleSignIn_signOutTextLink())) + hSpeechRecognition(jsOnSpeech, true, "en-US", false, noWebKit := p("Use Chrome if you want speech recognition")) + hjavascript([[ function say(text) { console.log("Saying: " + text); var u = new SpeechSynthesisUtterance(text); u.lang = 'en-US'; u.onstart = function() { console.log("speech start"); meSpeaking = true; }; u.onend = function() { meSpeaking = false; }; window.speechSynthesis.speak(u); } ]] + sayBotMsgsScript) + hjs(botScript) + hVAD( [[console.log("voice start"); $(".vadStatus").css("visibility", "visible");]], [[console.log("voice stop"); $(".vadStatus").css("visibility", "hidden");]], false) + hjs_setTitleStatus() + hjs(replaceDollarVars([[ var updater; var lastHadVoice = 0; var lastHeard, lastHeardWhen = 0; var meSpeaking = false; var antiFeedback = true; //audioMeterDebug = true; function startUpdater() { if (updater) return; console.log("Starting updater"); updater = setInterval(vadMagicUpdate, $interval); srPause = true; } function stopUpdater() { if (!updater) return; console.log("Stopping updater"); clearInterval(updater); updater = null; window.resetTitle(); } function vadMagicUpdate() { var now = Date.now(); var hasVoice = vadHasVoice(); var clipping = vadHasClipping(); if (hasVoice) lastHadVoice = now; var shouldListen1 = bigOn && (lastHadVoice >= now-$listenTime || lastHeardWhen >= now-$listenTimeAfterActualSpeech); var shouldListen = !(meSpeaking && antiFeedback) && shouldListen1; var titleStatus = ""; if (lastHeardWhen >= now-$transcriptTitleShowTime) titleStatus = lastHeard + " |"; else if (shouldListen) titleStatus = $listeningSymbol; else if (bigOn) titleStatus = $ear; if (clipping) titleStatus = "! " + titleStatus; window.setTitleStatus(titleStatus); if (srPause != !shouldListen) { console.log(shouldListen ? 
"Listening" : "Not listening"); srPause = !shouldListen; srUpdate(); } if (shouldListen1) humOn(); else humOff(); if (!bigOn) { stopUpdater(); return; } } // debug mic level /*setInterval(function() { if (audioMeter) console.log("Mic level: " + audioMeter.absLevel); }, 1000);*/ ]], interval := vadUpdateInterval, listenTime := toMS(listenTime), listenTimeAfterActualSpeech := toMS(listenTimeAfterActualSpeech), transcriptTitleShowTime := toMS(transcriptTitleShowTime), listeningSymbol := jsQuote(/*"[LISTENING]"*/unicode_cloud()), ear := jsQuote(unicode_ear()))) )/*, onLoad := "startAwesomplete()"*/)); } catch e { printStackTrace(e); throw rethrow(e); } } S cookieToCaseID(S cookie) { ret md5(cookie + salt); } class Request { S cookie, caseID; S frontend, backend; // module IDs *(S *cookie) { caseID = cookieToCaseID(cookie); frontend = dm_makeModuleWithParams_systemQ(frontendModuleLibID, +caseID); backend = dm_makeModuleWithParams_systemQ(backendModuleLibID, +caseID); dm_call(frontend, 'connectToBackend, backend); dm_call(frontend, 'importCmdsFromSnippetIfEmpty, cmdsSnippetID); dm_call(frontend, 'addScripts, collect scriptID(list StandardScript())); Conversation conv = uniq Conversation(+cookie); forwardSwappableFunctionToObject(dm_mod(frontend), 'chatLog_userMessagesOnly, func -> LS { map(m -> m.text, filter(conv.allMsgs(), m -> m.fromUser)) }, 'get); printVars(+caseID, +backend); } } class ThoughtBot { new ThreadLocal request; void setSession(S cookie, SS params) { //session.set(uniq_sync(Session, +cookie)); request.set(new Request(cookie)); } S initialMessage() { //ret "Hello from module " + request->backend; ret (S) dm_call(request->backend, 'answer, "stats"); } S answer(S s) { ret (S) dm_call(request->frontend, 'answer, s); } } S stats() { ret p(joinWithBR( "Server temperature is " + dm_cpuTemperature(), n2(numberOfCruddies(), "cruddie") + ", " + n2(vmBus_countResponses chatBotFrontend()) + " loaded", )); } int numberOfCruddies() { ret countDirsInDir(getProgramDir(beforeSlash(frontendModuleLibID))); } visualize { JComponent c = jtabs("Main", super.visualize(), "Standard Scripts", standardScriptsCRUD.visualizeWithCountInTab(), "Conversations", conversationsCRUD.visualizeWithCountInTab()); standardScriptsCRUD.updateTabTitle(); conversationsCRUD.updateTabTitle(); ret c; } S serveFrameSet(SS params) { ret hhtml(hhead_title("CRUDDIE with frames") + tag frameset( tag frame("", name := "leftmonitor") + tag frame("", src := appendParamsToURL(myLink(), params)) + tag frame("", name := "rightmonitor"), cols := "*,550,*")); } // API / user-callable void deleteAllConversations { cdelete(Conversation); } }