Not logged in.  Login/Logout/Register | List snippets | | Create snippet | Upload image | Upload data

342
LINES

< > BotCompany Repo | #1028961 // cruddie.botcompany.de [LIVE]

JavaX source code (Dynamic Module) [tags: use-pretranspiled] - run with: Stefan's OS

Uses 1113K of libraries. Click here for Pure Java version (19810L/117K).

!7

// Persistent DB concept: one "standard script" identified by snippet ID.
// All StandardScript records are pushed into every freshly made chat
// frontend (see the Request constructor's 'addScripts call below).
concept StandardScript {
  // snippet ID of the script, presumably of the form
  // "#1027704/SomeCruddieScripts/RepeatAfterMe" (matches the autocomplete
  // examples in the chat UI) — TODO confirm exact format
  S scriptID;
}

// Dynamic module behind cruddie.botcompany.de: serves a web chat bot with
// browser speech recognition/synthesis, spins up a per-cookie frontend/backend
// module pair, and shows CRUD panels for standard scripts and conversations.
cmodule2 Cruddie > DynPrintLogAndEnabled {
  set flag NoNanoHTTPD.
  !include #1029545 // API for Eleu
  
  // Secret salt persisted in salt.txt; mixed into cookies by cookieToCaseID()
  // so case IDs can't be derived from cookies alone.
  transient S salt;
  transient WebChatBot chatBot;
  transient CRUD<StandardScript> standardScriptsCRUD;
  transient CRUD<Conversation> conversationsCRUD;
  
  switchable int vadUpdateInterval = 100;
  switchable double listenTime = 3.0; // listen for 3 seconds after voice activity
  switchable double listenTimeAfterActualSpeech = 10.0; // listen for 10 seconds after actual speech recognized
  switchable double transcriptTitleShowTime = 5.0; // how long to show recognized text in window title
  switchable bool showVadStatus;
  switchable int defaultHumVolume = 0;
  
  S myLink() { ret "https://cruddie.site/"; }
  S botLink() { ret "bot"; /*ret appendWithSlash(myLink(), "bot");*/ }
  
  switchable S frontendModuleLibID = "#1027675/ChatBotFrontend";
  switchable S backendModuleLibID = "#1027591/DynamicClassesMultiCRUD";
  transient S cmdsSnippetID = #1027616;

  // Module init: open the DB, set up the CRUDs, then (in a background thread)
  // load or create the salt and start the chat bot if the module is enabled.
  start {
    db();
    standardScriptsCRUD = new CRUD(StandardScript);
    conversationsCRUD = new CRUD(Conversation);
    conversationsCRUD.ensureIndexed = true;
    thread enter { pcall {
      File saltFile = secretProgramFile("salt.txt");
      // FIX: assign the *field*, not a shadowing local. The original read
      // "S salt = trimLoadTextFile(saltFile);", which declared a local that
      // shadowed the transient field above — the field stayed null, so
      // cookieToCaseID() effectively hashed cookie + "null" and the persisted
      // salt was never actually used.
      salt = trimLoadTextFile(saltFile);
      if (empty(salt)) {
        saveTextFile(saltFile, salt = randomID());
        print("Made salt"); 
      }
      dm_restartOnFieldChange enabled();
      if (!enabled) ret;
      chatBot = new WebChatBot;
      chatBot.forceCookie = true;
      // Undo Google speech recognition's censoring before the bot sees the text
      chatBot.preprocess = s -> {
        S s2 = googleDecensor(s);
        print("Preprocessing: " + s + " => " + s2);
        ret s2;
      };
      chatBot.templateID = #1027690;
      chatBot.baseLink = botLink();
      chatBot.thoughtBot = new ThoughtBot;
      
      chatBot.jsOnMsgHTML = "window.processNewStuff(src);";
      
      // Attach an Awesomplete autocomplete to the chat input with some
      // example commands the user can send.
      chatBot.onBotShown = [[ {
        var input = $("#status_message")[0];
        console.log("input: " + input);
        if (input)
          new Awesomplete(input, {
            minChars: 1,
            list: [
              "I call you Fido",
              "What is your name?",
              'add script "#1027704/SomeCruddieScripts/RepeatAfterMe"',
              'add script "#1027704/SomeCruddieScripts/GoPublic"',
              'clear scripts'
            ]
          });
      } ]];
      
      // Speech button + status icons; only rendered when the browser has
      // webkitSpeechRecognition (checked client-side in the template string).
      chatBot.afterHeading = "` + ('webkitSpeechRecognition' in window ? ` &nbsp; " + tag("button", "...", onclick := lineBreaksToSpaces([[
        startOrStopSpeechRecog();
        if (bigOn) { lastHadVoice = Date.now(); startVAD(); startUpdater(); humOn(); }
        else stopVAD();
      ]]), type := 'button, class := 'speechOnBtn, disabled := 'disabled, display := 'inline)
        /*+ hjs([[console.log("Updating"); window.srUpdate();]])*/ + "` : ``) + `"
        + hdiv(hsnippetimg(#1102938, width := 24, height := 24, title := "Streaming audio to cloud"), style := "display: inline; visibility: hidden; margin-left: 10px", class := "listenStatus")
        + (!showVadStatus ? "" : hdiv(hsnippetimg(#1102908, width := 24, height := 24, title := "Someone is speaking (either me or you)"), style := "display: inline; visibility: hidden; margin-left: 10px", class := "vadStatus"))
        + hdiv(small("Note: All conversations are public rn " + targetBlank("https://www.youtube.com/watch?v=StxQerL0D-o", "(why)")));
        
      chatBot.moreStuff = "window.srUpdate();";
  
      chatBot.start();
    }}
  }

  // Main HTTP entry point: routes /bot/* to the chat bot, serves the
  // Awesomplete assets and the frameset, otherwise renders the full page
  // (speech recognition/synthesis JS, VAD loop, hum, stats, sign-in).
  O html(virtual Request request) { try {
    S uri = cast get(request, 'uri);
    SS params = cast get(request, 'params);
    print(+params);
    
    //S cookie = serveHttp_cookieHandling();
    
    // new-style (server-generated) cookie
    S newStyleCookie = cast call(request, 'cookie);
    print("cookie", newStyleCookie);
    
    WebChatBot.Request botReq = chatBot.newRequest("/", params);
    botReq.clientIP = assertNotNull("client ip", (S) dm_call(request, 'clientIP));
    botReq.cookie = newStyleCookie;
    
    new Matches m;
    S uri2 = appendSlash(uri);
    if (startsWith(uri2, "/bot/", m)) {
      botReq.uri = "/" + m.rest();
      ret botReq.html();
    }
    if (eq(uri, "/awesomplete.css")) ret subBot_serveWithContentType(loadSnippet(#2000595), "text/css");
    if (eq(uri, "/awesomplete.js")) ret subBot_serveText(loadSnippet(#2000594));
    if (endsWith(uri, ".map")) ret "";
    
    if (eq(uri, "/frames"))
      ret serveFrameSet(params);
      
    // Client-side handler for recognized speech: "stop listening" disables
    // VAD, anything else is submitted as a chat message.
    S jsOnSpeech = [[
      console.log("Got speech: " + transcript);
      if (transcript == 'stop listening')
        stopVAD();
      else
        window.submitAMsg(transcript);
      lastHeard = transcript;
      lastHeardWhen = Date.now();
    ]];
    
    // Speaks the bot's latest utterance via speech synthesis unless the
    // "quiet" parameter is set.
    S sayBotMsgsScript = [[
      window.processNewStuff = function(src) {
      ]] + (eq(params.get('quiet), "1") ? "" : [[
        if ($("#speechResults") == null) return; // no speech
        // we assume that webkit speech synthesis is present
        // when there is webkit speech recognition
        if (!bigOn) return; // not enabled
        console.log("Got speech");
        var match = src.match(/\d+/);
        if (match == null) return;
        if (src.match(/NEW DIALOG -->/)) return;
        console.log("Got incremental");
        var re = /bot-utterance">(.*?)</g;
        var match = re.exec(src);
        var lastUtterance = null;
        while (match != null) {
          lastUtterance = match[1];
          match = re.exec(src);
        }
        // TODO: properly drop HTML tags/HTML-decode
        if (lastUtterance)
          say(lastUtterance);
      ]]) + [[
      };
    ]];
    
    // old-style cookie (client-generated)
    // S botScript = (S) chatBot.html("/", litmap(), returnJS := true);
    
    S botScript = cast botReq.html(returnJS := true);
    
    int humVolume = defaultHumVolume;
    
    S googleClientID = cast call(request, 'googleClientID);
    
    ret hhtml(hmobilefix() + hhead(
        htitle("CRUDDIE - I manage your anything")
      + hLoadJQuery2()
      + hjs_humWithFade(humVolume/100.0)
      + hJsMakeCookie()
      + [[<link rel="stylesheet" href="awesomplete.css" /><script src="awesomplete.js"></script>]] // took out async
      + googleSignIn_header("", googleClientID)
      )
      + hbody(hOnBottom(
        p(hsnippetimage(#1102905))
      + p("Work in progress")
      + p("Hum volume (sound when listening): " + htextinput("", humVolume, style := "width: 5ch", id := "humVolumeInput", onInput := "updateHumVolume(parseInt(this.value)/100.0)"))
      
      + p(hcheckboxWithText("antiFeedback", "Anti-feedback [when using speakers]", true, onclick := "antiFeedback = this.checked;"))
      
      // make checkbox label non-bold
      + hcss("label { font-weight: normal !important; }")
      
      + stats()
      + (empty(googleClientID) ? "" :
        p(googleSignIn_signInButton(myLink() + "google-verify") + " " + googleSignIn_signOutTextLink()))
      + hSpeechRecognition(jsOnSpeech, true, "en-US", false,
        noWebKit := p("Use Chrome if you want speech recognition"))
      + hjavascript([[
        function say(text) {
          console.log("Saying: " + text);
          var u = new SpeechSynthesisUtterance(text);
          u.lang = 'en-US';
          u.onstart = function() { console.log("speech start"); meSpeaking = true; };
          u.onend = function() { meSpeaking = false; };
          window.speechSynthesis.speak(u);
        }
      ]] + sayBotMsgsScript)
      + hjs(botScript)
      + hVAD(
        [[console.log("voice start"); $(".vadStatus").css("visibility", "visible");]],
        [[console.log("voice stop"); $(".vadStatus").css("visibility", "hidden");]],
        false)
      + hjs_setTitleStatus()
      // Periodic VAD loop: decides when to listen, updates the window title
      // and toggles the hum; $-variables are substituted server-side below.
      + hjs(replaceDollarVars([[
        var updater;
        var lastHadVoice = 0;
        var lastHeard, lastHeardWhen = 0;
        var meSpeaking = false;
        var antiFeedback = true;
        
        //audioMeterDebug = true;
        
        function startUpdater() {
          if (updater) return;
          console.log("Starting updater");
          updater = setInterval(vadMagicUpdate, $interval);
          srPause = true;
        }
        
        function stopUpdater() {
          if (!updater) return;
          console.log("Stopping updater");
          clearInterval(updater);
          updater = null;
          window.resetTitle();
        }
        
        function vadMagicUpdate() {
          var now = Date.now();
          var hasVoice = vadHasVoice();
          var clipping = vadHasClipping();
          if (hasVoice) lastHadVoice = now;
          var shouldListen1 = bigOn && (lastHadVoice >= now-$listenTime || lastHeardWhen >= now-$listenTimeAfterActualSpeech);
          var shouldListen = !(meSpeaking && antiFeedback) && shouldListen1;
          var titleStatus = "";
          if (lastHeardWhen >= now-$transcriptTitleShowTime)
            titleStatus = lastHeard + " |";
          else if (shouldListen)
            titleStatus = $listeningSymbol;
          else if (bigOn)
            titleStatus = $ear;
          if (clipping)
            titleStatus = "! " + titleStatus;
          window.setTitleStatus(titleStatus);
          if (srPause != !shouldListen) {
            console.log(shouldListen ? "Listening" : "Not listening");
            srPause = !shouldListen;
            srUpdate();
          }
          if (shouldListen1) humOn(); else humOff();
          if (!bigOn) { stopUpdater(); return; }
        }
        
        // debug mic level
        /*setInterval(function() {
          if (audioMeter)
            console.log("Mic level: " + audioMeter.absLevel);
        }, 1000);*/
      ]],
        interval := vadUpdateInterval,
        listenTime := toMS(listenTime),
        listenTimeAfterActualSpeech := toMS(listenTimeAfterActualSpeech),
        transcriptTitleShowTime := toMS(transcriptTitleShowTime),
        listeningSymbol := jsQuote(/*"[LISTENING]"*/unicode_cloud()),
        ear := jsQuote(unicode_ear())))
      )/*, onLoad := "startAwesomplete()"*/));
  } catch e { printStackTrace(e); throw rethrow(e); }
  }
  
  // Derives an anonymous per-user case ID from the session cookie.
  // Depends on the salt field being loaded in start {} (see FIX above).
  S cookieToCaseID(S cookie) {
    ret md5(cookie + salt);
  }
  
  // Per-request session: makes (or reuses) a frontend/backend module pair
  // keyed by the cookie's case ID and wires them together.
  class Request {
    S cookie, caseID;
    S frontend, backend; // module IDs
    
    *(S *cookie) {
      caseID = cookieToCaseID(cookie);
      frontend = dm_makeModuleWithParams_systemQ(frontendModuleLibID, +caseID);
      backend = dm_makeModuleWithParams_systemQ(backendModuleLibID, +caseID);
      dm_call(frontend, 'connectToBackend, backend);
      dm_call(frontend, 'importCmdsFromSnippetIfEmpty, cmdsSnippetID);
      dm_call(frontend, 'addScripts, collect scriptID(list StandardScript()));
      Conversation conv = uniq Conversation(+cookie);
      // Let the frontend pull the user-only side of this conversation's log
      forwardSwappableFunctionToObject(dm_mod(frontend),
        'chatLog_userMessagesOnly, func -> LS {
          map(m -> m.text, filter(conv.allMsgs(), m -> m.fromUser))
        }, 'get);
      printVars(+caseID, +backend);
    }
  }
  
  // Adapter handed to WebChatBot: holds the current Request per thread and
  // delegates answering to the per-user frontend/backend modules.
  class ThoughtBot {
    new ThreadLocal<Request> request;
    
    void setSession(S cookie, SS params) {
      //session.set(uniq_sync(Session, +cookie));
      request.set(new Request(cookie));
    }
    
    S initialMessage() {
      //ret "Hello from module " + request->backend;
      ret (S) dm_call(request->backend, 'answer, "stats");
    }
    
    S answer(S s) {
      ret (S) dm_call(request->frontend, 'answer, s);
    }
  }
  
  // Small HTML stats line shown on the landing page.
  S stats() {
    ret p(joinWithBR(
      "Server temperature is " + dm_cpuTemperature(),
      n2(numberOfCruddies(), "cruddie") + ", " + n2(vmBus_countResponses chatBotFrontend()) + " loaded",
    ));
  }
  
  // Counts created cruddies by counting frontend-module data directories.
  int numberOfCruddies() {
    ret countDirsInDir(getProgramDir(beforeSlash(frontendModuleLibID)));
  }
  
  // Adds CRUD tabs for standard scripts and conversations to the base UI.
  visualize {
    JComponent c = jtabs("Main", super.visualize(),
      "Standard Scripts", standardScriptsCRUD.visualizeWithCountInTab(),
      "Conversations", conversationsCRUD.visualizeWithCountInTab());
    standardScriptsCRUD.updateTabTitle();
    conversationsCRUD.updateTabTitle();
    ret c;
  }
      
  // Three-column frameset with the chat page in the middle.
  S serveFrameSet(SS params) {
    ret hhtml(hhead_title("CRUDDIE with frames") +
      tag frameset(
        tag frame("", name := "leftmonitor") +
        tag frame("", src := appendParamsToURL(myLink(), params)) +
        tag frame("", name := "rightmonitor"), cols := "*,550,*"));
  }
  
  // API / user-callable
  
  void deleteAllConversations {
    cdelete(Conversation);
  }
}

Author comment

Began life as a copy of #1027610

download  show line numbers  debug dex  old transpilations   

Travelled to 7 computer(s): bhatertpkbcr, mqqgnosmbjvj, pyentgdyhuwx, pzhvpgtvlbxg, tvejysmllsmz, vouqrxazstgt, xrpafgyirdlv

No comments. add comment

Snippet ID: #1028961
Snippet name: cruddie.botcompany.de [LIVE]
Eternal ID of this version: #1028961/34
Text MD5: 9dfa57c83aea0e345bd2d345239a2d27
Transpilation MD5: 61f0d2beb1e79c5882dc5c27c5044884
Author: stefan
Category: javax
Type: JavaX source code (Dynamic Module)
Public (visible to everyone): Yes
Archived (hidden from active list): No
Created/modified: 2021-09-05 09:48:16
Source code size: 12793 bytes / 342 lines
Pitched / IR pitched: No / No
Views / Downloads: 268 / 44116
Version history: 33 change(s)
Referenced in: [show references]