!7

sbool greetCookies, theoryOn = true;

static new L thoughtBots;
static long started;
static Class theoryModule;
static Lock theoryLock = lock();
static double delay = 0;
static S selectedWord;
static int myPort = 4678;

p {
  started = sysNow();
  db();
  load('selectedWord);
  autoRestart(5);
  serveHttp(myPort);
  typicalDownload();
  bot("Smart Bot.");
  loadPage_forcedTimeout = 70000;
  thoughtBots.add(mc());
  loadBots(#1010753, #1010821, #1010825, #1001951);
  stefansChat_onLine_lookback = 2;
  stefansChat_onLine_safe(voidfunc(S text) {
    sleepSeconds(delay);
    print("> " + text);
    S answer = scanMultipleBots(thoughtBots, text);
    if (nempty(answer)) postToStefansChat(answer);
  });
  //stefansChat_post("Smart Bot Upgraded! Boot took: " + formatDouble(fromMS(sysNow()-started), 1) + " s");
  if (greetCookies) runInNewThread(#1010793); // Post on UAIP
  loadTheory();
}

svoid loadTheory {
  lock theoryLock;
  cleanUp(theoryModule);
  theoryModule = null;
  if (!theoryOn) ret;
  theoryModule = run(#1010963);
  setOpt(theoryModule, 'onUpdate, r { call(theoryModule, 'react) });
  call(theoryModule, 'react);
  print("Theory module loaded.");
}

static int lineCount;

answer {
  ++lineCount;
  bool authed = isTrue(stefansChat_onLine_fullParams->get('auth));
  int safety = 0;
  replaceloop: while (safety++ < 10) {
    s = trim(s);

    if (authed) {
      if (lineCount >= stefansChat_onLine_lookback) {
        if (eqic(s, "!restart")) { restart(); ret "Yo"; }
        if (eqic(s, "!theory")) ret "OK" with loadTheory();
        if (swic_trim(s, "!theory ", m)) ret callAnswerFunction(theoryModule, m.rest());
        if (eqic(s, "!peep")) {
          runAndCleanUp(#1010980);
          ret "OK, got " + fileLength(ai_chatAnalysis_peepHoleFile()) + " bytes";
        }
      }
      if "unlearn * *"
        ret "OK, was: " + uniq(MatchLearner, pattern := $1).examples.put($2, 'rejected);
    }

    if "Are you online?" ret "Yes";
    if (eqic(s, "!question")) ret random_gac36k();
    if (swic_trim(s, "!word ", m)) { selectedWord = $1; save('selectedWord); ret "OK"; }
    if (swic_trim(s, "!google ", m)) ret join(" - ", swapPair(first(quickGoogle($1))));
    if (swic_trim(s, "!triple ", m) || swic_trim(s, "!triples ", m)) {
      L elements = trimAll(splitAt(m.rest(), "->"));
      if (l(elements) != 3) ret "3 parts please";
      ret "[IMAGE] " + "http://ai1.lol/1007546?triples=" + urlencode(lines(elements));
    }
    if (swic_trim(s, "!web ", m)) {
      S id = $1;
      Web web = indexedWebWithGlobalID(id);
      if (web == null) ret "Not found";
      ret "[IMAGE] http://ai1.lol:" + myPort + "/diagram?id=" + id;
    }
    if (swic_trim(s, "!parse ", m)) ret ai_renderTriple(ai_tripelize(m.rest()));
    if (eqic(s, "!uptime")) ret n(secondsSinceSysTime(started), "second");
    if (eqic(s, "!typical-set")) ret sfu(diagramServer_typicalSet());
    if (swic_trim(s, "!store ", m)) {
      T3 triple = ai_tripelize(m.rest());
      postSoftwareMadeWeb(webFromTriple(triple), input := s);
      ret "OK, stored: " + ai_renderTriple(triple);
    }
    if (swic_trim(s, "!store-triple ", m)) {
      T3 triple = ai_parseArrowTriple(m.rest());
      postSoftwareMadeWeb(webFromTriple(triple), input := s);
      ret "OK, stored: " + ai_renderTriple(triple);
    }
    if (swic(s, "!nodes ", m)) {
      if (dropSuffixTrim("...", $1, m)) ret lstr(indexedNodesStartingWith($1));
      ret lstr(indexedNodes($1));
    }
    if (eqic(s, "!webs")) ret lstr(allIndexedWebs());
    if (swic(s, "!webs ", m)) ret joinWithSpace(collect(indexedWebs($1), 'globalID));
    if (swic(s, "!splitAtBaseFormVerb ", m)) ret sfu(splitAtBaseFormVerb($1));
    if (swic(s, "!maxRunlengthOfRepeatingChars ", m)) ret str(maxRunlengthOfRepeatingChars($1));
    if (swic(s, "!collapseWord ", m)) ret collapseWord($1);
    if (swic(s, "!gac ", m)) ret or2(first(scoredSearch(m.get(0), keys(gac36k()))), "-");
    s = ai_dropLeadingAdditions(s);

    if "cache size"
      ret n(keys(cachedNodeIndex()), "different term") + ", " + n(web_countNodes(allWebs_cached()), "node") + ", " + n(allWebs_cached(), "web");
    if "give me subtypes of ..." ret ai_renderList(ai_subtypesOf(m.rest()));
    if "give me a third person verb" ret random(thirdPersonVerbs());
    if (learnedFlexMatch("give me some *", s, m)) {
      S query = singular($1);
      ret ai_renderNodesList(concatLists(
        ai_index_search_dollarX("$X", "is a", query),
        ai_index_search_dollarX("$X", "is", a(query))));
    }
    if "authed?" ret authed ? "You are authed." : "You are not authed.";
    if "what is the singular of *" ret or2(getSingularFromWordHippo($1), singular($1));
    if (learnedFlexMatch("what is the relation between * and *", s, m)) {
      L l = ai_getRelation($1, $2);
      //ret ai_renderList(l);
      if (nempty(l)) ret $1 + " " + first(l) + " " + $2;
    }
    if "what unix day is it" ret str(unixDay());
    if "show me the ..." ret "What " + $1 + "?";
    if (learnedFlexMatch("What do * have?", s, m))
      ret ai_renderNodesList(ai_index_search_dollarX($1, "have", "$X"));
    if (learnedFlexMatch("What does * do?", s, m))
      ret ai_renderList(map(f web_nodePairToText,
        webs_search_dollarXY(webFromTriples($1, "$X", "$Y"), indexedWebs($1))));
    if (learnedFlexMatch("What is *", s, m) || learnedFlexMatch("Who is *", s, m))
      try answer ai_renderNodesList(ai_whatIs($1), "");

    S sf = web_text(first(webs_search_dollarX(webFromTriples("$X", "implements", quote(s)), indexedWebs(quote(s)))));
    if (startsWith(sf, "sf ", m)) ret str(makeAndCall($1));

    if (learnedFlexMatch("* how many visitors *", s) || match("how many visitors", s))
      ret str(ai1_cookiesToday_int());

    // Once more with generated webs (replacing $ vars)
    if (learnedFlexMatch("What is *", s, m)) {
      L extendedWebs = indexedWebsAfterVariableSubstitution($1);
      print("Have " + n(extendedWebs, "extended web") + " for " + quote($1));
      ret ai_renderNodesList(webs_search_dollarX(webFromTriples($1, "is", "$X"), extendedWebs), "I don't know");
    }

    S match;
    if (!ai_isQuestion_1(s) && learnedFlexMatch(match = "* is *", s, m)) {
      postSoftwareMadeWeb(webFromTriples($1, "is", $2), +match, input := s);
      ret "OK, stored.";
    }

    for (WebNode node : indexedNodes(s)) {
      S x = web_operandText(node, "replace with");
      if (x != null) {
        print("Replacing with " + x);
        s = x;
        continue replaceloop;
      }
    }
    break;
  }

  if (ai_isQuestion_1(s)) ret "No idea";
}
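// Illustrative sketch, not part of this program: each sub-bot loaded via
// loadBots below is assumed to be a regular JavaX chat bot exposing an
// answer function; scanMultipleBots presumably asks every bot in
// thoughtBots in turn and the first non-empty reply is posted to the chat.
// A minimal sub-bot under that assumption (the "ping"/"pong" rule is purely
// hypothetical):
//
//   !7
//   answer {
//     if "ping" ret "pong";
//   }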
svoid loadBots(S... botIDs) {
  for (S id : botIDs) pcall {
    thoughtBots.add(runSubBot(id));
  }
}

// s = uri
static NanoHTTPD.Response html(S s, SS params) {
  if "thoughts" ret serveHTML(html_thoughts());
  if "diagram" {
    S id = params.get("id");
    Web web = indexedWebWithGlobalID(id);
    ret serveJPEG(webToCAL(web).makeImage(600, 400));
  }
  if "log" ret serveText_direct(printLog());
  if "learners" ret serveText_direct(renderConcepts(list(MatchLearner)));

  s = dropPrefix("/", s);
  if (possibleGlobalID(s)) {
    if (theoryModule == null) ret serveHTML("Loading theory module");
    Map map = (Map) getOpt(theoryModule, 'theoryForLineMap);
    if (map.containsKey(toLower(s))) ret serveHTML("A line in the chat.");
    ret serveHTML("Unknown");
  }

  // Home Page
  ret serveHTML(h1_title("Smart Bot's Encyclopedia :)")
    + ul_htmlencode(map(rcurry(f or2, "-"), keys(cachedNodeIndex()))));
}

sS html_thoughts() {
  S html = /*hrefresh(5) +*/ hGoogleFontOswald();
  ret html + wordThoughts();
  /*
  if (theoryModule == null) ret html + "Loading theory module...";
  S thoughts = (S) call(theoryModule, 'html_thoughts);
  if (nempty(selectedWord))
    ret html + tag('table, tr(td_top(thoughts, style := "background: #CCC")
      + td_top(wordThoughts(), style := "padding-left: 20px")));
  ret html + thoughts;
  */
}

sS wordThoughts() {
  try {
    if (empty(selectedWord)) ret "";
    new Matches m;
    new Set ignoredWebs;
    for (WebNode n : ai_index_search_dollarX("$X", "can be", "ignored"))
      if (swic(web_text(n), "web ", m))
        ignoredWebs.add($1);
    L nodes = web_nodesNotFromCertainWebs(ignoredWebs, indexedNodes(selectedWord));
    L rel1 = web_collectBackwardRelations(nodes);
    L rel2 = web_collectForwardRelations(nodes);
    //L is = ai_whatIs(selectedWord);
    ret h3(htmlencode(selectedWord))
      + ul(map html_linkURLs_targetBlank(
        allToString(flattenArray2(
          uniquify(
            map(func(WebRelation r) {
              htmlencode_noQuotes(web_text(r) + " " + ai_renderNode(r.b)) + " [" + r.web.globalID + "]"
            }, rel2)
          ),
          uniquify(
            map(func(WebRelation r) {
              htmlencode_noQuotes(ai_renderNode(r.a) + " " + web_text(r) + " " + web_text(r.b)) + " [" + r.web.globalID + "]"
            }, rel1)
          ),
          "[" + n(nodes, "node") + "]"
        ))));
  } catch e {
    printStackTrace(e);
    ret "Error";
  }
}
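// Example chat usage, inferred from the handlers above (the sample triple is
// purely illustrative; exact replies may vary):
//   !triple sky -> has color -> blue        renders the triple as an image via http://ai1.lol/1007546
//   !store-triple sky -> has color -> blue  parses an arrow triple and posts it as a software-made web
//   !web <globalID>                         links to this server's /diagram?id=<globalID> endpoint (port 4678)
//   !uptime                                 reports seconds since boot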