!759 sbool greetCookies, theoryOn; static new L thoughtBots; static long started, indexMakingTime; static Class theoryModule; static Lock theoryLock = lock(); static S selectedWord; static int inputNr = 1; static int myPort = 4678; static double speculationTimeoutForHttp = 5.0; // 5 seconds static Set speculationQueue = synchroHashSet(); static L classesToShare = (L) ll(ISpec, Web, WebNode); // brave! // save space sclass E {} set flag noCLParse. p { started = sysNow(); db(); load('selectedWord); load('inputNr); autoRestart(5); typicalDownload(); serveHttp(myPort); bot("Smart Bot.", func(S s) -> S { S x = "Input line " + (inputNr++); print(x); save("inputNr"); ai_postTriple(x, "is", quote(s)); //ai_speculate(x); stefansChat_onLine_fullParams.set(litmap(auth := true)); try { ret actualAnswer(s); } finally { stefansChat_onLine_fullParams.set(null); } }); loadPage_forcedTimeout = 70000; thoughtBots.add(mc()); //loadBots(#1010753, #1010821, #1010825, #1001951); loadBots(#1010825, #1001951); indexMakingTime = sysNow(); cachedNodeIndex(); // load webs, make index indexMakingTime = sysNow()-indexMakingTime; // TODO ai_onNewIndexedText_do(func(S s) { speculationQueue.add(s) }); stefansChat_n_onChange(r { clearTransientWebs(); addTransientWeb(webFromTriple("Latest chat line", "is", "Chat line " + stefansChat_n_value)); }); stefansChat_onLine_lookback = 2; stefansChat_onLine_onHistoryRead.set(f onHistoryRead); stefansChat_onLine_safe(voidfunc(fS text) { final Map params = stefansChat_onLine_fullParams!; pcall { int nr = toInt(mapGet(params, "nr")); print("> [" + nr + "] " + text); if (nr != 0) { S x = "Chat line " + nr; ai_postTriple(x, "is", quote(text)); ai_postTriple(x, "is", "recent"); stefansChat_n_notify(nr); ai_speculate(x); } } /*thread text { stefansChat_onLine_fullParams.set(params);*/ postToStefansChat(actualAnswer(text)); //} }); //stefansChat_post("Smart Bot Upgraded! Boot took: " + formatDouble(fromMS(sysNow()-started), 1) + " s"); if (greetCookies) runInNewThread(#1010793); // Post on UAIP ai_onNewWeb(f ai_spec_moveToMiddle); ai_extendKnownVerbs(); // Do the slow stuff addVirtualNodeIndex(hugeEnglishDictionary_virtualNodeIndex()); loadTheory(); thread "Speculator" { speculatorLoop(); } } sS actualAnswer(S s) { ret scanMultipleBots(thoughtBots, s); } svoid loadTheory { lock theoryLock; cleanUp(theoryModule); theoryModule = null; if (!theoryOn) ret; theoryModule = run(#1010963); setOpt(theoryModule, 'onUpdate, r { call(theoryModule, 'react) }); call(theoryModule, 'react); print("Theory module loaded."); } static int lineCount; sbool authed() { ret isTrue(mapGet(stefansChat_onLine_fullParams!, 'auth)); } sbool byBot() { ret isTrue(mapGet(stefansChat_onLine_fullParams!, 'botMark)); } answer { try { ++lineCount; bool authed = authed(); lock dbLock(); S std = ai_standardAnswer(s); s = ai_dropDroppableStuff(s); int safety = 0; replaceloop: while (safety++ < 10) { s = trim(s); try answer answer_inner(s, lineCount); for (WebNode node : indexedNodes(s)) { S x = web_operandText(node, "replace with"); if (x != null && neq(s, x)) { print("Replacing with " + x); s = x; continue replaceloop; } } break; } //if (ai_isQuestion_1(s)) ret "No idea"; try answer std; } catch print e { ret exceptionToStringShort(e); } } sS answer_inner(S s, int lineCount) { final new Matches m; if (authed()) { if (lineCount >= stefansChat_onLine_lookback) { if (eqic(s, "!restart")) { restart(); ret "Yo"; } if (eqic(s, "!theory")) ret "OK" with loadTheory(); if (swic_trim(s, "!theory ", m)) ret callAnswerFunction(theoryModule, m.rest()); if (eqic(s, "!peep")) { runAndCleanUp(#1010980); ret "OK, got " + fileLength(ai_chatAnalysis_peepHoleFile()) + " bytes"; } if (swic_trim(s, "!subst-web ", m)) { L tok = javaTokC(m.rest()); if (l(tok) == 2) ret ai_subst_web_charBasedReplace(first(tok), selectWord(unquote(last(tok)))); else ret ai_subst_web(m.rest()); } if (eqic(s, "!idle-patterns")) { time2 { Either e = (Either) evalWithTimeoutReleasingDBLock(60.0, f ai_idle_matchPatterns); } ret e.isA() ? "Made " + nWebs(e.a()) + " in " + toSeconds(lastTiming(), 1) + "s" : "Timeout"; } if (swic_trim(s, "!experiment ", m)) { File outFile = directNohupJavax(m.rest()); ret "OK, " + f2s(outFile); } if (eqic(s, "!rotate")) { rotateSoftwareMadeWebs(); ret "OK"; } if (swic_trim(s, "!speculate-all ", m)) { time2 { Either e = (Either) evalWithTimeoutReleasingDBLock(10.0, r { ai_speculate_all(m.rest()) }); } int n = 0; pcall { n = e.isA() ? e.a() : websMadeInThread(e.b()); } ret "Made " + nWebs(n) + " in " + toSeconds(lastTiming(), 1) + "s"; } if (eqic(s, "!quick-transpile yourself")) ret transpileMyself('quick); if (eqic(s, "!medium-transpile yourself")) ret transpileMyself('medium); } if "unlearn * *" ret "OK, was: " + uniq(MatchLearner, pattern := $1).examples.put($2, 'rejected); if (eqic(s, "!fresh")) { veryQuickJava_refresh(); ret "OK, refreshed"; } if (swic_trim(s, "!sf ", m)) ret sfu(callAndMake_orDirect(m.rest())); if (swic_trim(s, "!specf ", m)) { S sf = m.rest(); print("specf " + sf); Class c = loadFunctionsWithInclude(ll(sf), #1011841); try { callOpt(c, 'ai_spec_init); ret sfu(call(c, sf)); } finally { cleanUp(c); } } if (swic_trim(s, "!eval ", m)) { time2 { O o = evalJava(m.rest()); } ret "[" + lastTiming() + " ms] " + sfu(o); } if (swic_trim(s, "!run ", m)) { runAndCleanUp(m.rest()); ret "OK"; } if (swic_trim(s, "!var ", m)) ret sfu(get(mc(), m.rest()); if (eqic(s, "!mtm")) { ai_spec_moveToMiddle_all(); ret "OK"; } if (eqic(s, "!gc")) ret ai_gc_scan("The most important word"); if (eqic(s, "!num-invalid")) ret lstr(ai_allInvalidWebs()); // end of priviledged commands } if (ai_isQuestion_1(s) && !byBot()) { selectWord(s); ai_postTriple(s, "should be", "answered in the chat"); } if "Are you online?" ret "Yes"; if (eqicOneOf(s, "!question", "!gac")) ret random_gac36k(); if (swic_trim(s, "!word ", m)) { selectWord(m.rest()); ret "OK " + unicode_rightPointingTriangle() + unicode_rightPointingTriangle(); } if (swic_trim(s, "!search ", m)) { selectWord("[search] " + m.rest()); ret "OK " + unicode_rightPointingTriangle() + unicode_rightPointingTriangle(); } if (eqic(s, "!index-time")) ret toS(indexMakingTime, 1) + " s"; if (swic_trim(s, "!bench-search", m)) { T3 t = ai_parseArrowTriple(m.rest()); time2 { int n = l(ai_search_dollarX(t)); } ret n(n, "result") + ", " + lastTiming() + " ms"; } if (swic_trim(s, "!google ", m)) ret join(" - ", swapPair(first(quickGoogle($1)))); if (swic_trim(s, "!has-triple ", m)) { Triple t = ai_parseArrowTriple(m.rest()); ret yn(ai_cache_hasTriple(t)); } if (swic_trim(s, "!triple ", m) || swic_trim(s, "!triples ", m)) { L elements = trimAll(splitAt(m.rest(), "->")); if (l(elements) != 3) ret "3 parts please"; ret "[IMAGE] " + "http://ai1.lol/1007546?triples=" + urlencode(lines(elements)); } if (swic_trim(s, "!web ", m)) { S id = $1; Web web = indexedWebWithGlobalID(id); if (web == null) ret "Not found"; ret "[IMAGE] http://ai1.lol:" + myPort + "/diagram?id=" + id; } if (swic_trim(s, "!parse ", m)) ret ai_renderTriple(ai_tripelize(m.rest())); if (eqic(s, "!worst-case")) ret sfu(getOptMC('ai_hasTriple_vary_worstCase)); if (eqic(s, "!uptime")) ret n(secondsSinceSysTime(started), "second"); if (eqic(s, "!typical-set")) ret sfu(diagramServer_typicalSet()); if (eqic(s, "!num-unverified")) ret lstr(ai_unverifiedWebs()); if (eqic(s, "!num-true")) ret lstr(ai_trueWebs()); if (eqic(s, "!version")) ret autoRestart_localMD5(); if (swic_trim(s, "!blob ", m)) { S a = postNodeFromInput(m.rest(), s); selectWord(m.rest()); ret a; } if (swic_trim(s, "!store ", m)) ret postTripleFromInput(ai_tripelize(m.rest()), s); if (swic_trim(s, "!store-triple ", m)) ret postTripleFromInput(ai_parseArrowTriple(m.rest()), s); if (swic(s, "!nodes ", m)) { if (dropSuffixTrim("...", m.rest(), m)) ret lstr(indexedNodesStartingWith(m.rest())); ret lstr(indexedNodes(m.rest())); } if (eqic(s, "!webs")) ret lstr(allIndexedWebs()); if (swic(s, "!webs ", m)) ret joinWithSpace(collect(indexedWebs($1), 'globalID)); if (swic(s, "!splitAtBaseFormVerb ", m)) ret sfu(splitAtBaseFormVerb($1)); if (swic(s, "!maxRunlengthOfRepeatingChars ", m)) ret str(maxRunlengthOfRepeatingChars($1)); if (swic(s, "!collapseWord ", m)) ret collapseWord($1); if (swic(s, "!gac ", m)) ret or2(first(scoredSearch(m.get(0), keys(gac36k()))), "-"); if (eqic(s, "!vms")) ret computerVMs_text(); if (swic_trim(s, "!count-triple ", m)) { T3 t = ai_parseArrowTriple(m.rest()); print("Searching: " + sfu(t)); Pair p = ai_countBoth_dollarX(t); ret eq(p.a, p.b) ? str(p.a) : p.a + " (+" + (p.b-p.a) + " unverified)"; } pcall { try answer ai_sfCommands(s); } if (swic_trim(s, "!macmillan ", m)) try { Pair> p = macmillanDefinitions3(m.rest()); ret toUpper(p.a) + "\n" + lines(prependAll("-", p.b)); } catch e { printShortException(e); ret "Macmillan is quiet today."; } try answer ai_answerFromCache(s); S _s = s; s = ai_dropLeadingAdditions(s); if (neq(_s, s)) try answer ai_answerFromCache(s); if "cache size" ret n(keys(cachedNodeIndex()), "different term") + ", " + n(web_countNodes(allWebs_cached()), "node") + ", " + n(allWebs_cached(), "web"); if "give me subtypes of ..." ret ai_renderList(ai_subtypesOf(m.rest())); if "give me a third person verb" ret random(thirdPersonVerbs()); if (learnedFlexMatch("give me some *", s, m)) { S query = singular($1); ret ai_renderNodesList(concatLists( ai_index_search_dollarX("$X", "is a", query), ai_index_search_dollarX("$X", "is", a(query)))); } if "authed?" ret authed() ? "You are authed." : "You are not authed."; if "what is the singular of *" ret or2(getSingularFromWordHippo($1), singular($1)); if (learnedFlexMatch("what is the relation between * and *", s, m)) { L l = ai_getRelation($1, $2); //ret ai_renderList(l); if (nempty(l)) ret $1 + " " + first(l) + " " + $2; } if "what unix day is it" ret str(unixDay()); if "show me the ..." ret "What " + $1 + "?"; if (learnedFlexMatch("What do * have?", s, m)) ret ai_renderNodesList(ai_index_search_dollarX($1, "have", "$X")); if (learnedFlexMatch("What does * do?", s, m)) ret ai_renderList(map(f web_nodePairToText, webs_search_dollarXY(webFromTriples($1, "$X", "$Y"), indexedWebs($1)))); if (learnedFlexMatch_multi(s, m, "What is *", "Who is *", "Was ist *", "Wer ist *")) try answer ai_renderNodesList(ai_whatIs(selectWord($1)), ""); S sf = web_text(first(ai_search_dollarX_verified("$X", "implements", quote(s)))); if (startsWith(sf, "sf ", m)) ret str(makeAndCall($1)); if (learnedFlexMatch("* how many visitors *", s) || match("how many visitors", s)) ret str(ai1_cookiesToday_int()); // Once more with generated webs (replacing $ vars) if (learnedFlexMatch("What is *", s, m)) { L extendedWebs = indexedWebsAfterVariableSubstitution($1); print("Have " + n(extendedWebs, "extended web") + " for " + quote($1)); ret ai_renderNodesList(webs_search_dollarX(webFromTriples($1, "is", "$X"), extendedWebs), "I don't know"); } if (!ai_isQuestion_1(s) && learnedFlexMatch_multi(s, m, "* is *", "* ist *")) try answer postTripleFromInput(triple(m.get(0), "is", m.get(1)), s); try answer ai_standardAnswer(s); null; } svoid loadBots(S... botIDs) { for (S id : botIDs) pcall { thoughtBots.add(runSubBot(id)); } } static O html(S uri, SS params) { time { ret html_2(uri, params); } } static O html_2(S s, SS params) { if (eqic(s, "/favicon.ico")) ret serve404(); if "threads" { time2 { S text = renderRunnableThreadsWithStackTraces(); } ret hpre(text + "\n\n" + lastTiming() + " ms"); } if (eqic(s, "/1-sec-profile")) ret html_profile(1); if (eqic(s, "/10-sec-profile")) ret html_profile(10); if "thoughts" ret serveHTML(html_thoughts()); if "diagram" { S id = params.get("id"); Web web = indexedWebWithGlobalID(id); ret serveJPEG(webToCAL(web).makeImage(600, 400)); } if "log" ret serveText_direct(printLog()); if "learners" ret serveText_direct(renderConcepts(list(MatchLearner))); if "unreached" ret h3_title("Unreached") + ul(map html_encyclopediaTopic(ai_gc_unreachedWords())); // Serve Web With Global ID s = dropPrefix("/", s); if (possibleGlobalID(s)) { L webs = allWebsByID().get(s); if (nempty(webs)) { Web web = first(webs); ret h2_title("Web " + s + (ai_isInvalidWeb(web) ? " [INVALID]" : "")) + pre(htmlencode(renderWeb_multiLine(web))) + p(himg(ai_webImage_link(s), title := "Web " + s)) + (l(webs) > 1 ? p("Warning: Multiple webs") : "") + ai_html_wordThoughts("Web " + web.globalID); } /*if (theoryOn && theoryModule == null) ret serveHTML("Loading theory module"); Map map = (Map) getOpt(theoryModule, 'theoryForLineMap); if (map.containsKey(toLower(s))) ret serveHTML("A line in the chat.");*/ ret serveHTML("Unknown: " + s); } new Matches m; if (swic(s, "e/", m)) { S topic = urldecode(m.rest()); time2 { bool timeout = ai_speculateWithActiveAndTimeout(topic, speculationTimeoutForHttp); } long time = lastTiming(); L words = ll(topic); //if (ai_getWeb(topic) != null) words.add("Web " + topic); ret h1_title(htmlencode_noQuote("Topic: " + topic)) //+ p("Have " + n(indexedWebs(topic), "web"))) + ai_html_wordThoughts(words) + p("Speculated for " + time + " ms" + (timeout ? " (TIMED OUT)" : "")); } if (eqic(s, "alphabetical")) ret html_alphabetical(params); if (eqic(s, "latest-webs")) { int n = toInt(params.get('n)); ret html_latestWebs(min(1000, max(n, 10))); } if (eqic(s, "all-web-ids")) { L l = allWebIDs_cloned(); ret serveText( l(l) + "ids follow.\n" + lines(l) + serveText(l(l) + "ids written."); } // Search Results S q = trim(params.get("q")); if (nempty(q)) ret html_searchResults(q); // Home Page (encyclopedia by popularity) time "Popularity search" { L keys = multiMapKeysByPopularity(cachedNodeIndex()); } int step = 100, n = toInt(params.get("n")); int count = l(keys); L l = subList(keys, n, n+step); ret h1_title("Smart Bot's Encyclopedia (" + n_fancy(keys, "entry") + ")") + hform(p("Most occurring | " + ahref("/alphabetical", "Alphabetical") + " | " + ahref("/latest-webs", "Latest") + " | " + htextinput("q") + " " + hsubmit("Search"))) + pageNav2("/", count, n, step, 'n) + ul(map(func(S s) -> S { ahref(smartBot_encyclopediaLink(s), htmlencode(or2(s, "-"))) + " [" + ai_approximateIndexedNodesCount(s) + "]" }, l)); } static O html_profile(int seconds) { if (poorMansProfiling_isOn()) ret "Already on"; poorMansProfiling(100); sleepSeconds(seconds); ret serveText(poorMansProfiling_stopAndRenderResults()); } static S html_searchResults(S q) { int max = 1000; L l = indexedTerms_scoredSearch(q, max+1); bool more = l(l) > max; ret h1_title("Smart Bot Search: " + htmlencode(q) + " (" + n_fancy(l, "result") + ")") + p(l(l) > max ? max + "+ search results" : n(l, "search result")) + ul(map html_encyclopediaTopic(takeFirst(max, l))); } static S html_alphabetical(SS params) { int step = 100, n = toInt(params.get("n")); Collection all = keys(cachedNodeIndex(); int count = l(all); L l = subListOfCollection(all, n, n+step); ret h1_title("Smart Bot's Encyclopedia (Alphabetical) :)") + p(ahref("/", "Most occurring") + " | " + "Alphabetical" + " | " + ahref("/latest-webs", "Latest")) + pageNav2("/alphabetical", count, n, step, 'n) + ul(map(func(S s) -> S { ahref(smartBot_encyclopediaLink(s), htmlencode(or2(s, "-"))) }, l)); } static S html_latestWebs(int n) { ret h1_title("Smart Bot's Encyclopedia - Latest Webs") + ul(map(func(Web web) -> S { ai_html_linkedWeb(web) + htmlencode(" [" + web.created + " - " + renderGMTDate(web.created) + "] " + web.source + ": ") + ai_html_renderWebShort(web) }, takeFirst(n, sortByFieldDesc('created, allWebsFromCachedNodeIndex())))); } sS html_thoughts() { S html = /*hrefresh(5) +*/ hGoogleFontOswald(); S status = ""; Pair p = evalWithTimeout_numberOfCalculations(); if (p.a > 0) status = p(n(p.a, "calculation") + (p.b == 0 ? "" : ", " + p.b + " timed out")); ret html + wordThoughts() + status; /* if (theoryModule == null) ret html + "Loading theory module..."; S thoughts = (S) call(theoryModule, 'html_thoughts); if (nempty(selectedWord)) ret html + tag('table, tr(td_top(thoughts, style := "background: #CCC") + td_top(wordThoughts(), style := "padding-left: 20px"))); ret html + thoughts; */ } sS wordThoughts() { try { if (empty(selectedWord)) ret ""; ret html_addTargetBlank(ai_html_wordThoughts(selectedWord)); } catch e { printStackTrace(e); ret "Erreur"; } } sS postTripleFromInput(T3 triple, S input) { if (swic(input, "OK, stored")) null; if (!ai_tripleAllowedToPost(triple)) null; selectWord(triple.a); if (ai_cache_hasTriple(triple)) ret "I know"; else { Web web = webFromTriple(triple); web.unverified = !authed(); postSoftwareMadeWeb(web, +input); ret "OK, stored" + (web.unverified ? " (unverified)" : "") + ": " + ai_renderTriple(triple); } } sS postNodeFromInput(S node, S input) { if (!ai_nodeNameAllowedToPost(node)) null; if (hasIndexedNode(node)) ret "I know"; else { Web web = oneNodeWeb(node); web.unverified = !authed(); postSoftwareMadeWeb(web, +input); ret "OK, stored" + (web.unverified ? " (unverified)" : "") + ": " + node; } } svoid processSelectedWord { fS word = selectedWord; if (empty(word)) ret; ai_withMaker('processSelectedWord, r { //ai_speculate(word); pcall { ai_greetingRule1(word); if (ai_hasTriple(word, "should be", "answered by me") && ai_postTriple(word, "was", "answered by me") != null) { S text = firstQuoted(web_texts(ai_search_dollarX(word, "is", "$X"))); postToStefansChat((nempty(text) ? text + " << " : "") + "Greetings back to you!"); } } }); } svoid makeChatLinesUnrecent { new Matches m; for (WebNode node : ai_search_dollarX("$X", "is", "recent")) if (web_match("Chat line *", node, m)) pcall { int n = parseInt($1); if (n <= stefansChat_n_value-100) { print("Unrecenting " + n); ai_invalidateWeb(node.web); } } } svoid onHistoryRead { lock dbLock(); print("History read."); processSelectedWord(); pcall { makeChatLinesUnrecent(); } } sS selectWord(S word) { if (nempty(word)) { selectedWord = word; save('selectedWord); processSelectedWord(); } ret word; } sS transpileMyself(S mode) { postToStefansChat("Transpiling..."); Pair p = transpileOnServer(programID(), 'medium); ret p.a ? "OK" : "Not OK"; } svoid speculatorLoop { repeat with sleep 1 { ai_speculate(selectedWord); //ai_speculate(randomIndexedTerm()); ai_activeSpec(selectedWord); S s; while ((s = first_sync(speculationQueue)) != null) { speculationQueue.remove(s); long time = sysNow(); ai_speculateWithActive(s); done2_always(time, "Speculation Queue > " + s); } } } public static ISpec ispec = new ISpec { public Lock aiLock() { ret main.aiLock(); } public bool ai_cache_hasTriple(S a, S b, S c) { ret main.ai_cache_hasTriple(a, b, c); } public S ai_postTriple(S a, S b, S c) { ret main.ai_postTriple(a, b, c); } public L allIndexedFullTerms() { ret main.allIndexedFullTerms(); } public L ai_texts_verified(S a, S b, S c) { ret main.ai_texts_verified(a, b, c); } }; // share ISpec interface with sub-modules static JavaXClassLoader hotwire_makeClassLoader(L files) { ret new JavaXClassLoaderWithParent2(null, files, myClassLoader(), map className(classesToShare)); }