!7

// JavaX program (note the "!7" dialect marker — this is JavaX, not plain Java;
// S = String, L = List, sclass/srecord/ret etc. are transpiler macros).
//
// Spike: compresses the entire version history of one snippet with a
// byte-pair-encoding-style scheme applied to whole lines. Every unique line
// becomes a primitive chunk; the most popular adjacent chunk pair is then
// repeatedly folded into a new pair chunk until no duplicate pair remains.
// The result is written to a ".linecomp" file and its size is compared
// against .gz/.zip/.tgz of the same data, then round-tripped through
// LineCompReader as a self-check.
//
// NOTE(review): the source arrived collapsed onto 3 physical lines; the
// formatting below restores line structure (token stream unchanged), which
// also frees the code that the two pre-existing "//" comments were
// swallowing in the collapsed form.
cprint CompressorSpike {
  switchable S snippetID = #1020763; // snippet whose history we compress (switchable = user-editable at runtime — presumably; confirm JavaX semantics)
  LinkedHashMap versions; // version ID (as string) -> full snippet text; "latest" maps to the current text. Populated lazily below.

  start-thread {
    print("Snippet ID: " + snippetID);
    // When the user switches snippetID, drop the cached versions and reload the module.
    dm_onFieldChange snippetID(r { setField(versions := null); dm_reload() });
    if (versions == null) {
      // textChangesOfSnippet presumably yields newest-first edits; reversed()
      // puts them oldest-first. Each entry maps versionID -> previousValue.
      versions = mapToLinkedHashMap(reversed(textChangesOfSnippet(snippetID)),
        sv -> pair(str(sv.versionID), sv.previousValue));
      versions.put("latest", loadSnippet(snippetID));
      versions = mapValuesToLinkedHashMap toLinesAndBack(versions); // canonicalize line breaks
      change(); // persist the updated field — TODO confirm JavaX change() semantics
    }
    print("Have " + nVersions(versions) + " with a total of " + nLines(totalLineCount(values(versions))));
    new Compressor().run();
  }

  // A chunk renders to text given the global chunk table (so CPair can
  // resolve its two child indices).
  abstract sclass Chunk {
    abstract S text(L chunks);
  }

  // Concatenation of two existing chunks, referenced by index into the chunk list.
  srecord CPair(int i1, int i2) > Chunk {
    CPair(Pair p) { i1 = p.a; i2 = p.b; }
    S text(L chunks) {
      // Join both halves with a newline, right-trimmed (matches lines_rtrim used on decode).
      ret linesLL_rtrim(chunks.get(i1).text(chunks), chunks.get(i2).text(chunks));
    }
  }

  // A primitive chunk: one literal line of text.
  srecord CPrim(S s) > Chunk {
    S text(L chunks) { ret s; }
  }

  class Compressor {
    replace Encodings with Map>. // version ID -> list of chunk indices

    bool sortLines = true; // sort the unique-line table before indexing (cosmetic/stability; flips line numbering only)
    Map textIDToLines = mapValuesToLinkedHashMap lines(versions); // version ID -> list of its lines
    LS allUniqueLines;       // deduplicated lines across all versions
    new L chunks;            // chunk table; indices into this list are the encoding alphabet
    int primChunks;          // number of primitive (CPrim) chunks at the head of the table
    Map lineIndex;           // line text -> its CPrim index in chunks
    new Map linePairIndex;   // NOTE(review): declared but never used in this file
    Encodings finalEncodings;

    run {
      // 1. Build the primitive chunk table: one CPrim per unique line.
      allUniqueLines = uniquify(concatLists(values(textIDToLines)));
      if (sortLines) sortInPlace(allUniqueLines);
      for (S line : allUniqueLines)
        chunks.add(new CPrim(line));
      primChunks = l(chunks);
      lineIndex = listIndex(collect s(chunks));

      // simple encoding (only direct line references)
      Encodings simpleEncodings = mapValues(textIDToLines,
        (IF1>) (lines -> map(lines, line -> lineIndex.get(line))));
      //printAndCheckEncodings(simpleEncodings);

      // 2. Iteratively fold the most popular duplicated adjacent pair into a
      // new CPair chunk until compressPairs reports a fixpoint (returns its input).
      Encodings advancedEncodings = simpleEncodings;
      while licensed {
        Encodings e = compressPairs(advancedEncodings);
        if (e == advancedEncodings) break; // identity return == nothing left to compress
        advancedEncodings = e;
      }
      finalEncodings = advancedEncodings;
      printAndCheckEncodings(finalEncodings); // also asserts lossless decode

      // 3. Serialize and write the .linecomp file; print size info.
      S out = exportEncoding(finalEncodings);
      printWithPrecedingNL(out);
      File file = saveTextFile_infoBox(javaxDataDir("Compressed Snippet 
Version History/versions-of-" + psI(snippetID) + ".linecomp"), out);
      print(renderFileInfo(gzipFile(file)));

      // Make .zip and .tgz for comparison
      File zipFile = replaceFileExtension(file, ".zip");
      {
        temp ZipOutputStream zipOut = zipOutputStream(zipFile); // temp = auto-close at block end
        for (S id, text : versions)
          zip_addTextFile(zipOut, id, text);
      }
      printFileInfo(zipFile);
      File tgzFile = replaceFileExtension(file, ".tgz");
      zip2tgz(zipFile, tgzFile);
      printFileInfo(tgzFile);

      // 4. Round-trip check through the independent reader implementation.
      checkDecompression(file, textIDToLines);
    }

    // Re-read the written file with LineCompReader and assert that every
    // version decodes to exactly its original (right-trimmed) text.
    void checkDecompression(File file, Map textIDToLines) {
      temp BufferedReader reader = bufferedUtf8Reader(file);
      LineCompReader lcr = new(reader);
      assertEquals(keysList(textIDToLines), asList(lcr.versions()));
      for (S version : keys(textIDToLines))
        assertEquals(lcr.textForVersion(version), lines_rtrim(textIDToLines.get(version)));
      print("Decompression OK for " + nVersions(textIDToLines));
    }

    // File format: header "LINECOMP <primChunks>", then one line per chunk
    // (pair chunks as "i1 i2", primitives as the raw line — note primitives
    // come first, so the reader can tell them apart by position), then one
    // "id=<space-separated chunk indices>" line per version.
    S exportEncoding(Encodings encodings) {
      new LS buf;
      buf.add("LINECOMP " + primChunks); // magic signature
      for (Chunk c : chunks) {
        if (c cast CPair)
          buf.add(c.i1 + " " + c.i2);
        else
          buf.add(((CPrim) c).s);
      }
      for (S id, L l : encodings)
        buf.add(id + "=" + joinWithSpace(l));
      ret lines_rtrim(buf);
    }

    // One BPE step: count adjacent index pairs across all encodings
    // (suppressing immediate repeats of the same pair so overlapping
    // occurrences like a,a,a aren't double-counted), pick the most popular
    // duplicated pair, mint a CPair chunk for it, and substitute it
    // everywhere. Returns the input map unchanged when no duplicate exists.
    Encodings compressPairs(Encodings encodings) {
      new MultiSet> pairCounts;
      for (L l : values(encodings)) {
        Pair lastPair = null;
        for (Pair pair : overlappingPairs(l)) {
          if (neq(pair, lastPair)) {
            lastPair = pair;
            pairCounts.add(pair);
          }
        }
      }
      //print("Pair counts: " + pairCounts);
      Pair toCompress = msMostPopularDuplicate(pairCounts);
      // Compress only most popular pair
      if (toCompress == null) ret encodings; // Nothing to do
      int idx = makeCPair(toCompress);
      print("Made pair: " + toCompress + " -> " + idx + ", " + (msNumberOfDuplicates(pairCounts)-1) + " remaining");
      ret mapValues(encodings, (IF1>) encoded ->
        replaceSublist(encoded, pairToList(toCompress), ll(idx)));
    }

    // Append a new CPair chunk and return its index in the chunk table.
    int makeCPair(Pair p) {
      int idx = addAndReturnIndex(chunks, new CPair(p));
      ret idx;
    }

    // Print each version's encoding and assert it decodes back to the original text.
    void printAndCheckEncodings(Encodings encodings) {
      for (S id, L encoded : encodings) {
        print(id + ": " + joinWithSpace(encoded));
        assertEquals(lines(textIDToLines.get(id)), decode(encoded));
      }
    }

    // Render a list of chunk indices back to full text.
    S decode(L encoded) { ret lines(lambdaMap chunkText(encoded)); }

    S chunkText(int idx) { ret chunks.get(idx).text(chunks); }
  }
}