Not logged in.  Login/Logout/Register | List snippets | | Create snippet | Upload image | Upload data

289
LINES

< > BotCompany Repo | #1030401 - LineComp_PairIndex - helper for LineComp compressor [backup before IntPairTreeSet]

JavaX fragment (include)

!include once #1027304 // Eclipse Collections
import org.eclipse.collections.impl.map.mutable.primitive.*;
import org.eclipse.collections.api.tuple.primitive.*;

set flag NoModCount. // save some space. Make sure that this flag doesn't screw other collections/maps you compile with this.

// Pair index for the LineComp compressor: tracks every adjacent
// (value, next value) pair across all chains, bucketed by occurrence
// count, so the compressor can repeatedly find and replace the most
// frequent pair. (JavaX dialect: sclass/S/L/Cl etc. are JavaX aliases.)
final sclass LineComp_PairIndex {
  bool verbose; // when true, haveChains prints each pair it indexes
  bool inOrder = true;
  //MultiSetMap<IntPair, Node> nodes;
  new LongObjectHashMap<LinkedHashSet<Node>> nodes; // key: pair, value: set of nodes in order
  // Reverse index: occurrence count -> pairs with that count.
  // Kept as a rev-sorted tree map so firstKey() is the highest count.
  MultiSetMap<Int, IntPair> byCount // values are pairs
    = null; // init later
    //= multiSetMap_innerHashSet_outerRevTreeMap(); // makes a difference in selecting a pair if there are multiple with the same count
    // = multiSetMap_innerCompactHashSet_outerRevTreeMap(); // same but more compact
    //= multiSetMap_innerTreeSet_outerRevTreeMap(); // initial version that kind of pointlessly sorts the same-count nodes lexicographically
  new LinkedHashMap<S, Ch> chains; // version string -> chain (one per input file)
  Ch firstChain; // first chain created (the only one in single-chain builds)
  int totalPairCount; // total node occurrences currently indexed
  int initialNodesSetCapacity = 1; // initial capacity of each per-pair node set
  L<IVF1<Node>> onAddingPair, onRemovingPair; // index-change listeners
  LongIntHashMap existingPairsIndex; // optional: pair -> already-assigned symbol
  
  // This queue checks all newly made pairs to see if they already
  // have a symbol.
  new LinkedList<Node> compressionQueue;
  
  Comparator<IntPair> pairComparatorForBalancing; // optional tie-break order inside a count bucket

  // Initialization hook; currently a no-op (the nodes map is created
  // eagerly in its field declaration above).
  void init {
    /*nodes = inOrder
      ? multiSetMap_innerLinkedHashSet_outerHashMap()
      : multiSetMap_innerHashSet_outerHashMap();*/
  }
  
  // indicate end of chain-making phase.
  // we still accept more chains after this though, I think (for late input)
  // Lazily builds the byCount reverse index (count -> pairs) from the
  // per-pair node sets. Idempotent: does nothing if byCount already exists.
  void haveChains {
    if (byCount == null) {
      //time "Making byCount" {
        // choose bucket implementation: custom-ordered tree sets when a
        // balancing comparator is given, compact hash sets otherwise
        if (pairComparatorForBalancing != null)
          byCount = multiSetMap_innerCustomTreeSet_outerRevTreeMap(pairComparatorForBalancing);
        else
          byCount = multiSetMap_innerCompactHashSet_outerRevTreeMap();
        for (Set<Node> set : nodes.values()) {
          // all nodes in one set share the same pair, so read it off the first
          IntPair p = first(set).pair();
          int n = set.size();
          if (verbose) print("Adding node " + p + " (" + n + ")");
          byCount.put(n, p);
        }
      //}
      ifdef LineComp_PairIndex_printSize
        print("byCount size: " + n2(guessObjectSizeWithout(ll(this, pairComparatorForBalancing), byCount)) + " for " + nNodes(nodes.size()));
      endifdef
    }
  }
  
  // a "chain" of nodes (one input file)
  // Doubly-linked list of Nodes; only the tail is stored, earlier nodes
  // are reached via prev links.
  sclass Ch {
    Node tail; // last node of the chain (null while empty)
    
    // Build a chain from a list of symbols, indexing each newly
    // completed pair as we go. (JavaX: *() is the constructor.)
    *(LineComp_PairIndex pi, L<Int> l) {
      int count = 0;
      fOr (int i : l) {
        add(pi, i);
        // progress output for very large inputs
        if (((++count) % oneMillion()) == 0)
          print("Added " + n2(count) + " entries to chain");
      }
    }
    
    // Reconstruct the chain's values in front-to-back order by walking
    // backwards from the tail and reversing.
    L<Int> toList() {
      new L<Int> l;
      Node node = tail;
      while (node != null) {
        l.add(node.value);
        node = node.prev;
      }
      ret reverseInPlace(l);
    }
    
    // Append symbol i as a new tail node, index the pair it completes
    // with the previous tail, and drain the compression queue.
    void add(LineComp_PairIndex pi, int i) {
      new Node n;
      ifndef LineComp_SingleChain
        n.ch = this;
      endifndef
      n.value = i;
      n.prev = tail;
      if (tail != null) tail.next = n;
      tail = n;
      pi.addToIndex(n.prev); // the newly completed pair is (old tail, n)
      pi.processQueue();
    }
  }
  
  // One element of a chain: an int symbol plus links to its neighbors.
  sclass Node {
    ifndef LineComp_SingleChain
      Ch ch; // owning chain (field omitted in single-chain builds)
    endifndef
    int value; // symbol value; -1 marks a deleted node (see deleteNode)
    Node prev, next;
    
    int a() { ret value; }
    // second element of the pair starting here, or -1 at chain end
    int b() { ret next == null ? -1 : next.value; }
    // the pair (value, next.value), or null if this is the last node
    IntPair pair() { ret next == null ? null : IntPair(a(), b()); }
    
    // Debug rendering; < and > mark that more nodes exist before/after.
    // NOTE(review): value and next.value are concatenated with no
    // separator - looks intentional for this compact format, but confirm.
    toString {
      ret stringIf(prev != null, "<") + value +
        (next == null ? "" : next.value
         + stringIf(next.next != null, ">")); 
    }
  }
  
  IntPair nodeToPair(Node n) {
    ret n?.pair();
  }
  
  // add node to pair index
  // Registers the pair starting at n (no-op for null or chain-final
  // nodes), bumps its count bucket in byCount (if that index exists yet),
  // and queues n for existing-pair compression when enabled.
  void addToIndex(Node n) {
    IntPair p = nodeToPair(n);
    if (p == null) ret;
    callFAll(onAddingPair, n);
    int count = nodes_getSize(p); // count before this addition
    nodes_add(p, n);
    if (byCount != null) { // not in init phase
      // move the pair from bucket 'count' to bucket 'count+1'
      if (count != 0) byCount.remove(count, p);
      byCount.put(count+1, p);
    }
    if (existingPairsIndex != null)
      compressionQueue.add(n);
  }
  
  // remove node from pair index
  // Inverse of addToIndex: unregisters the pair starting at n and moves
  // it down one count bucket (dropping it entirely at count 0).
  // Assumes byCount exists, i.e. only called after haveChains().
  void removeFromIndex(Node n) {
    IntPair p = nodeToPair(n);
    if (p == null) ret;
    callFAll(onRemovingPair, n);
    int count = nodes_getSize(p); // count before this removal
    if (count == 0) fail("Can't remove pair " + p);
    nodes_remove(p, n);
    byCount.remove(count, p);
    if (--count > 0) byCount.put(count, p);
  }
  
  // return all pairs in "most popular" bucket
  // (null when even the top bucket has fewer than 2 occurrences)
  Cl<IntPair> mostPopularDuplicates() {
    Int topCount = firstKey(byCount);
    if (toInt(topCount) < 2) ret null;
    ret byCount.get(topCount);
  }
  
  IntPair mostPopularDuplicate() {
    ret toInt(firstKey(byCount)) < 2 ? null : firstValue(byCount);
  }
  
  // Any pair from the highest-count bucket (regardless of count).
  IntPair mostPopularPair() {
    ret firstValue(byCount);
  }
  
  // Highest occurrence count of any pair, or 0 when byCount is empty.
  int highestBucketNr() {
    ret or0(firstKey(byCount));
  }
  
  // counts buckets actually (lower value than totalPairCount)
  // i.e. distinct pairs occurring more than once: all entries minus the
  // count-1 bucket. (byCount.size() presumably counts entries, not keys.)
  int numberOfDuplicates() {
    ret byCount.size()-l(byCount.get(1));
  }
  
  // Current number of occurrences of pair p (0 if not indexed).
  int getCount(IntPair p) {
    ret nodes_getSize(p);
  }
  
  // Replace every occurrence of the pair (pa, pb) with the single symbol
  // replaceWith. Iterates over a clone of the node set because
  // replacePair(Node, int) mutates the index while we iterate.
  void replacePair(int pa, int pb, int replaceWith) {
    IntPair p = IntPair(pa, pb);
    Set<Node> set = nodes.get(intPairToLong(p));
    if (set != null)
      for (Node n : cloneList(set)) {
        continue if n.a() != pa || n.b() != pb; // nodes might have been deleted or changed
        replacePair(n, replaceWith);
      }
  }
  
  // Collapse the pair (node, node.next) into the symbol replaceWith:
  // unindex the three pairs touching the two nodes, rewrite node's value,
  // unlink the follower, then reindex the two pairs around the merged
  // node. Statement order matters - indexes must be consistent throughout.
  void replacePair(Node node, int replaceWith) {
    removeFromIndex(node.prev); // pair (prev, node)
    removeFromIndex(node);      // the pair being replaced
    removeFromIndex(node.next); // pair (next, next.next)
    node.value = replaceWith;
    deleteNode(node.next);
    addToIndex(node.prev);      // new pair (prev, merged node)
    addToIndex(node);           // new pair (merged node, former next.next)
    processQueue();
  }
  
  // Drain the compression queue, checking each queued node against
  // existingPairsIndex. No-op unless an existingPairsIndex is set.
  void processQueue {
    if (existingPairsIndex != null) {
      Node n;
      while not null (n = popFirst(compressionQueue))
        checkIfPairExists(n);
    }
  }
  
  // If the pair starting at node already has a symbol assigned in
  // existingPairsIndex, replace it with that symbol right away.
  void checkIfPairExists(Node node) {
    if (existingPairsIndex == null) ret;
    if (node.value < 0) ret; // node was deleted in the meantime
    IntPair pair = nodeToPair(node);
    if (pair == null) ret;
    //print("Checking pair " + pair);
    int i = existingPairsIndex.getIfAbsent(intPairToLong(pair), -1); // -1 = no symbol yet
    if (i >= 0) {
      //print("  Replacing with: " + i);
      replacePair(node, i);
    }
  }
  
  // Unlink node from its chain (updating the chain's tail if node was
  // last) and mark it invalid. Does NOT touch the pair index - callers
  // must removeFromIndex the affected pairs first.
  void deleteNode(Node node) {
    if (node.next != null) node.next.prev = node.prev; else getChain(node).tail = node.prev;
    if (node.prev != null) node.prev.next = node.next;
    node.value = -1; // mark invalid
  }
  
  // Create a chain from an encoded input, register it under its version
  // string and remember the first chain ever created (single-chain builds
  // assert there is only one).
  Ch newChain(S version, L<Int> encoding) {
    Ch ch = new(this, encoding);
    chains.put(version, ch);
    ifdef LineComp_SingleChain
      assertNull(firstChain);
    endifdef
    if (firstChain == null) firstChain = ch;
    ret ch;
  }
  
  int nodes_getSize(IntPair p) {
    ret l(nodes.get(intPairToLong(p)));
  }
  
  void nodes_add(IntPair p, Node n) {
    ++totalPairCount;
    long l = intPairToLong(p);
    LinkedHashSet<Node> set = nodes.get(l);
    if (set == null)
      nodes.put(l, set = new LinkedHashSet(initialNodesSetCapacity);
    set.add(n);
  }
  
  // Remove one node occurrence of pair p; drops the per-pair set entirely
  // once empty. Fails fast if the pair or node isn't indexed.
  void nodes_remove(IntPair p, Node n) {
    --totalPairCount;
    long l = intPairToLong(p);
    LinkedHashSet<Node> set = nodes.get(l);
    if (set == null || !set.remove(n)) fail("Consistency error");
    if (set.isEmpty())
      nodes.remove(l);
  }
  
  // also shrinks sets...
  // Debug/diagnostic dump: prints pair/bucket counts and the hash-set
  // capacity wasted by the two indexes; additionally shrinks byCount's
  // CompactHashSet buckets to reduce memory (side effect!).
  void printStats {
    print("Different pairs: " + l(nodes));
    print("Different counts: " + byCount.keysSize() + " (highest count: " + firstKey(byCount) + ")");
    long count = 0, waste = 0, newWaste = 0;
    long count1 = 0, waste1 = 0;
    
    // measure fill vs. capacity of the per-pair node sets
    for (HashSet set : nodes.values()) {
      count1 += set.size();
      int capacity = hashSetCapacity(set);
      waste1 += capacity-set.size();
      //if (capacity > set.size()*3) set.shrink(); // TODO
    }
     
    // only the CompactHashSet variant (no balancing comparator) supports
    // capacity inspection and shrinking
    if (pairComparatorForBalancing == null) for (CompactHashSet set : (Cl<CompactHashSet>) (Cl) values(byCount.data)) {
      count += set.size();
      waste += set.capacity()-set.size();
      set.shrinkToFactor(0.5);
      newWaste += set.capacity()-set.size();
    }
    
    print("Nodes set count: " + count1 + ". Waste: " + waste1);
    //print("Set count: " + count + ". Waste: " + waste + (waste == newWaste ? "" : " => " + newWaste));
    print("Duplicates: " + n2(totalPairCount-byCount.count(1)));
  }
  
  // Chain that owns node: the single global chain in single-chain builds,
  // otherwise the node's own ch field (exactly one branch is compiled in).
  Ch getChain(Node node) {
    ifdef LineComp_SingleChain
      ret firstChain;
    endifdef
    ifndef LineComp_SingleChain
      ret node.ch;
    endifndef
  }
  
  /*public LineComp_PairIndex clone() {
    
  }*/
}

Author comment

Began life as a copy of #1028247

download  show line numbers  debug dex   

Travelled to 3 computer(s): bhatertpkbcr, mqqgnosmbjvj, pyentgdyhuwx

No comments. add comment

Snippet ID: #1030401
Snippet name: LineComp_PairIndex - helper for LineComp compressor [backup before IntPairTreeSet]
Eternal ID of this version: #1030401/6
Text MD5: f8d383902744b65e9e47ef0b05d2c67e
Author: stefan
Category: javax
Type: JavaX fragment (include)
Public (visible to everyone): Yes
Archived (hidden from active list): No
Created/modified: 2020-12-13 18:30:43
Source code size: 8717 bytes / 289 lines
Pitched / IR pitched: No / No
Views / Downloads: 22 / 48
Version history: 5 change(s)
Referenced in: [show references]

Formerly at http://tinybrain.de/1030401 & http://1030401.tinybrain.de