[mathicgb] 352/393: Simplified PolyHashTable and gave BjarkeGeobucket2 a new hash table based on MonomialMap that is significantly faster than the old one.

Doug Torrance dtorrance-guest at moszumanska.debian.org
Fri Apr 3 15:59:33 UTC 2015


This is an automated email from the git hooks/post-receive script.

dtorrance-guest pushed a commit to branch upstream
in repository mathicgb.

commit adf16d8337a66e2bb26722e12795dcf92b184dd8
Author: Bjarke Hammersholt Roune <bjarkehr.code at gmail.com>
Date:   Tue Sep 3 11:55:02 2013 +0200

    Simplified PolyHashTable and gave BjarkeGeobucket2 a new hash table based on MonomialMap that is significantly faster than the old one.
---
 src/mathicgb/BjarkeGeobucket.cpp  |  20 --
 src/mathicgb/BjarkeGeobucket2.cpp | 440 +++++++++++++++++++++++++++++---------
 src/mathicgb/BjarkeGeobucket2.hpp | 103 +--------
 src/mathicgb/MonoMonoid.hpp       |   7 +
 src/mathicgb/PolyHashReducer.cpp  |   5 -
 src/mathicgb/PolyHashReducer.hpp  |   1 -
 src/mathicgb/PolyHashTable.cpp    | 348 ++++++------------------------
 src/mathicgb/PolyHashTable.hpp    |  53 +----
 src/mathicgb/Reducer.cpp          |   2 +-
 src/mathicgb/ReducerHash.hpp      |  65 +-----
 src/test/poly-test.cpp            |  37 ----
 11 files changed, 429 insertions(+), 652 deletions(-)
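
The speedup described above comes mainly from the new MonoMap hash table added to
BjarkeGeobucket2.cpp below, which sizes its bucket array to a power of two so that a
hash value can be mapped to a bucket index with a single bitwise AND rather than a
modulo. A minimal standalone sketch of that masking scheme follows; computeHashMask
and hashToIndex mirror the functions in the patch, while the uint32_t HashValue type
is an assumption made only for this illustration.

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    typedef std::uint32_t HashValue; // assumed hash type; must be unsigned

    // Round the requested bucket count up to the next power of two and
    // return the matching index mask (all low bits set).
    HashValue computeHashMask(std::size_t requestedBucketCount) {
      std::size_t pow2 = 1;
      while (pow2 < requestedBucketCount && 2 * pow2 != 0)
        pow2 *= 2;
      assert(pow2 > 0 && (pow2 & (pow2 - 1)) == 0); // power of two
      return static_cast<HashValue>(pow2 - 1);
    }

    // With a power-of-two bucket count, (hash & mask) == hash % bucketCount,
    // but the AND avoids an integer division on every lookup.
    std::size_t hashToIndex(HashValue hash, HashValue mask) {
      return hash & mask;
    }

For example, with the requested bucket count of 10000 used in the patch, the next
power of two is 16384, so the mask is 0x3FFF.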

diff --git a/src/mathicgb/BjarkeGeobucket.cpp b/src/mathicgb/BjarkeGeobucket.cpp
index 2937fdf..0dc0dd7 100755
--- a/src/mathicgb/BjarkeGeobucket.cpp
+++ b/src/mathicgb/BjarkeGeobucket.cpp
@@ -25,7 +25,6 @@ void BjarkeGeobucket::insertTail(const_term multiplier, const Poly *g1)
 {
   if (g1->nTerms() <= 1) return;
 
-  MATHICGB_ASSERT(mNodeCount == H_.getNodeCount());
   HashPoly M;
   H_.insert(multiplier, ++(g1->begin()), g1->end(), M);
 
@@ -38,16 +37,12 @@ void BjarkeGeobucket::insertTail(const_term multiplier, const Poly *g1)
   stats_n_inserts++;
   stats_n_compares += G_.getConfiguration().getComparisons();
   G_.getConfiguration().resetComparisons();
-
-  MATHICGB_ASSERT(mNodeCount == H_.getNodeCount());
 }
 
 void BjarkeGeobucket::insert(monomial multiplier, const Poly *g1)
 {
   HashPoly M;
 
-  MATHICGB_ASSERT(mNodeCount == H_.getNodeCount());
-
   H_.insert(multiplier, g1->begin(), g1->end(), M);
 
   if (!M.empty())
@@ -59,13 +54,10 @@ void BjarkeGeobucket::insert(monomial multiplier, const Poly *g1)
   stats_n_inserts++;
   stats_n_compares += G_.getConfiguration().getComparisons();
   G_.getConfiguration().resetComparisons();
-
-  MATHICGB_ASSERT(mNodeCount == H_.getNodeCount());
 }
 
 bool BjarkeGeobucket::findLeadTerm(const_term &result)
 {
-  MATHICGB_ASSERT(mNodeCount == H_.getNodeCount());
   while (!G_.empty())
     {
       if (H_.popTerm(G_.top(), result.coeff, result.monom))
@@ -82,8 +74,6 @@ void BjarkeGeobucket::removeLeadTerm()
 {
   G_.pop();
   mNodeCount--;
-
-  MATHICGB_ASSERT(mNodeCount == H_.getNodeCount());
 }
 
 void BjarkeGeobucket::value(Poly &result)
@@ -96,23 +86,18 @@ void BjarkeGeobucket::value(Poly &result)
       G_.pop();
       mNodeCount--;
     }
-
-  MATHICGB_ASSERT(mNodeCount == H_.getNodeCount());
   resetReducer();
 }
 
 void BjarkeGeobucket::resetReducer()
 {
-  MATHICGB_ASSERT(mNodeCount == H_.getNodeCount());
   const_term t;
   while (findLeadTerm(t))
     {
       G_.pop();
       mNodeCount--;
     }
-  MATHICGB_ASSERT(mNodeCount == H_.getNodeCount());
   H_.reset();
-  MATHICGB_ASSERT(mNodeCount == H_.getNodeCount());
   // how to reset G_ ?
 }
 
@@ -124,9 +109,4 @@ size_t BjarkeGeobucket::getMemoryUse() const
   return result;
 }
 
-void BjarkeGeobucket::dump() const
-{
-  H_.dump(0);
-}
-
 MATHICGB_NAMESPACE_END
diff --git a/src/mathicgb/BjarkeGeobucket2.cpp b/src/mathicgb/BjarkeGeobucket2.cpp
index 61ef930..7e913a8 100755
--- a/src/mathicgb/BjarkeGeobucket2.cpp
+++ b/src/mathicgb/BjarkeGeobucket2.cpp
@@ -3,129 +3,369 @@
 #include "stdinc.h"
 #include "BjarkeGeobucket2.hpp"
 
-#include <iostream>
+#include "TypicalReducer.hpp"
+#include "PolyHashTable.hpp"
+#include <mathic.h>
 
 MATHICGB_NAMESPACE_BEGIN
 
-BjarkeGeobucket2::BjarkeGeobucket2(const PolyRing *R0):
-  mRing(*R0),
-  mHashTableOLD(R0, 10),
-  mHeap(GeoConfiguration(*R0, 4, 1)),
-  mHashTable(BjarkeGeobucket2Configuration(*R0), 10) {
-}
+class MonoMap {
+public:
+  typedef PolyRing::Monoid Monoid;
+  typedef Monoid::ConstMonoRef ConstMonoRef;
+  typedef Monoid::MonoRef MonoRef;
+  typedef coefficient Value;
+
+  class Node {
+  public:
+    ConstMonoRef mono() const {return *Monoid::toMonoPtr(mMono);}
+    MonoRef mono() {return *Monoid::toMonoPtr(mMono);}
+
+    Value& value() {return mValue;}
+    const Value& value() const {return mValue;}
+
+  private:
+    friend class MonoMap;
+
+    Node*& next() {return mNext;}
+    Node* next() const {return mNext;}
+
+    Node* mNext;
+    Value mValue;
+    exponent mMono[1];
+  };
+
+  // Construct a hash table with at least requestedBucketCount buckets. There
+  // may be more buckets. Currently the number is rounded up to the next power
+  // of two.
+  MonoMap(
+    const size_t requestedBucketCount,
+    const PolyRing& ring
+  ):
+    mHashToIndexMask(computeHashMask(requestedBucketCount)),
+    mBuckets
+      (make_unique_array<Node*>(hashMaskToBucketCount(mHashToIndexMask))),
+    mRing(ring),
+    mNodes(sizeofNode(ring)
+    ),
+    mSize()
+  {
+    std::fill_n(mBuckets.get(), bucketCount(), nullptr);
+  }
+
+  const PolyRing::Monoid& monoid() const {return mRing.monoid();}
 
-void BjarkeGeobucket2::insert(Poly::const_iterator first, 
-                              Poly::const_iterator last,
-                              std::vector<node*> &result)
-{
-  for (Poly::const_iterator i = first; i != last; ++i)
-    {
-      monomial monomspace = mRing.allocMonomial(mArena);
-      mRing.monomialCopy(i.getMonomial(), monomspace);
-      std::pair<bool, node*> found = mHashTable.insert(monomspace, i.getCoefficient());
-      if (found.first)
-        {
-          // remove the monomial.  It should be at the top of the mArena arena.
-          mRing.freeTopMonomial(mArena,monomspace);
-          mRing.coefficientAddTo(found.second->value(), i.getCoefficient());
-        }
-      else
-        {
-          result.push_back(found.second);
-        }
+  void rehash(const size_t requestedBucketCount) {
+    const auto newHashToIndexMask = computeHashMask(requestedBucketCount);
+    const auto newBucketCount = hashMaskToBucketCount(newHashToIndexMask);
+    auto newBuckets = make_unique_array<Node*>(newBucketCount);
+    std::fill_n(newBuckets.get(), newBucketCount, nullptr);
+
+    const auto bucketsEnd = mBuckets.get() + bucketCount();
+    for (auto bucket = mBuckets.get(); bucket != bucketsEnd; ++bucket) {
+      for (auto node = *bucket; node != 0;) {
+        const auto hash = monoid().hash(node->mono());
+        const auto newIndex = hashToIndex(hash, newHashToIndexMask);
+        const auto next = node->next();
+        node->next() = newBuckets[newIndex];
+        newBuckets[newIndex] = node;
+        node = next;
+      }
     }
-}
 
-///////////////////////////////////////
-// External interface routines ////////
-///////////////////////////////////////
-void BjarkeGeobucket2::insertTail(const_term multiplier, const Poly *g1)
-{
-  MATHICGB_ASSERT(g1 != 0);
-  MATHICGB_ASSERT(g1->termsAreInDescendingOrder());
+    mHashToIndexMask = newHashToIndexMask;
+    mBuckets = std::move(newBuckets);
+  }
 
-  if (g1->nTerms() <= 1)
-    return;
+  /// Return how many buckets the hash table has.
+  size_t bucketCount() const {
+    return hashMaskToBucketCount(mHashToIndexMask);
+  }
 
-  HashPoly M;
-  mHashTableOLD.insert(multiplier, ++(g1->begin()), g1->end(), M);
+  /// Return the number of elements (not the number of buckets).
+  size_t size() const {return mSize;}
 
-  if (!M.empty())
-    mHeap.push(M.begin(),M.end());
+  MATHICGB_INLINE
+  std::pair<Node*, bool> insertProduct(ConstMonoRef a, ConstMonoRef b) {
+    auto newNode = new (mNodes.alloc()) Node();
+    monoid().multiply(a, b, newNode->mono());
+    const auto abHash = monoid().hash(newNode->mono());
+    auto& bucket = mBuckets[hashToIndex(abHash)];
 
-  stats_n_inserts++;
-  stats_n_compares += mHeap.getConfiguration().getComparisons();
-  mHeap.getConfiguration().resetComparisons();
-}
+    for (auto node = bucket; node != nullptr; node = node->next()) {
+      if (abHash != monoid().hash(node->mono()))
+        continue;
+      if (monoid().equal(newNode->mono(), node->mono())) {
+        mNodes.free(newNode);
+        return std::make_pair(node, false); // found a*b.
+      }
+    }
 
-void BjarkeGeobucket2::insert(monomial multiplier, const Poly *g1)
-{
-  MATHICGB_ASSERT(g1 != 0);
-  MATHICGB_ASSERT(g1->termsAreInDescendingOrder());
+    mRing.coefficientSet(newNode->value(), 0);
+    newNode->next() = bucket;
+    bucket = newNode;
+    ++mSize;
+    return std::make_pair(newNode, true); // inserted mono
+  }
 
-  HashPoly M;
-  mHashTableOLD.insert(multiplier, g1->begin(), g1->end(), M);
-  if (!M.empty())
-    mHeap.push(M.begin(),M.end());
+  MATHICGB_INLINE
+  void remove(Node* nodeToRemove) {
+    MATHICGB_ASSERT(nodeToRemove != 0);
+    MATHICGB_ASSERT(mNodes.fromPool(nodeToRemove));
+    const auto index = hashToIndex(monoid().hash(nodeToRemove->mono()));
+    auto nodePtr = &mBuckets[index];
+    while (*nodePtr != nodeToRemove) {
+      MATHICGB_ASSERT(*nodePtr != nullptr);
+      nodePtr = &(*nodePtr)->next();
+    }
+    *nodePtr = nodeToRemove->next();
+    mNodes.free(nodeToRemove);
+    --mSize;
+  }
 
-  stats_n_inserts++;
-  stats_n_compares += mHeap.getConfiguration().getComparisons();
-  mHeap.getConfiguration().resetComparisons();
-}
+  /// Removes all elements and optimizes internal resources. This is
+  /// fast if there are no elements, so if you know that there are no
+  /// elements and that many operations have happened since the last clear,
+  /// then call clear for better cache performance. If there is even one
+  /// element, then this takes linear time in the number of buckets.
+  void clear() {
+    if (!empty()) {
+      std::fill_n(mBuckets.get(), bucketCount(), nullptr);
+      mSize = 0;
+    }
+    mNodes.freeAllBuffers();
+  }
+
+  bool empty() const {return mSize == 0;}
+
+private:
+  static HashValue computeHashMask(const size_t requestedBucketCount) {
+    // round request up to nearest power of 2.
+    size_t pow2 = 1;
+    while (pow2 < requestedBucketCount && 2 * pow2 != 0)
+      pow2 *= 2;
+    MATHICGB_ASSERT(pow2 > 0 && (pow2 & (pow2 - 1)) == 0); // power of two
+
+    // If casting to a hash value overflows, then we get the maximum
+    // possible number of buckets based on the range of the hash
+    // value type. Only unsigned overflow is defined, so we need
+    // to assert that the hash type is unsigned.
+    static_assert(!std::numeric_limits<HashValue>::is_signed, "");
+    const auto hashToIndexMask = static_cast<HashValue>(pow2 - 1);
+    MATHICGB_ASSERT(pow2 == hashMaskToBucketCount(hashToIndexMask));
+    return hashToIndexMask;
+  }
+
+  static size_t hashMaskToBucketCount(const HashValue mask) {
+    const auto count = static_cast<size_t>(mask) + 1u; // should be power of 2
+    MATHICGB_ASSERT(count > 0 && (count & (count - 1)) == 0); 
+    return count;
+  }
+
+  static size_t sizeofNode(const PolyRing& ring) {
+    return
+      sizeof(Node) +
+      sizeof(Value) -
+      sizeof(exponent) +
+      ring.maxMonomialByteSize();
+  }
+
+  size_t hashToIndex(const HashValue hash) const {
+    const auto index = hashToIndex(hash, mHashToIndexMask);
+    MATHICGB_ASSERT(index == hash % bucketCount());
+    return index;
+  }
+
+  static size_t hashToIndex(const HashValue hash, const HashValue mask) {
+    return hash & mask;
+  }
+
+  HashValue mHashToIndexMask;
+  std::unique_ptr<Node*[]> mBuckets;
+  const PolyRing& mRing;
+  memt::BufferPool mNodes;
+  size_t mSize;
+};
+
+
+class BjarkeGeobucket2 : public TypicalReducer {
+public:
+  BjarkeGeobucket2(const PolyRing& ring):
+    mRing(ring),
+    mMap(10000, ring),
+    mQueue2(QueueConfiguration2(ring.monoid()))
+  {}
+
+  virtual std::string description() const {return "bjarke geo buckets";}
+
+  void insertTail(const_term multiplier, const Poly *g1) {
+    MATHICGB_ASSERT(g1 != 0);
+    MATHICGB_ASSERT(g1->termsAreInDescendingOrder());
+
+    if (g1->nTerms() <= 1)
+      return;
+
+    mNodesTmp.clear();
+    auto it = g1->begin();
+    const auto end = g1->end();
+    for (++it; it != end; ++it) {
+      auto p = mMap.insertProduct(it.getMonomial(), multiplier.monom);
+      coefficient prod;
+      mRing.coefficientMult(it.getCoefficient(), multiplier.coeff, prod);
+      mRing.coefficientAddTo(p.first->value(), prod);
+      if (p.second)
+        mNodesTmp.emplace_back(p.first);
+    }
+    if (!mNodesTmp.empty())
+      mQueue2.push(mNodesTmp.begin(), mNodesTmp.end());
+  }
 
-bool BjarkeGeobucket2::leadTerm(const_term &result)
-{
-  while (!mHeap.empty())
-    {
-      if (mHashTableOLD.popTerm(mHeap.top(), result.coeff, result.monom))
-        // returns true if mHeap.top() is not the zero element
+  void insert(monomial multiplier, const Poly *g1) {
+    MATHICGB_ASSERT(g1 != 0);
+    MATHICGB_ASSERT(g1->termsAreInDescendingOrder());
+
+    mNodesTmp.clear();
+    const auto end = g1->end();
+    for (auto it = g1->begin(); it != end; ++it) {
+      auto p = mMap.insertProduct(it.getMonomial(), multiplier);
+      mRing.coefficientAddTo(p.first->value(), it.getCoefficient());
+      if (p.second)
+        mNodesTmp.emplace_back(p.first);
+    }
+    if (!mNodesTmp.empty())
+      mQueue2.push(mNodesTmp.begin(), mNodesTmp.end());
+  }
+
+  virtual bool leadTerm(const_term& result) {
+    while (!mQueue2.empty()) {
+      const auto node = mQueue2.top();
+      if (node->value() != 0) {
+        result.coeff = node->value();
+        result.monom = Monoid::toOld(node->mono());
         return true;
-      mHeap.pop();
+      }
+      mQueue2.pop();
+      mMap.remove(node);
     }
-  return false;
-}
+    return false;
+  }
 
-void BjarkeGeobucket2::removeLeadTerm()
-// returns true if there is a term to extract
-{
-  mHeap.pop();
-}
+  virtual void removeLeadTerm() {
+    MATHICGB_ASSERT(!mQueue2.empty());
+    const auto node = mQueue2.top();
+    mQueue2.pop();
+    mMap.remove(node);
+  }
+
+  virtual size_t getMemoryUse() const {
+    size_t result = TypicalReducer::getMemoryUse();
+    //result += mMap.getMemoryUse();
+    return result;
+  }
 
-void BjarkeGeobucket2::value(Poly &result)
-// keep extracting lead term until done
-{
-  const_term t;
-  while (leadTerm(t))
-    {
-      result.appendTerm(t.coeff, t.monom);
-      mHeap.pop();
+
+protected:
+  void resetReducer() {
+    const_term t;
+    while (!mQueue2.empty()) {
+      const auto node = mQueue2.top();
+      mQueue2.pop();
+      mMap.remove(node);
     }
-  resetReducer();
-}
+    MATHICGB_ASSERT(mMap.empty());
+    MATHICGB_ASSERT(mQueue2.empty());
+    mMap.clear();
+  }
 
-void BjarkeGeobucket2::resetReducer()
-{
-  const_term t;
-  while (leadTerm(t))
-    {
-      mHeap.pop();
+private:
+  class QueueConfiguration {
+  public:
+    typedef PolyRing::Monoid Monoid;
+
+    QueueConfiguration(const Monoid& monoid):
+      mMonoid(monoid), geoBase(4), minBucketSize(1) {}
+
+    typedef PolyHashTable::node* Entry;
+
+    typedef bool CompareResult;
+    CompareResult compare(const Entry& a, const Entry& b) const {
+      return mMonoid.lessThan(a->monom, b->monom);
     }
-  mHashTableOLD.reset();
-  // how to reset mHeap ?
-}
+    bool cmpLessThan(CompareResult r) const {return r;}
 
-size_t BjarkeGeobucket2::getMemoryUse() const
-{
-  size_t result = TypicalReducer::getMemoryUse();
-  result += mHashTableOLD.getMemoryUse();
-  result += mHeap.getMemoryUse();
-  result += mHashTable.memoryUse();
-  return result;
-}
+    static const bool supportDeduplication = false;
+    bool cmpEqual(CompareResult r) const {
+      MATHICGB_ASSERT(false); // Not supposed to be used.
+      return false;
+    }
+    Entry deduplicate(const Entry& a, const Entry& /* b */) const {
+      MATHICGB_ASSERT(false); // Not supposed to be used.
+      return a;
+    }
+
+    static const bool minBucketBinarySearch = true;
+    static const bool trackFront = true;
+    static const bool premerge = false;
+    static const bool collectMax = false;
+    static const mic::GeobucketBucketStorage bucketStorage =
+      static_cast<mic::GeobucketBucketStorage>(1);
+    static const size_t insertFactor = 1;
+
+    const size_t geoBase;
+    const size_t minBucketSize;
+
+  private:
+    const Monoid& mMonoid;
+  };
+
+  class QueueConfiguration2 {
+  public:
+    typedef PolyRing::Monoid Monoid;
+
+    QueueConfiguration2(const Monoid& monoid):
+      mMonoid(monoid), geoBase(4), minBucketSize(1) {}
+
+    typedef MonoMap::Node* Entry;
+
+    typedef bool CompareResult;
+    CompareResult compare(const Entry& a, const Entry& b) const {
+      return mMonoid.lessThan(a->mono(), b->mono());
+    }
+    bool cmpLessThan(CompareResult r) const {return r;}
+
+    static const bool supportDeduplication = false;
+    bool cmpEqual(CompareResult r) const {
+      MATHICGB_ASSERT(false); // Not supposed to be used.
+      return false;
+    }
+    Entry deduplicate(const Entry& a, const Entry& /* b */) const {
+      MATHICGB_ASSERT(false); // Not supposed to be used.
+      return a;
+    }
+
+    static const bool minBucketBinarySearch = true;
+    static const bool trackFront = true;
+    static const bool premerge = false;
+    static const bool collectMax = false;
+    static const mic::GeobucketBucketStorage bucketStorage =
+      static_cast<mic::GeobucketBucketStorage>(1);
+    static const size_t insertFactor = 1;
+
+    const size_t geoBase;
+    const size_t minBucketSize;
+
+  private:
+    const Monoid& mMonoid;
+  };
+
+  mutable std::vector<MonoMap::Node*> mNodesTmp;
+  const PolyRing& mRing;
+  MonoMap mMap;
+  mic::Geobucket<QueueConfiguration2> mQueue2;
+};
 
-void BjarkeGeobucket2::dump() const
-{
-  mHashTableOLD.dump(0);
+std::unique_ptr<TypicalReducer> makeBjarkeGeobucket2(const PolyRing& ring) {
+  return make_unique<BjarkeGeobucket2>(ring);
 }
 
 MATHICGB_NAMESPACE_END
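
MonoMap::insertProduct above returns a (node, inserted) pair in the style of
std::unordered_map::insert, which is what lets insert() and insertTail() accumulate
coefficients in place and push only genuinely new monomials onto the geobucket
queue. A rough sketch of that calling pattern using a plain std::unordered_map, with
integer stand-ins for monomials and coefficients (all names here are illustrative,
none are from the patch):

    #include <cstdint>
    #include <unordered_map>
    #include <utility>
    #include <vector>

    typedef std::uint64_t Mono; // stand-in for a monomial
    typedef std::int64_t Coeff; // stand-in for a ring coefficient

    // Add coeff onto the entry for mono, creating the entry if necessary, and
    // record only newly created monomials -- the analogue of pushing just the
    // new nodes onto the geobucket queue in insert()/insertTail().
    void addTerm(
      std::unordered_map<Mono, Coeff>& table,
      Mono mono,
      Coeff coeff,
      std::vector<Mono>& newMonos
    ) {
      auto p = table.insert(std::make_pair(mono, Coeff(0)));
      p.first->second += coeff; // accumulate whether or not mono was new
      if (p.second)             // true exactly when mono was not present before
        newMonos.push_back(mono);
    }
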
diff --git a/src/mathicgb/BjarkeGeobucket2.hpp b/src/mathicgb/BjarkeGeobucket2.hpp
index 2f4443c..eaffe5c 100755
--- a/src/mathicgb/BjarkeGeobucket2.hpp
+++ b/src/mathicgb/BjarkeGeobucket2.hpp
@@ -3,109 +3,12 @@
 #ifndef MATHICGB_BJARKE_GEOBUCKET2_GUARD
 #define MATHICGB_BJARKE_GEOBUCKET2_GUARD
 
-#include "TypicalReducer.hpp"
-#include "PolyHashTable.hpp"
-#include <mathic.h>
-
 MATHICGB_NAMESPACE_BEGIN
 
-class GeoConfiguration {
-public:
-  GeoConfiguration(
-                   const PolyRing &ring,
-                   size_t geoBase,
-                   size_t minBucketSize):
-    mRing(ring), geoBase(geoBase), minBucketSize(minBucketSize), _comparisons(0) {}
-
-  const PolyRing &mRing;
-  size_t geoBase;
-  size_t minBucketSize;
-
-  typedef PolyHashTable::node * Entry;
-
-  typedef bool CompareResult;
-
-  CompareResult compare(const Entry& a, const Entry& b) const {
-    ++_comparisons;
-    return mRing.monomialLT(a->monom, b->monom);
-  }
-  bool cmpLessThan(CompareResult r) const {return r;}
-
-  static const bool supportDeduplication = false;
-  bool cmpEqual(CompareResult r) const {MATHICGB_ASSERT(false);return r;} // NOT USED IN OUR CASE HERRE!
-  Entry deduplicate(const Entry& a, const Entry& /* b */) const {MATHICGB_ASSERT(false); return a;}
-
-  size_t getComparisons() const {return _comparisons;}
-  void resetComparisons() const {_comparisons = 0;}
-
-  static const bool minBucketBinarySearch = true; // MinBucketBinarySearch;
-  static const bool trackFront = true; //TrackFront;
-  static const bool premerge = false;
-  static const bool collectMax = false;
-  static const mic::GeobucketBucketStorage bucketStorage = static_cast<mic::GeobucketBucketStorage>(1);
-  static const size_t insertFactor = 1;
-
-private:
-  mutable size_t _comparisons;
-};
-
-class BjarkeGeobucket2Configuration
-{
-public:
-  typedef const_monomial Key;
-  typedef coefficient Value;
-
-  BjarkeGeobucket2Configuration(const PolyRing &ring) : mRing(ring) {}
-
-  size_t hash(Key k) {return mRing.monomialHashValue(k);}
-
-  bool keysEqual(Key k1, Key k2) {
-    return ((mRing.monomialHashValue(k1) == mRing.monomialHashValue(k2)) 
-            && mRing.monomialEQ(k1, k2));
-  }
-
-  void combine(Value &a, const Value &b) {mRing.coefficientAddTo(a,b);}
-private:
-  const PolyRing &mRing;
-};
-
-class BjarkeGeobucket2 : public TypicalReducer {
-public:
-  BjarkeGeobucket2(const PolyRing *R);
-
-  virtual std::string description() const { return "bjarke geo buckets"; }
-
-  void insertTail(const_term multiplier, const Poly *f);
-  void insert(monomial multiplier, const Poly *f);
-
-  virtual bool leadTerm(const_term &result);
-  virtual void removeLeadTerm();
-
-  void value(Poly &result); // keep extracting lead term until done
-
-  virtual size_t getMemoryUse() const;
-
-  void dump() const; // Used for debugging
-
-protected:
-  void resetReducer();
-
-private:
-  typedef mic::HashTable<BjarkeGeobucket2Configuration>::Handle node;
-  typedef PolyHashTable::MonomialArray HashPoly;
-
-  void insert(Poly::const_iterator first, 
-              Poly::const_iterator last,
-              std::vector<node*> &result);
-
-  void insert(const_term multiplier, Poly::iterator first, Poly::iterator last);
-
+class TypicalReducer;
+class PolyRing;
 
-  const PolyRing &mRing;
-  PolyHashTable mHashTableOLD;
-  mic::HashTable<BjarkeGeobucket2Configuration> mHashTable;
-  mic::Geobucket< GeoConfiguration > mHeap;
-};
+std::unique_ptr<TypicalReducer> makeBjarkeGeobucket2(const PolyRing& ring);
 
 MATHICGB_NAMESPACE_END
 #endif
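
After this change the header exposes only forward declarations and a factory
function, so client code depends on the TypicalReducer interface rather than on the
concrete class, which now lives entirely in the .cpp file. The same pattern in
miniature, under made-up names (Widget and makeWidget are not mathicgb types):

    // widget.hpp -- clients see only the abstract interface and a factory.
    #include <memory>
    #include <string>

    class Widget {
    public:
      virtual ~Widget() {}
      virtual std::string description() const = 0;
    };

    std::unique_ptr<Widget> makeWidget();

    // widget.cpp -- the concrete type never leaves this translation unit.
    namespace {
      class ConcreteWidget : public Widget {
      public:
        virtual std::string description() const {return "concrete widget";}
      };
    }

    std::unique_ptr<Widget> makeWidget() {
      return std::unique_ptr<Widget>(new ConcreteWidget());
    }
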
diff --git a/src/mathicgb/MonoMonoid.hpp b/src/mathicgb/MonoMonoid.hpp
index 62bfa45..c2aa125 100755
--- a/src/mathicgb/MonoMonoid.hpp
+++ b/src/mathicgb/MonoMonoid.hpp
@@ -1561,6 +1561,8 @@ private:
   friend class MonoVector;
   friend class MonoPool;
 
+  friend class MonoMap;
+
   typedef typename Base::Gradings Gradings;
 
   bool debugAssertValid() const {
@@ -1635,6 +1637,11 @@ private:
     return true;
   }
 
+  static MonoPtr toMonoPtr(Exponent* raw) {return MonoPtr(raw);}
+  static ConstMonoPtr toMonoPtr(const Exponent* raw) {
+    return ConstMonoPtr(raw);
+  }
+
   // *** Accessing fields of a monomial
   template<class M>
   static auto rawPtr(M&& m) -> decltype(m.internalRawPtr()) {
diff --git a/src/mathicgb/PolyHashReducer.cpp b/src/mathicgb/PolyHashReducer.cpp
index 9164bc5..ccecebc 100755
--- a/src/mathicgb/PolyHashReducer.cpp
+++ b/src/mathicgb/PolyHashReducer.cpp
@@ -161,9 +161,4 @@ void PolyHashReducer::resetReducer()
   H_.reset();
 }
 
-void PolyHashReducer::dump() const
-{
-  H_.dump(0);
-}
-
 MATHICGB_NAMESPACE_END
diff --git a/src/mathicgb/PolyHashReducer.hpp b/src/mathicgb/PolyHashReducer.hpp
index d7c5e54..9178304 100755
--- a/src/mathicgb/PolyHashReducer.hpp
+++ b/src/mathicgb/PolyHashReducer.hpp
@@ -23,7 +23,6 @@ public:
   virtual void removeLeadTerm();
 
   void value(Poly &result); // keep extracting lead term until done
-  void dump() const;
 
   virtual size_t getMemoryUse() const;
 
diff --git a/src/mathicgb/PolyHashTable.cpp b/src/mathicgb/PolyHashTable.cpp
index 9df610d..46f81bf 100755
--- a/src/mathicgb/PolyHashTable.cpp
+++ b/src/mathicgb/PolyHashTable.cpp
@@ -9,9 +9,6 @@
 
 MATHICGB_NAMESPACE_BEGIN
 
-const double PolyHashTable::threshold = 0.1;
-const bool AlwaysInsertAtEnd = true;
-
 PolyHashTable::PolyHashTable(const PolyRing *R, int nbits)
   : mRing(*R),
     mHashMask((static_cast<size_t>(1) << nbits)-1),
@@ -26,17 +23,6 @@ PolyHashTable::PolyHashTable(const PolyRing *R, int nbits)
   mMonomialSize = R->maxMonomialSize() * sizeof(exponent);
   // set each entry of mHashTable to null
 
-  mStats.max_table_size = 0;
-  mStats.max_chain_len_ever = 0;
-  mStats.n_resets = 0;
-  mStats.max_n_nonempty_bins = 0;
-  mStats.n_inserts = 0;
-  mStats.n_moneq_true = 0;
-  mStats.n_moneq_false = 0;
-  mStats.n_easy_inserts = 0;
-
-  mStats.n_nodes = 0;
-  mStats.n_nonempty_bins = 0;
   reset();
 }
 
@@ -46,8 +32,6 @@ void PolyHashTable::reset()
 
   // Clear the table, and memory areas.
 
-  mStats.n_resets++;
-
 #if 0
   MATHICGB_ASSERT(mNodeCount != 0);
   for (size_t count = 0; count < mTableSize; ++count)
@@ -61,20 +45,8 @@ void PolyHashTable::reset()
 
   mBinCount = 0;
   mNodeCount = 0;
-  
-  resetStats();
-  MATHICGB_SLOW_ASSERT(computeNodeCount() == 0);
 }
 
-size_t PolyHashTable::computeNodeCount() const
-{
-  size_t result = 0;
-  for (size_t i=0; i<mTableSize; i++)
-    {
-      for (node *p=mHashTable[i]; p != 0; p=p->next) result++;
-    }
-  return result;
-}
 void PolyHashTable::resize(size_t new_nbits)
 // Don't change the nodes, table, but do recreate mHashTable
 {
@@ -83,7 +55,6 @@ void PolyHashTable::resize(size_t new_nbits)
   // Loop through each one, reinserting the node into the proper bin.
 
   //  std::cout << "resizing PolyHashTable to " << new_nbits << " bits" << " count=" << mNodeCount << std::endl;
-  MATHICGB_ASSERT(computeNodeCount() == mNodeCount);
   size_t const old_table_size = mTableSize;
   mTableSize = static_cast<size_t>(1) << new_nbits;
   mLogTableSize = new_nbits;
@@ -105,7 +76,7 @@ void PolyHashTable::resize(size_t new_nbits)
           const_monomial m = q->monom;
           size_t hashval = mRing.monomialHashValue(m) & mHashMask;
           node *r = mHashTable[hashval];
-          if (r == 0 || !AlwaysInsertAtEnd) 
+          if (r == 0) 
             {
               mBinCount++;
               q->next = r;
@@ -120,22 +91,15 @@ void PolyHashTable::resize(size_t new_nbits)
         }
     }
 
-  mStats.max_table_size = mTableSize;
-
   // todo: consider if this can overflow or something else nasty might happen
+  const double threshold = 0.1;
   mMaxCountBeforeRebuild =
     static_cast<size_t>(std::floor(mTableSize * threshold));
-
-  MATHICGB_ASSERT(computeNodeCount() == mNodeCount);
 }
 
 PolyHashTable::node * PolyHashTable::makeNode(coefficient coeff, const_monomial monom)
 {
   mNodeCount++;
-  if (mNodeCount > mStats.n_nodes)
-    mStats.n_nodes = mNodeCount;
-  if (mBinCount > mStats.n_nonempty_bins)
-    mStats.n_nonempty_bins = mBinCount;
   node *q = static_cast<node *>(mArena.allocObjectNoCon<node>());
   q->next = 0;
   q->monom = monom; 
@@ -146,134 +110,90 @@ PolyHashTable::node * PolyHashTable::makeNode(coefficient coeff, const_monomial
 bool PolyHashTable::lookup_and_insert(const_monomial m, coefficient val, node *& result)
 // Returns true if m is in the table, else inserts m into the hash table (as is, without copying it)
 {
-  mStats.n_inserts++;
-
   size_t fullHashVal = mRing.monomialHashValue(m);
   size_t hashval = fullHashVal & mHashMask;
 
   MATHICGB_ASSERT(hashval < mHashTable.size());
   node *tmpNode = mHashTable[hashval];
-  if (tmpNode == 0)
-    {
-      mStats.n_easy_inserts++;
-      result = makeNode(val, m);
-      mHashTable[hashval] = result;
+  if (tmpNode == 0) {
+    result = makeNode(val, m);
+    mHashTable[hashval] = result;
+  } else {
+    while (true) {
+      if (mRing.monomialHashValue(tmpNode->monom) == fullHashVal && mRing.monomialEQ(m, tmpNode->monom)) {
+        mRing.coefficientAddTo(tmpNode->coeff, val);
+        result = tmpNode;
+        return true;
+      }
+      if (tmpNode->next == 0) {
+        result = makeNode(val, m);
+        tmpNode->next = result;
+        break;
+      }
+      tmpNode = tmpNode->next;
     }
-  else
-    {
-      // loop through to see if we have it
-      size_t chainLength = 0;
-      while (true)
-        {
-          if (mRing.monomialHashValue(tmpNode->monom) == fullHashVal && mRing.monomialEQ(m, tmpNode->monom))
-            {
-              mStats.n_moneq_true++;
-              mRing.coefficientAddTo(tmpNode->coeff, val);
-              result = tmpNode;
-              return true;
-            }
-          mStats.n_moneq_false++;
-          if (tmpNode->next == 0)
-            {
-              // time to insert the monomial
-              result = makeNode(val, m);
-              chainLength++;
-              if (AlwaysInsertAtEnd)
-                {
-                  tmpNode->next = result;
-                }
-              else
-                {
-                  result->next = mHashTable[hashval];
-                  mHashTable[hashval] = result;
-                }
-              break;
-            }
-          tmpNode = tmpNode->next;
-          chainLength++;
-        }
-      if (chainLength > mStats.max_chain_len_ever)
-        mStats.max_chain_len_ever = chainLength;
-    }
-
+  }
 
   if (mNodeCount > mMaxCountBeforeRebuild)
     resize(mLogTableSize + 2);  // increase by a factor of 4??
 
-  MATHICGB_SLOW_ASSERT(computeNodeCount() == mNodeCount);
-
   return false;
 }
 
-void PolyHashTable::insert(Poly::const_iterator first, 
-                           Poly::const_iterator last,
-                           MonomialArray &result)
-{
-  for (Poly::const_iterator i = first; i != last; ++i)
-    {
-      monomial monomspace = mRing.allocMonomial(mArena);
-      node *p;
-      mRing.monomialCopy(i.getMonomial(), monomspace);
-      bool found = lookup_and_insert(monomspace, i.getCoefficient(), p);
-      if (found)
-        {
-          // remove the monomial.  It should be at the top of the mArena arena.
-          mRing.freeTopMonomial(mArena,monomspace);
-        }
-      else
-        {
-          result.push_back(p);
-        }
-    }
+void PolyHashTable::insert(
+  Poly::const_iterator first, 
+  Poly::const_iterator last,
+  MonomialArray &result
+) {
+  for (auto i = first; i != last; ++i) {
+    monomial monomspace = mRing.allocMonomial(mArena);
+    node* p;
+    mRing.monomialCopy(i.getMonomial(), monomspace);
+    bool found = lookup_and_insert(monomspace, i.getCoefficient(), p);
+    if (found)
+      mRing.freeTopMonomial(mArena,monomspace);
+    else
+      result.push_back(p);
+  }
 }
 
-void PolyHashTable::insert(const_term multiplier, 
-                           Poly::const_iterator first, 
-                           Poly::const_iterator last,
-                           MonomialArray &result)
-{
-  for (Poly::const_iterator i = first; i != last; ++i)
-    {
-      monomial monomspace = mRing.allocMonomial(mArena);
-      coefficient c;
-      mRing.coefficientSet(c, multiplier.coeff);
-      node *p;
-      mRing.monomialMult(multiplier.monom, i.getMonomial(), monomspace);
-      mRing.coefficientMultTo(c, i.getCoefficient());
-      bool found = lookup_and_insert(monomspace, c, p);
-      if (found)
-        {
-          // remove the monomial.  It should be at the top of the mArena arena.
-          mRing.freeTopMonomial(mArena,monomspace);
-        }
-      else
-        {
-          result.push_back(p);
-        }
-    }
+void PolyHashTable::insert(
+  const_term multiplier, 
+  Poly::const_iterator first, 
+  Poly::const_iterator last,
+  MonomialArray &result
+) {
+  for (auto i = first; i != last; ++i) {
+    monomial monomspace = mRing.allocMonomial(mArena);
+    coefficient c;
+    mRing.coefficientSet(c, multiplier.coeff);
+    node* p;
+    mRing.monomialMult(multiplier.monom, i.getMonomial(), monomspace);
+    mRing.coefficientMultTo(c, i.getCoefficient());
+    bool found = lookup_and_insert(monomspace, c, p);
+    if (found)
+      mRing.freeTopMonomial(mArena,monomspace);
+    else
+      result.push_back(p);
+  }
 }
 
-void PolyHashTable::insert(const_monomial multiplier, 
-                           Poly::const_iterator first, 
-                           Poly::const_iterator last,
-                           MonomialArray &result)
-{
-  for (Poly::const_iterator i = first; i != last; ++i)
-    {
-      monomial monomspace = mRing.allocMonomial(mArena);
-      node *p;
-      mRing.monomialMult(multiplier, i.getMonomial(), monomspace);
-      bool found = lookup_and_insert(monomspace, i.getCoefficient(), p);
-      if (found)
-        {
-          // remove the monomial. It should be at the top of the mArena arena.
-          mRing.freeTopMonomial(mArena,monomspace);
-        }
-      else
-        {
-          result.push_back(p);
-        }
-    }
+void PolyHashTable::insert(
+  const_monomial multiplier, 
+  Poly::const_iterator first, 
+  Poly::const_iterator last,
+  MonomialArray& result
+) {
+  for (Poly::const_iterator i = first; i != last; ++i) {
+    monomial monomspace = mRing.allocMonomial(mArena);
+    node* p;
+    mRing.monomialMult(multiplier, i.getMonomial(), monomspace);
+    bool found = lookup_and_insert(monomspace, i.getCoefficient(), p);
+    if (found)
+      mRing.freeTopMonomial(mArena,monomspace);
+    else
+      result.push_back(p);
+  }
 }
 
 std::pair<bool, PolyHashTable::node*>
@@ -322,29 +242,6 @@ bool PolyHashTable::popTerm(node *p, coefficient &result_coeff, const_monomial &
   return false;
 }
 
-void PolyHashTable::toPoly(const MonomialArray::const_iterator &fbegin,
-                           const MonomialArray::const_iterator &fend,
-                           Poly &result)
-{
-  coefficient coeff;
-  const_monomial monom;
-  for (MonomialArray::const_iterator i = fbegin; i != fend; ++i)
-    if (popTerm(*i, coeff, monom))
-      result.appendTerm(coeff, monom);
-}
-
-void PolyHashTable::toPoly(const MonomialArray &f, Poly &result)
-{
-  // Here we take the monomials in f.    Find corresponding coeff, and append to result.
-  // ASSUMPTION: The monomials of f are in order, AND each appears in the hash table
-  toPoly(f.begin(), f.end(), result);
-}
-
-void PolyHashTable::fromPoly(const Poly &f, MonomialArray &result)
-{
-  insert(f.begin(), f.end(), result);
-}
-
 size_t PolyHashTable::getMemoryUse() const
 {
   size_t result = mHashTable.capacity() * sizeof(node *);
@@ -352,111 +249,4 @@ size_t PolyHashTable::getMemoryUse() const
   return result;
 }
 
-void PolyHashTable::resetStats() const
-{
-  //  mStats.max_chain_len = 0;
-  mStats.n_nonempty_bins = 0;
-}
-
-void PolyHashTable::getStats(Stats &stats) const
-{
-  // First we set the values in mStats
-
-  //  mStats.max_chain_len = 0;
-
-#if 0
-  mStats.n_nonempty_bins = 0;
-  mStats.n_nodes = 0;
-  for (size_t i = 0; i<mTableSize; i++)
-    {
-      if (mHashTable[i] == 0) continue;
-      mStats.n_nonempty_bins++;
-      size_t chain_len = 0;
-      for (node *p = mHashTable[i]; p != 0; p = p->next)
-        chain_len++;
-      mStats.n_nodes += chain_len;
-      if (chain_len > mStats.max_chain_len)
-        mStats.max_chain_len = chain_len;
-    }
-
-  if (mStats.max_chain_len > mStats.max_chain_len_ever)
-    mStats.max_chain_len_ever = mStats.max_chain_len;
-#endif
-
-  if (&stats != &mStats)
-    stats = mStats;
-}
-
-void PolyHashTable::dump(int level) const
-{
-  mic::ColumnPrinter pr;
-  pr.addColumn();
-  pr.addColumn(false);
-  pr.addColumn(false);
-
-  std::ostream& name = pr[0];
-  std::ostream& value = pr[1];
-  std::ostream& extra = pr[2];
-
-  name << "PolyHashTable stats:" << '\n';
-  value << "\n";
-  extra << "\n";
-
-  name << "# resets:\n";
-  value << mic::ColumnPrinter::commafy(mStats.n_resets) << '\n';
-  extra << '\n';
-
-  name << "# bins:\n";
-  value << mic::ColumnPrinter::commafy(mTableSize) << '\n';
-  extra << '\n';
-
-  name << "max # monomials in table:\n";
-  value << mic::ColumnPrinter::commafy(mStats.n_nodes) << '\n';
-  extra << '\n';
-
-  name << "max # nonempty bins:\n";
-  value << mic::ColumnPrinter::commafy(mStats.n_nonempty_bins) << '\n';
-  extra << mic::ColumnPrinter::
-    percentInteger(mStats.n_nonempty_bins, mTableSize) << " used\n";
-
-  name << "max chain length ever:\n";
-  value << mic::ColumnPrinter::commafy(mStats.max_chain_len_ever) << '\n';
-  extra << '\n';
-
-  //  name << "max chain length this computation:\n";
-  //  value << mic::ColumnPrinter::commafy(mStats.max_chain_len) << '\n';
-  //  extra << '\n';
-
-  name << "# calls to lookup_and_insert:\n";
-  value << mic::ColumnPrinter::commafy(mStats.n_inserts) << '\n';
-  extra << '\n';
-
-  name << "# easy inserts:\n";
-  value << mic::ColumnPrinter::commafy(mStats.n_easy_inserts) << '\n';
-  extra << '\n';
-
-  name << "# monomialEQ true calls:\n";
-  value << mic::ColumnPrinter::commafy(mStats.n_moneq_true) << '\n';
-  extra << '\n';
-
-  name << "# monomialEQ false calls:\n";
-  value << mic::ColumnPrinter::commafy(mStats.n_moneq_false) << '\n';
-  extra << "(Also number of monomials inserted in populated bins)\n";
-
-  std::cout << pr << std::flush;
-
-  if (level == 0) return;
-
-  for (size_t i = 0; i<mTableSize; i++)
-    {
-      if (mHashTable[i] == 0) continue;
-      std::cout << "bin " << i << ": ";
-      Poly f(mRing);
-      for (node *p = mHashTable[i]; p != 0; p = p->next)
-        f.appendTerm(p->coeff, p->monom);
-      f.display(std::cout);
-      std::cout << std::endl;
-    }
-}
-
 MATHICGB_NAMESPACE_END
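
With the statistics fields removed, PolyHashTable's growth policy reduces to the
load-factor check still visible above: resize() recomputes a rebuild limit from a
fixed threshold, and lookup_and_insert() grows the table by two bits (a factor of
four) once the node count passes that limit. A compact sketch of the policy, using
standalone names and the same 0.1 threshold as the patch:

    #include <cmath>
    #include <cstddef>

    // Recomputed on every resize: rebuild once the number of stored monomials
    // exceeds this fraction of the bucket count.
    std::size_t maxCountBeforeRebuild(std::size_t tableSize) {
      const double threshold = 0.1;
      return static_cast<std::size_t>(std::floor(tableSize * threshold));
    }

    // The check made after each insertion in lookup_and_insert().
    bool shouldGrow(std::size_t nodeCount, std::size_t tableSize) {
      return nodeCount > maxCountBeforeRebuild(tableSize);
    }

    // When growing, the table goes from 2^logTableSize to 2^(logTableSize + 2).
    std::size_t grownLogTableSize(std::size_t logTableSize) {
      return logTableSize + 2;
    }
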
diff --git a/src/mathicgb/PolyHashTable.hpp b/src/mathicgb/PolyHashTable.hpp
index d5801f0..2fd0b51 100755
--- a/src/mathicgb/PolyHashTable.hpp
+++ b/src/mathicgb/PolyHashTable.hpp
@@ -21,67 +21,25 @@ MATHICGB_NAMESPACE_BEGIN
 //
 // Does not take ownership of any of the monomials.
 class PolyHashTable {
-
-  static const double threshold; // if the number of elements is > threshold * (#monomials contained),
-  // then rebuild the table
-
-  // If true, then remove zero entries from hash table.
-  static const bool removeZerosFromTable = true;
-
 public:
   struct node {
     node *next;
     coefficient coeff;
     const_monomial monom;
-    void *unused;
   };
 
-  struct Stats {
-    // These first ones are not reset
-    size_t max_table_size;
-    size_t max_chain_len_ever; // not reset after each reduction
-    size_t n_resets;  // actually, number of times 'reset' is called
-    size_t max_n_nonempty_bins;
-    size_t n_inserts; // # calls to insert a monomial
-    size_t n_moneq_true; // total number of true monomialEQ calls during lookup_and_insert
-    size_t n_moneq_false;  // same, but number for false.
-    size_t n_easy_inserts;
-
-    // These are set after each reduction
-    //    size_t max_chain_len;
-    size_t n_nodes; // # of unique monomials represented here, maximum since last reset
-    size_t n_nonempty_bins;
-  };
-
-  typedef std::vector<node *> MonomialArray;
+  typedef std::vector<node*> MonomialArray;
 
   PolyHashTable(const PolyRing *R, int nbits);
 
-  ~PolyHashTable() {}
-
-  std::string description() const { return "polynomial hash table"; }
+  std::string description() const {return "polynomial hash table";}
 
   void reset();  // Clear the table, and memory areas.
 
   void resize(size_t new_nbits);  // Don't change the nodes, table, but do recreate hashtable_
 
-  void fromPoly(const Poly &f, MonomialArray &result);
-
-  void toPoly(const MonomialArray &f, Poly &result);
-
-  void toPoly(const MonomialArray::const_iterator &fbegin,
-              const MonomialArray::const_iterator &fend,
-              Poly &result);
-
-  void getStats(Stats &stats) const; // set the stats table with current values
-
   size_t getMemoryUse() const;
 
-  void resetStats() const; // reset all values to 0
-
-  void dump(int level = 0) const; // For debugging: display the current state of the table
-
-
   //@ insert multiplier * g: any monomials already in the hash table are removed,
   // but their field coefficients in the table are modified accordingly.
   // Resulting pointers to 'node's are placed, in order, into result.
@@ -113,20 +71,17 @@ public:
   // be valid until a 'reset' is called.
   bool popTerm(node *p, coefficient &result_coeff, const_monomial &result_monom);
 
-  size_t getNodeCount() const { return mNodeCount; }
 protected:
-  size_t computeNodeCount() const;
   node * makeNode(coefficient coeff, const_monomial monom);
   void unlink(node *p);
   bool lookup_and_insert(const_monomial m, coefficient val, node *&result);
 
-  const PolyRing &mRing;
-  std::vector<node *> mHashTable;
+  const PolyRing& mRing;
+  std::vector<node*> mHashTable;
   size_t mHashMask; // this is the number, in binary:  00001111...1, where
                     // the number of 1's is mLogTableSize
 
   memt::Arena mArena; // space for monomials represented in this class.  Also nodes??
-  mutable Stats mStats;
 
   size_t mTableSize;
   size_t mLogTableSize; // number of bits in the table: mTableSize should be 2^mLogTableSize
diff --git a/src/mathicgb/Reducer.cpp b/src/mathicgb/Reducer.cpp
index a06a7b4..6bd2d2d 100755
--- a/src/mathicgb/Reducer.cpp
+++ b/src/mathicgb/Reducer.cpp
@@ -60,7 +60,7 @@ std::unique_ptr<Reducer> Reducer::makeReducerNullOnUnknown(
   case Reducer_PolyHash:
     return std::unique_ptr<Reducer>(new PolyHashReducer(&ring));
   case Reducer_BjarkeGeo:
-    return std::unique_ptr<Reducer>(new BjarkeGeobucket2(&ring));
+    return makeBjarkeGeobucket2(ring);
   case Reducer_TournamentTree:
     return std::unique_ptr<Reducer>(new TournamentReducer(ring));
   case Reducer_HashTourTree:
diff --git a/src/mathicgb/ReducerHash.hpp b/src/mathicgb/ReducerHash.hpp
index 0db8685..e8032d2 100755
--- a/src/mathicgb/ReducerHash.hpp
+++ b/src/mathicgb/ReducerHash.hpp
@@ -17,7 +17,6 @@ template<template<typename> class Queue>
 class ReducerHash : public TypicalReducer {
 public:
   ReducerHash(const PolyRing &ring);
-  ~ReducerHash();
 
   virtual std::string description() const { 
     return mQueue.getName() + "-hashed";
@@ -39,45 +38,25 @@ public:
   public:
     typedef PolyHashTable::node * Entry;
 
-    Configuration(const PolyRing& ring): 
-      PlainConfiguration(ring),
-      mComparisonCount(0) {}
+    Configuration(const PolyRing& ring): PlainConfiguration(ring) {}
 
     CompareResult compare(const Entry& a, const Entry& b) const {
-      ++mComparisonCount;
       return ring().monomialLT(a->monom, b->monom);
     }
-
-    unsigned long long getComparisonCount() const {return mComparisonCount;}
-
-    void resetComparisonCount() const {mComparisonCount = 0;}
-    
-  private:
-    mutable unsigned long long mComparisonCount;
   };
   
 private:
   const PolyRing &mRing;
   PolyHashTable mHashTable;
   Queue<Configuration> mQueue;
-
-  // Number of (distinct) monomials in mQueue.  
-  // Statistics and debugging use only
-  size_t mNodeCount;  
 };
 
 template<template<typename> class Q>
 ReducerHash<Q>::ReducerHash(const PolyRing &ring):
   mRing(ring),
   mHashTable(&ring,10),
-  mQueue(Configuration(ring)),
-  mNodeCount(0) {
-}
-
-template<template<typename> class Q>
-ReducerHash<Q>::~ReducerHash()
-{
-}
+  mQueue(Configuration(ring))
+{}
 
 ///////////////////////////////////////
 // External interface routines ////////
@@ -88,20 +67,10 @@ void ReducerHash<Q>::insertTail(const_term multiplier, const Poly *g1)
 {
   if (g1->nTerms() <= 1) return;
 
-  MATHICGB_ASSERT(mNodeCount == mHashTable.getNodeCount());
   PolyHashTable::MonomialArray M;
   mHashTable.insert(multiplier, ++(g1->begin()), g1->end(), M);
-
-  if (!M.empty()) {
+  if (!M.empty())
     mQueue.push(M.begin(),M.end());
-    mNodeCount += M.size();
-  }
-
-  stats_n_inserts++;
-  stats_n_compares += mQueue.getConfiguration().getComparisonCount();
-  mQueue.getConfiguration().resetComparisonCount();
-
-  MATHICGB_ASSERT(mNodeCount == mHashTable.getNodeCount());
 }
 
 template<template<typename> class Q>
@@ -109,38 +78,21 @@ void ReducerHash<Q>::insert(monomial multiplier, const Poly *g1)
 {
   PolyHashTable::MonomialArray M;
 
-  MATHICGB_ASSERT(mNodeCount == mHashTable.getNodeCount());
-
   mHashTable.insert(multiplier, g1->begin(), g1->end(), M);
 
   if (!M.empty())
-    {
-      mQueue.push(M.begin(),M.end());
-#if 0
-      for (PolyHashTable::MonomialArray::const_iterator a = M.begin(); a != M.end(); ++a)
-        mQueue.push(*a);
-#endif
-      mNodeCount += M.size();
-    }
-
-  stats_n_inserts++;
-  stats_n_compares += mQueue.getConfiguration().getComparisonCount();
-  mQueue.getConfiguration().resetComparisonCount();
-
-  MATHICGB_ASSERT(mNodeCount == mHashTable.getNodeCount());
+    mQueue.push(M.begin(),M.end());
 }
 
 template<template<typename> class Q>
 bool ReducerHash<Q>::leadTerm(const_term &result)
 {
-  MATHICGB_ASSERT(mNodeCount == mHashTable.getNodeCount());
   while (!mQueue.empty())
     {
       if (mHashTable.popTerm(mQueue.top(), result.coeff, result.monom))
         // returns true if mQueue.top() is not the zero element
         return true;
       mQueue.pop();
-      mNodeCount--;
     }
   return false;
 }
@@ -150,24 +102,17 @@ void ReducerHash<Q>::removeLeadTerm()
 // returns true if there is a term to extract
 {
   mQueue.pop();
-  mNodeCount--;
-
-  MATHICGB_ASSERT(mNodeCount == mHashTable.getNodeCount());
 }
 
 template<template<typename> class Q>
 void ReducerHash<Q>::resetReducer()
 {
-  MATHICGB_ASSERT(mNodeCount == mHashTable.getNodeCount());
   const_term t;
   while (leadTerm(t))
     {
       mQueue.pop();
-      mNodeCount--;
     }
-  MATHICGB_ASSERT(mNodeCount == mHashTable.getNodeCount());
   mHashTable.reset();
-  MATHICGB_ASSERT(mNodeCount == mHashTable.getNodeCount());
   // how to reset mQueue ?
 }
 
diff --git a/src/test/poly-test.cpp b/src/test/poly-test.cpp
index fb359bc..d80d974 100755
--- a/src/test/poly-test.cpp
+++ b/src/test/poly-test.cpp
@@ -735,43 +735,6 @@ std::string somePolys =
   d2e3f4+de4f4+bc2df5+abd2f5+bcd2f5+c2d2f5+bc2ef5+abdef5+bd2ef5+d3ef5+b2e2f5+ace2f5+bce2f5+cde2f5+ae3f5+de3f5\n\
 ";
 
-TEST(PolyHashTable,test1) {
-  std::unique_ptr<PolyRing> R = ringFromString("32003 6 1\n1 1 1 1 1 1");
-  PolyHashTable H(R.get(),3);
-  std::unique_ptr<Poly> f1 = polyParseFromString(R.get(), "3bd2+7cd2+5c2f+2adf+bdf+10cef");
-  PolyHashTable::MonomialArray M1, M2;
-  H.fromPoly(*f1, M1);
-  H.fromPoly(*f1, M2);
-  EXPECT_TRUE(M2.empty());
-  Poly g(*R);
-  H.toPoly(M1,g);
-  //  f1->display(std::cout);
-  //  std::cout << std::endl;
-  //  g.display(std::cout);
-  //  std::cout << std::endl;
-  f1->multByCoefficient(2);
-  EXPECT_TRUE(g == *f1);
-  //  H.dump();
-  H.resize(6);
-  //  H.dump();
-  M1.clear();
-  H.fromPoly(*f1, M1);
-  Poly g2(*R);
-  H.toPoly(M1,g2);
-  EXPECT_TRUE(g == g2);
-}
-
-TEST(PolyHashTable,test2) {
-  std::unique_ptr<PolyRing> R(ringFromString("32003 6 1\n1 1 1 1 1 1"));
-  PolyHashTable H(R.get(), 3);
-  std::unique_ptr<Poly> f1(polyParseFromString(R.get(), "3bd2+7cd2+5c2f+2adf+bdf+10cef"));
-  std::unique_ptr<Poly> f2(polyParseFromString(R.get(), "-3bd2+4c2f+cef+f3"));
-  PolyHashTable::MonomialArray M1, M2;
-  H.fromPoly(*f1, M1);
-  H.fromPoly(*f2, M2);
-  //  H.dump(1);
-}
-
 TEST(MonomialHashTable,test1) {
   std::unique_ptr<PolyRing> R = ringFromString("32003 6 1\n1 1 1 1 1 1");
   MonomialHashTable H(R.get(), 3);

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/mathicgb.git


