path: root/chilbert/Hilbert.ipp
Diffstat (limited to 'chilbert/Hilbert.ipp')
-rw-r--r--  chilbert/Hilbert.ipp  10
1 file changed, 5 insertions, 5 deletions
diff --git a/chilbert/Hilbert.ipp b/chilbert/Hilbert.ipp
index 571166d..837375e 100644
--- a/chilbert/Hilbert.ipp
+++ b/chilbert/Hilbert.ipp
@@ -323,7 +323,7 @@ coords_to_compact_index(const P* const p,
// speed by a bit (4% when n=4, m=20)
size_t* const ds = new size_t[m];
- if (mn > FBV_BITS) {
+ if (mn > SmallBitVec::bits_per_rack) {
DynamicBitVec h(mn);
detail::coords_to_index<P, DynamicBitVec, I>(
p, m, n, h, std::move(scratch), ds);
@@ -421,7 +421,7 @@ coords_to_index(const P* const p, const size_t m, const size_t n, H& h)
assert(detail::num_bits(h) >= n * m);
assert(detail::num_bits(p[0]) >= m);
- if (n <= FBV_BITS) {
+ if (n <= SmallBitVec::bits_per_rack) {
// Intermediate variables will fit in fixed width
detail::coords_to_index<P, H, SmallBitVec>(p, m, n, h, SmallBitVec(n));
} else {
@@ -440,7 +440,7 @@ index_to_coords(P* const p, const size_t m, const size_t n, const H& h)
assert(detail::num_bits(h) >= n * m);
assert(detail::num_bits(p[0]) >= m);
- if (n <= FBV_BITS) {
+ if (n <= SmallBitVec::bits_per_rack) {
// Intermediate variables will fit in fixed width
detail::index_to_coords<P, H, SmallBitVec>(p, m, n, h, SmallBitVec(n));
} else {
@@ -461,7 +461,7 @@ coords_to_compact_index(const P* const p,
{
assert(hc.size() >= std::accumulate(ms, ms + n, size_t(0)));
- if (n <= FBV_BITS) {
+ if (n <= SmallBitVec::bits_per_rack) {
// Intermediate variables will fit in fixed width
detail::coords_to_compact_index<P, HC, SmallBitVec>(
p, ms, n, hc, SmallBitVec(n), M, m);
@@ -483,7 +483,7 @@ compact_index_to_coords(P* const p,
{
assert(hc.size() >= std::accumulate(ms, ms + n, size_t(0)));
- if (n <= FBV_BITS) {
+ if (n <= SmallBitVec::bits_per_rack) {
// Intermediate variables will fit in fixed width
SmallBitVec scratch(n);
detail::compact_index_to_coords<P, HC, SmallBitVec>(
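
The change above replaces the FBV_BITS macro with the scoped constant SmallBitVec::bits_per_rack in every check that decides between the fixed-width and the heap-backed bit-vector code paths. The following is a minimal, self-contained sketch of that dispatch pattern, not chilbert's actual implementation; the Rack typedef and the DynamicBitVec layout shown here are illustrative assumptions.

    // Sketch: a fixed-width bit vector advertises its capacity as a static
    // constant, and callers pick it over a heap-backed vector when the
    // required precision fits in a single rack.
    #include <climits>
    #include <cstddef>
    #include <cstdint>
    #include <iostream>
    #include <vector>

    struct SmallBitVecSketch {
        using Rack = uintptr_t; // assumed rack type for illustration

        // Scoped replacement for a free-standing FBV_BITS macro.
        static constexpr size_t bits_per_rack = sizeof(Rack) * CHAR_BIT;

        Rack rack = 0;
    };

    struct DynamicBitVecSketch {
        explicit DynamicBitVecSketch(const size_t nbits)
            : racks((nbits + 63) / 64)
        {}

        std::vector<uint64_t> racks;
    };

    // Mirrors the `n <= SmallBitVec::bits_per_rack` checks in the diff:
    // use the fixed-width vector when n bits fit in one rack, otherwise
    // fall back to the dynamically sized one.
    void describe_choice(const size_t n)
    {
        if (n <= SmallBitVecSketch::bits_per_rack) {
            std::cout << n << " bits: fixed-width vector\n";
        } else {
            DynamicBitVecSketch h(n);
            std::cout << n << " bits: dynamic vector with "
                      << h.racks.size() << " racks\n";
        }
    }

    int main()
    {
        describe_choice(20);  // fits in one rack on typical 64-bit targets
        describe_choice(200); // needs the heap-backed vector
        return 0;
    }

Using a static constexpr member instead of a macro keeps the constant tied to the bit-vector type it describes and lets it participate in normal name lookup, which is consistent with the substitution made throughout the hunks above.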