/*
 * Copyright 2017 Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#pragma once

#include <cstdlib>
#include <cstring>
#include <limits>
#include <type_traits>
#include <folly/Bits.h>
#include <folly/Likely.h>
#include <folly/Portability.h>
#include <folly/Range.h>
#include <folly/experimental/Bits.h>
#include <folly/experimental/CodingDetail.h>
#include <folly/experimental/Instructions.h>
#include <folly/experimental/Select64.h>
#include <glog/logging.h>
#if !FOLLY_X64
#error BitVectorCoding.h requires x86_64
#endif
namespace folly { namespace compression {

static_assert(kIsLittleEndian, "BitVectorCoding.h requires little endianness");
template <class Pointer>
struct BitVectorCompressedListBase {
  BitVectorCompressedListBase() = default;
  template <class OtherPointer>
  BitVectorCompressedListBase(
      const BitVectorCompressedListBase<OtherPointer>& other)
      : size(other.size),
        upperBound(other.upperBound),
        data(other.data),
        bits(reinterpret_cast<Pointer>(other.bits)),
        skipPointers(reinterpret_cast<Pointer>(other.skipPointers)),
        forwardPointers(reinterpret_cast<Pointer>(other.forwardPointers)) {}
  template <class T = Pointer>
  auto free() -> decltype(::free(T(nullptr))) {
    return ::free(data.data());
  }
  size_t size = 0;
  size_t upperBound = 0;
  folly::Range<Pointer> data;

  Pointer bits = nullptr;
  Pointer skipPointers = nullptr;
  Pointer forwardPointers = nullptr;
};
typedef BitVectorCompressedListBase<const uint8_t*> BitVectorCompressedList;
typedef BitVectorCompressedListBase<uint8_t*> MutableBitVectorCompressedList;
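// Note (added commentary): a compressed list occupies a single contiguous
// buffer (data), with the plain bit vector first (bits), followed by the skip
// pointers and then the forward pointers, as laid out by Layout::openList()
// below.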
template <class Value,
          class SkipValue,
          size_t kSkipQuantum = 0,
          size_t kForwardQuantum = 0>
struct BitVectorEncoder {
  static_assert(std::is_integral<Value>::value &&
                    std::is_unsigned<Value>::value,
                "Value should be unsigned integral");
  typedef BitVectorCompressedList CompressedList;
  typedef MutableBitVectorCompressedList MutableCompressedList;

  typedef Value ValueType;
  typedef SkipValue SkipValueType;
  struct Layout;
  static constexpr size_t skipQuantum = kSkipQuantum;
  static constexpr size_t forwardQuantum = kForwardQuantum;
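  // Added commentary: a quantum of 0 disables the corresponding index. With
  // skipQuantum != 0, a skip pointer is stored for every skipQuantum-th value,
  // recording how many elements lie below it (used by BitVectorReader::skipTo).
  // With forwardQuantum != 0, a forward pointer is stored for every
  // forwardQuantum-th element, recording its value (used by
  // BitVectorReader::skip).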
  template <class RandomAccessIterator>
  static MutableCompressedList encode(RandomAccessIterator begin,
                                      RandomAccessIterator end) {
    if (begin == end) {
      return MutableCompressedList();
    }
    BitVectorEncoder encoder(size_t(end - begin), *(end - 1));
    for (; begin != end; ++begin) {
      encoder.add(*begin);
    }
    return encoder.finish();
  }
  explicit BitVectorEncoder(const MutableCompressedList& result)
      : bits_(result.bits),
        skipPointers_(result.skipPointers),
        forwardPointers_(result.forwardPointers),
        result_(result) {
    memset(result.data.data(), 0, result.data.size());
  }
  BitVectorEncoder(size_t size, ValueType upperBound)
      : BitVectorEncoder(
            Layout::fromUpperBoundAndSize(upperBound, size).allocList()) {}
  void add(ValueType value) {
    CHECK_LT(value, std::numeric_limits<ValueType>::max());
    // Also works when lastValue_ == -1.
    CHECK_GT(value + 1, lastValue_ + 1)
        << "BitVectorCoding only supports strictly monotone lists";

    auto block = bits_ + (value / 64) * sizeof(uint64_t);
    size_t inner = value % 64;
    folly::Bits<folly::Unaligned<uint64_t>>::set(
        reinterpret_cast<folly::Unaligned<uint64_t>*>(block), inner);
    if (skipQuantum != 0) {
      size_t nextSkipPointerSize = value / skipQuantum;
      while (skipPointersSize_ < nextSkipPointerSize) {
        auto pos = skipPointersSize_++;
        folly::storeUnaligned<SkipValueType>(
            skipPointers_ + pos * sizeof(SkipValueType), size_);
      }
    }
    if (forwardQuantum != 0) {
      if (size_ != 0 && (size_ % forwardQuantum == 0)) {
        const auto pos = size_ / forwardQuantum - 1;
        folly::storeUnaligned<SkipValueType>(
            forwardPointers_ + pos * sizeof(SkipValueType), value);
      }
    }

    lastValue_ = value;
    ++size_;
  }
  const MutableCompressedList& finish() const {
    CHECK_EQ(size_, result_.size);
    // TODO(ott): Relax this assumption.
    CHECK_EQ(result_.upperBound, lastValue_);
    return result_;
  }
 private:
  uint8_t* const bits_ = nullptr;
  uint8_t* const skipPointers_ = nullptr;
  uint8_t* const forwardPointers_ = nullptr;
  ValueType lastValue_ = -1;
  size_t size_ = 0;
  size_t skipPointersSize_ = 0;
  MutableCompressedList result_;
};
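// Usage sketch (illustrative, not part of the original header); the template
// arguments, quanta and values below are arbitrary examples:
//
//   using Encoder =
//       folly::compression::BitVectorEncoder<uint32_t, uint32_t, 128, 128>;
//   std::vector<uint32_t> values = {1, 5, 8, 100, 1000};  // strictly increasing
//   auto list = Encoder::encode(values.begin(), values.end());
//   // ... read the list with BitVectorReader (see the sketch at the end) ...
//   list.free();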
template <class Value,
          class SkipValue,
          size_t kSkipQuantum,
          size_t kForwardQuantum>
struct BitVectorEncoder<Value, SkipValue, kSkipQuantum, kForwardQuantum>::
    Layout {
  static Layout fromUpperBoundAndSize(size_t upperBound, size_t size) {
    Layout layout;
    layout.size = size;
    layout.upperBound = upperBound;

    size_t bitVectorSizeInBytes = (upperBound / 8) + 1;
    layout.bits = bitVectorSizeInBytes;
    if (skipQuantum != 0) {
      size_t numSkipPointers = upperBound / skipQuantum;
      layout.skipPointers = numSkipPointers * sizeof(SkipValueType);
    }
    if (forwardQuantum != 0) {
      size_t numForwardPointers = size / forwardQuantum;
      layout.forwardPointers = numForwardPointers * sizeof(SkipValueType);
    }

    CHECK_LT(size, std::numeric_limits<SkipValueType>::max());

    return layout;
  }
  size_t bytes() const { return bits + skipPointers + forwardPointers; }
  template <class Range>
  BitVectorCompressedListBase<typename Range::iterator> openList(
      Range& buf) const {
    BitVectorCompressedListBase<typename Range::iterator> result;
    result.size = size;
    result.upperBound = upperBound;
    result.data = buf.subpiece(0, bytes());
    auto advance = [&](size_t n) {
      auto begin = buf.data();
      buf.advance(n);
      return begin;
    };
    result.bits = advance(bits);
    result.skipPointers = advance(skipPointers);
    result.forwardPointers = advance(forwardPointers);
    CHECK_EQ(buf.data() - result.data.data(), bytes());

    return result;
  }
  MutableCompressedList allocList() const {
    uint8_t* buf = nullptr;
    if (size > 0) {
      // 7 extra bytes keep unaligned 64-bit loads near the end of the bit
      // vector inside the allocation.
      buf = static_cast<uint8_t*>(malloc(bytes() + 7));
    }
    folly::MutableByteRange bufRange(buf, bytes());
    return openList(bufRange);
  }
  size_t size = 0;
  size_t upperBound = 0;

  // Sizes in bytes.
  size_t bits = 0;
  size_t skipPointers = 0;
  size_t forwardPointers = 0;
};
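// Worked example (illustrative): with SkipValue = uint16_t, skipQuantum = 128,
// forwardQuantum = 64, upperBound = 1000 and size = 200,
// fromUpperBoundAndSize() yields bits = 1000 / 8 + 1 = 126 bytes,
// skipPointers = (1000 / 128) * 2 = 14 bytes and
// forwardPointers = (200 / 64) * 2 = 6 bytes, so bytes() == 146.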
template <class Encoder,
          class Instructions = instructions::Default,
          bool kUnchecked = false>
class BitVectorReader : detail::ForwardPointers<Encoder::forwardQuantum>,
                        detail::SkipPointers<Encoder::skipQuantum> {
 public:
  typedef Encoder EncoderType;
  typedef typename Encoder::ValueType ValueType;
  // A bitvector can only be as large as its largest value.
  typedef typename Encoder::ValueType SizeType;
  typedef typename Encoder::SkipValueType SkipValueType;
  explicit BitVectorReader(const typename Encoder::CompressedList& list)
      : detail::ForwardPointers<Encoder::forwardQuantum>(list.forwardPointers),
        detail::SkipPointers<Encoder::skipQuantum>(list.skipPointers),
        bits_(list.bits),
        size_(list.size) {
    reset();

    if (kUnchecked || UNLIKELY(list.size == 0)) {
      upperBound_ = 0;
      return;
    }

    upperBound_ = list.upperBound;
  }
  void reset() {
    block_ = (bits_ != nullptr) ? folly::loadUnaligned<uint64_t>(bits_) : 0;
    outer_ = 0;
    position_ = -1;
    value_ = kInvalidValue;
  }
  bool next() {
    if (!kUnchecked && UNLIKELY(position() + 1 >= size_)) {
      return setDone();
    }

    while (block_ == 0) {
      outer_ += sizeof(uint64_t);
      block_ = folly::loadUnaligned<uint64_t>(bits_ + outer_);
    }

    ++position_;
    auto inner = Instructions::ctz(block_);
    block_ = Instructions::blsr(block_);

    return setValue(inner);
  }
  bool skip(SizeType n) {
    CHECK_GT(n, 0);

    if (!kUnchecked && position() + n >= size_) {
      return setDone();
    }
    // Small skip optimization.
    if (LIKELY(n < kLinearScanThreshold)) {
      for (size_t i = 0; i < n; ++i) {
        next();
      }
      return true;
    }

    position_ += n;
    // Use forward pointer.
    if (Encoder::forwardQuantum > 0 && n > Encoder::forwardQuantum) {
      const size_t steps = position_ / Encoder::forwardQuantum;
      const size_t dest = folly::loadUnaligned<SkipValueType>(
          this->forwardPointers_ + (steps - 1) * sizeof(SkipValueType));

      reposition(dest);
      n = position_ + 1 - steps * Encoder::forwardQuantum;
    }
    size_t cnt;
    // Find necessary block.
    while ((cnt = Instructions::popcount(block_)) < n) {
      n -= cnt;
      outer_ += sizeof(uint64_t);
      block_ = folly::loadUnaligned<uint64_t>(bits_ + outer_);
    }
    // Skip to the n-th one in the block.
    DCHECK_GT(n, 0);
    auto inner = select64<Instructions>(block_, n - 1);
    block_ &= (uint64_t(-1) << inner) << 1;

    return setValue(inner);
  }
  bool skipTo(ValueType v) {
    // Also works when value_ == kInvalidValue.
    if (v != kInvalidValue) { DCHECK_GE(v + 1, value_ + 1); }

    if (!kUnchecked && v > upperBound_) {
      return setDone();
    } else if (v == value_) {
      return true;
    }
    // Small skip optimization.
    if (v - value_ < kLinearScanThreshold) {
      do {
        next();
      } while (value() < v);

      return true;
    }
    if (Encoder::skipQuantum > 0 && v - value_ > Encoder::skipQuantum) {
      size_t q = v / Encoder::skipQuantum;
      auto skipPointer = folly::loadUnaligned<SkipValueType>(
          this->skipPointers_ + (q - 1) * sizeof(SkipValueType));
      position_ = static_cast<SizeType>(skipPointer) - 1;

      reposition(q * Encoder::skipQuantum);
    }
    size_t outer = v / 64 * 8;

    while (outer_ < outer) {
      position_ += Instructions::popcount(block_);
      outer_ += sizeof(uint64_t);
      block_ = folly::loadUnaligned<uint64_t>(bits_ + outer_);
    }

    DCHECK_EQ(outer_, outer);
    uint64_t mask = ~((uint64_t(1) << (v % 64)) - 1);
    position_ += Instructions::popcount(block_ & ~mask) + 1;
    block_ &= mask;
    while (block_ == 0) {
      outer_ += sizeof(uint64_t);
      block_ = folly::loadUnaligned<uint64_t>(bits_ + outer_);
    }

    auto inner = Instructions::ctz(block_);
    block_ = Instructions::blsr(block_);

    setValue(inner);
    return true;
  }
  SizeType size() const {
    return size_;
  }

  bool valid() const {
    return position() < size(); // Also checks that position() != -1.
  }
  SizeType position() const {
    return position_;
  }

  ValueType value() const {
    DCHECK(valid());
    return value_;
  }
  bool jump(SizeType n) {
    reset();
    return skip(n + 1);
  }

  bool jumpTo(ValueType v) {
    reset();
    return skipTo(v);
  }
  bool setDone() {
    value_ = kInvalidValue;
    position_ = size_;
    return false;
  }

 private:
  constexpr static ValueType kInvalidValue =
      std::numeric_limits<ValueType>::max(); // Must hold kInvalidValue + 1 == 0.
  bool setValue(size_t inner) {
    value_ = static_cast<ValueType>(8 * outer_ + inner);
    return true;
  }
  void reposition(size_t dest) {
    outer_ = dest / 64 * 8;
    // We maintain the invariant that outer_ is divisible by 8.
    block_ = folly::loadUnaligned<uint64_t>(bits_ + outer_);
    block_ &= ~((uint64_t(1) << (dest % 64)) - 1);
  }
  constexpr static size_t kLinearScanThreshold = 4;
  const uint8_t* const bits_;
  uint64_t block_;
  SizeType outer_;
  SizeType position_;
  ValueType value_;

  SizeType size_;
  ValueType upperBound_;
};

}}  // namespaces
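// Reader usage sketch (illustrative, not part of the original header), where
// Encoder and list are from the encoder sketch above and process() is a
// hypothetical placeholder:
//
//   folly::compression::BitVectorReader<Encoder> reader(list);
//   while (reader.next()) {
//     process(reader.value());
//   }
//   reader.jumpTo(100);  // position at the first element >= 100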