author     Daniel Schemmel <daniel@schemmel.net>  2022-10-13 14:23:39 +0100
committer  Frank Busse <f.busse@imperial.ac.uk>   2023-03-16 11:57:59 +0000
commit     140e08ac039959ffc34adcbd8022b3d565f50ee8 (patch)
tree       e4d991c5249169bb0d5b425678b684960caf638d
parent     5b49bd5999aabf51016a34acaefe905c313185c1 (diff)
download   klee-140e08ac039959ffc34adcbd8022b3d565f50ee8.tar.gz
The KDAlloc loh allocator is useful for variable-sized (large) allocations
-rw-r--r--  include/klee/KDAlloc/suballocators/cow_ptr.h        180
-rw-r--r--  include/klee/KDAlloc/suballocators/loh.h            346
-rw-r--r--  include/klee/KDAlloc/suballocators/sized_regions.h  589
3 files changed, 1115 insertions, 0 deletions
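
The placement policy is easy to see in isolation. The following standalone sketch (not part of the commit) mirrors the arithmetic that LargeObjectAllocator::allocate below performs; the real roundUpToMultipleOf4096 lives in klee/ADT/Bits.h, so the local version here is assumed for illustration only.

  #include <cstddef>

  // Round up to the next multiple of the 4096-byte page size.
  static std::size_t roundUpToMultipleOf4096(std::size_t x) {
    return (x + 4095) & ~static_cast<std::size_t>(4095);
  }

  // A page-rounded allocation of `size` bytes is placed in the middle of the
  // largest free region (`rangeSize` bytes), at a page-aligned offset, so
  // that unallocated red-zone space remains on both sides.
  static std::size_t placementOffset(std::size_t rangeSize, std::size_t size) {
    return roundUpToMultipleOf4096((rangeSize - size) / 2);
  }

  // Example: a 4096-byte allocation in a 1 MiB region lands at offset
  // placementOffset(1048576, 4096) == 524288.
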
diff --git a/include/klee/KDAlloc/suballocators/cow_ptr.h b/include/klee/KDAlloc/suballocators/cow_ptr.h
new file mode 100644
index 00000000..20e12701
--- /dev/null
+++ b/include/klee/KDAlloc/suballocators/cow_ptr.h
@@ -0,0 +1,180 @@
+//===-- cow_ptr.h -----------------------------------------------*- C++ -*-===//
+//
+// The KLEE Symbolic Virtual Machine
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+
+#ifndef KDALLOC_UTIL_COW_H
+#define KDALLOC_UTIL_COW_H
+
+#include <cassert>
+#include <cstddef>
+#include <cstdlib>
+#include <cstring>
+#include <new>
+#include <utility>
+
+namespace klee::kdalloc::suballocators {
+template <typename T> class CoWPtr {
+ struct Wrapper {
+ std::size_t referenceCount;
+ T data;
+
+ template <typename... V>
+ explicit Wrapper(std::size_t const referenceCount, V &&...args)
+ : referenceCount(referenceCount), data(std::forward<V>(args)...) {}
+ };
+
+ Wrapper *ptr = nullptr;
+
+public:
+ constexpr CoWPtr() = default;
+
+ CoWPtr(CoWPtr const &other) noexcept : ptr(other.ptr) {
+ if (ptr != nullptr) {
+ ++ptr->referenceCount;
+ assert(ptr->referenceCount > 1);
+ }
+ }
+
+ CoWPtr &operator=(CoWPtr const &other) {
+ if (this != &other) {
+ release();
+
+ ptr = other.ptr;
+ if (ptr != nullptr) {
+ ++ptr->referenceCount;
+ assert(ptr->referenceCount > 1);
+ }
+ }
+ return *this;
+ }
+
+ CoWPtr(CoWPtr &&other) noexcept : ptr(std::exchange(other.ptr, nullptr)) {}
+
+ CoWPtr &operator=(CoWPtr &&other) noexcept {
+ if (this != &other) {
+ release();
+ ptr = std::exchange(other.ptr, nullptr);
+ }
+ return *this;
+ }
+
+ /// A stand-in for C++17's `std::in_place_t`
+ struct in_place_t {};
+
+ template <typename... V>
+ explicit CoWPtr(in_place_t, V &&...args)
+ : ptr(new Wrapper(1, std::forward<V>(args)...)) {}
+
+ ~CoWPtr() { release(); }
+
+ /// Returns `true` iff `*this` is not in an empty state.
+ [[nodiscard]] explicit operator bool() const noexcept { return !isEmpty(); }
+
+ /// Returns `true` iff `*this` is in an empty state.
+ [[nodiscard]] bool isEmpty() const noexcept { return ptr == nullptr; }
+
+ /// Returns `true` iff `*this` is not in an empty state and owns the CoW
+ /// object.
+ [[nodiscard]] bool isOwned() const noexcept {
+ return ptr != nullptr && ptr->referenceCount == 1;
+ }
+
+ /// Accesses an existing object.
+ /// Must not be called when `*this` is in an empty state.
+ T const &operator*() const noexcept {
+ assert(ptr && "the `CoWPtr` must not be empty");
+ return get();
+ }
+
+ /// Accesses an existing object.
+ /// Must not be called when `*this` is in an empty state.
+ T const *operator->() const noexcept {
+ assert(ptr && "the `CoWPtr` must not be empty");
+ return &get();
+ }
+
+ /// Accesses an existing object.
+ /// Must not be called when `*this` is in an empty state.
+ T const &get() const noexcept {
+ assert(ptr && "the `CoWPtr` must not be empty");
+ return ptr->data;
+ }
+
+ /// Accesses an existing, owned object.
+ /// Must not be called when `*this` does not hold CoW ownership.
+ T &getOwned() noexcept {
+ assert(isOwned() && "the `CoWPtr` must be owned");
+ return ptr->data;
+ }
+
+ /// Accesses an existing, owned object.
+ /// Must not be called when `*this` does not hold CoW ownership.
+ ///
+ /// Note: This function is included for completeness' sake. Instead of calling
+  /// `getOwned` on a `const` object, one should probably just call `get`, as it
+ /// produces the same result without requiring the target object to hold
+ /// ownership of the data.
+ T const &getOwned() const noexcept {
+ assert(isOwned() && "the `CoWPtr` must be owned");
+ return ptr->data;
+ }
+
+ /// Acquires CoW ownership of an existing object and returns a reference to
+ /// the owned object. Must not be called when `*this` is in an empty state.
+ T &acquire() {
+ assert(ptr != nullptr &&
+ "May only call `acquire` for an active CoW object");
+ assert(ptr->referenceCount > 0);
+ if (ptr->referenceCount > 1) {
+ --ptr->referenceCount;
+ ptr = new Wrapper(*ptr);
+ ptr->referenceCount = 1;
+ }
+ assert(ptr->referenceCount == 1);
+ return ptr->data;
+ }
+
+ /// Releases CoW ownership of an existing object or does nothing if the object
+ /// does not exist. Leaves `*this` in an empty state.
+ void release() noexcept(noexcept(delete ptr)) {
+ if (ptr != nullptr) {
+ --ptr->referenceCount;
+ if (ptr->referenceCount == 0) {
+ delete ptr;
+ }
+ ptr = nullptr;
+ }
+ }
+
+ /// Leaves `*this` holding and owning an object `T(std::forward<V>(args)...)`.
+ /// This function is always safe to call and will perform the operation as
+  /// efficiently as possible. Notably, `foo.emplace(...)` may be more efficient
+ /// than the otherwise equivalent `foo = CoWPtr(CoWPtr::in_place_t{}, ...)`.
+ template <typename... V> T &emplace(V &&...args) {
+ if (ptr) {
+ if (ptr->referenceCount == 1) {
+ ptr->data = T(std::forward<V>(args)...);
+ } else {
+ auto *new_ptr = new Wrapper(
+ 1, std::forward<V>(args)...); // possibly throwing operation
+
+ assert(ptr->referenceCount > 1);
+ --ptr->referenceCount;
+ ptr = new_ptr;
+ }
+ } else {
+ ptr = new Wrapper(1, std::forward<V>(args)...);
+ }
+ assert(ptr != nullptr);
+ assert(ptr->referenceCount == 1);
+ return ptr->data;
+ }
+};
+} // namespace klee::kdalloc::suballocators
+
+#endif
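
A minimal usage sketch for CoWPtr (not part of the commit), assuming the header above is reachable as klee/KDAlloc/suballocators/cow_ptr.h:

  #include "klee/KDAlloc/suballocators/cow_ptr.h"

  #include <cassert>
  #include <vector>

  void cowPtrExample() {
    using Ptr = klee::kdalloc::suballocators::CoWPtr<std::vector<int>>;

    Ptr a(Ptr::in_place_t{}, 3, 42); // owns a freshly built vector {42, 42, 42}
    Ptr b = a;                       // shares the wrapper; refcount is now 2
    assert(!a.isOwned() && !b.isOwned());

    b.acquire().push_back(7); // clones the vector first; `a` is unaffected
    assert(a.isOwned() && b.isOwned());
    assert(a->size() == 3 && b->size() == 4);
  }

Copies cost only a reference-count increment; the deep copy is deferred until a writer calls acquire (or emplace) on a shared object.
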
diff --git a/include/klee/KDAlloc/suballocators/loh.h b/include/klee/KDAlloc/suballocators/loh.h
new file mode 100644
index 00000000..62386182
--- /dev/null
+++ b/include/klee/KDAlloc/suballocators/loh.h
@@ -0,0 +1,346 @@
+//===-- loh.h ---------------------------------------------------*- C++ -*-===//
+//
+// The KLEE Symbolic Virtual Machine
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+
+#ifndef KDALLOC_SUBALLOCATORS_LOH_H
+#define KDALLOC_SUBALLOCATORS_LOH_H
+
+#include "../define.h"
+#include "../location_info.h"
+#include "../tagged_logger.h"
+#include "sized_regions.h"
+
+#include "klee/ADT/Bits.h"
+
+#include <cassert>
+#include <cstddef>
+#include <cstdint>
+#include <cstdlib>
+#include <cstring>
+#include <functional>
+#include <ostream>
+#include <tuple>
+#include <type_traits>
+#include <utility>
+#include <vector>
+
+#if KDALLOC_TRACE >= 2
+#include <iostream>
+#endif
+
+namespace klee::kdalloc::suballocators {
+/// The large object heap is implemented as what amounts to a bi-directional
+/// mapping between the position of each unallocated region and its actual size.
+/// The implemented algorithm performs allocations in the middle of the largest
+/// available unallocated region. Allocations are guaranteed to be aligned to
+/// 4096 bytes.
+class LargeObjectAllocator final : public TaggedLogger<LargeObjectAllocator> {
+ struct Data final {
+ /// The reference count.
+ std::size_t referenceCount = 1;
+
+    /// A CoW-enabled treap that is a tree over base addresses and a max-heap
+    /// over sizes.
+ SizedRegions regions;
+
+ static_assert(
+ sizeof(void *) >= sizeof(std::size_t),
+ "The quarantine elements are alternating `void*` and `std::size_t`");
+ static_assert(
+ alignof(void *) >= alignof(std::size_t),
+ "The quarantine elements are alternating `void*` and `std::size_t`");
+ using QuarantineElement =
+ std::aligned_storage_t<sizeof(void *), alignof(void *)>;
+
+ /// The quarantine position followed by the quarantine ring buffer
+ /// Structure: [pos, ptr1, size1, ptr2, size2, ...]
+ QuarantineElement quarantine[];
+
+ Data() = default;
+ Data(Data const &rhs) : regions(rhs.regions) {}
+ Data &operator=(Data const &) = delete;
+ Data(Data &&) = delete;
+ Data &operator=(Data &&) = delete;
+ };
+
+public:
+ class Control final : public TaggedLogger<Control> {
+ friend class LargeObjectAllocator;
+
+ void *baseAddress = nullptr;
+ std::size_t size = 0;
+ std::uint32_t quarantineSize = 0;
+ bool unlimitedQuarantine = false;
+
+ public:
+ Control() = default;
+ Control(Control const &) = delete;
+ Control &operator=(Control const &) = delete;
+ Control(Control &&) = delete;
+ Control &operator=(Control &&) = delete;
+
+ void initialize(void *const baseAddress, std::size_t const size,
+ bool const unlimitedQuarantine,
+ std::uint32_t const quarantineSize) {
+ assert(size >= 3 * 4096 && "The LOH requires at least three pages");
+
+ this->baseAddress = baseAddress;
+ this->size = size;
+ if (unlimitedQuarantine) {
+ this->quarantineSize = 0;
+ this->unlimitedQuarantine = true;
+ } else {
+ this->quarantineSize = quarantineSize == 0 ? 0 : quarantineSize * 2 + 1;
+ this->unlimitedQuarantine = false;
+ }
+
+ traceLine("Initialization complete for region ", baseAddress, " + ",
+ size);
+ }
+
+ inline std::ostream &logTag(std::ostream &out) const noexcept {
+ return out << "[LOH control] ";
+ }
+
+ constexpr void *mapping_begin() const noexcept { return baseAddress; }
+ constexpr void *mapping_end() const noexcept {
+ return static_cast<void *>(static_cast<char *>(baseAddress) + size);
+ }
+ };
+
+private:
+ Data *data = nullptr;
+
+ inline void releaseData() noexcept {
+ if (data) {
+ --data->referenceCount;
+ if (data->referenceCount == 0) {
+ data->~Data();
+ std::free(data);
+ }
+ data = nullptr;
+ }
+ }
+
+ inline void acquireData(Control const &control) noexcept {
+ assert(!!data);
+ if (data->referenceCount > 1) {
+      auto newData = static_cast<Data *>(std::malloc(
+          sizeof(Data) +
+          control.quarantineSize * sizeof(Data::QuarantineElement)));
+ assert(newData && "allocation failure");
+
+ new (newData) Data(*data);
+ std::memcpy(&newData->quarantine[0], &data->quarantine[0],
+ sizeof(Data::QuarantineElement) * control.quarantineSize);
+
+ --data->referenceCount;
+ assert(data->referenceCount > 0);
+ data = newData;
+ }
+ assert(data->referenceCount == 1);
+ }
+
+ std::pair<void *, std::size_t>
+ quarantine(Control const &control, void *const ptr, std::size_t const size) {
+ assert(!!data &&
+ "Deallocations can only happen if allocations happened beforehand");
+ assert(!control.unlimitedQuarantine);
+
+ if (control.quarantineSize == 0) {
+ return {ptr, size};
+ }
+
+ assert(data->referenceCount == 1 &&
+ "Must hold CoW ownership to quarantine a new pointer+size pair");
+
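+    // Slot 0 of the quarantine buffer holds the ring-buffer cursor; each
+    // quarantined (pointer, size) pair occupies indices pos and pos + 1.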
+ auto const pos = reinterpret_cast<std::size_t &>(data->quarantine[0]);
+ if (pos + 2 == control.quarantineSize) {
+ reinterpret_cast<std::size_t &>(data->quarantine[0]) = 1;
+ } else {
+ reinterpret_cast<std::size_t &>(data->quarantine[0]) = pos + 2;
+ }
+
+ auto quarantinedPtr = std::exchange(
+ reinterpret_cast<void *&>(data->quarantine[pos]), std::move(ptr));
+ auto quarantinedSize = std::exchange(
+ reinterpret_cast<std::size_t &>(data->quarantine[pos + 1]),
+ std::move(size));
+ return {quarantinedPtr, quarantinedSize};
+ }
+
+public:
+ constexpr LargeObjectAllocator() = default;
+
+ LargeObjectAllocator(LargeObjectAllocator const &rhs) noexcept
+ : data(rhs.data) {
+ if (data) {
+ ++data->referenceCount;
+ assert(data->referenceCount > 1);
+ }
+ }
+
+ LargeObjectAllocator &operator=(LargeObjectAllocator const &rhs) noexcept {
+ if (data != rhs.data) {
+ releaseData();
+ data = rhs.data;
+ if (data) {
+ ++data->referenceCount;
+ assert(data->referenceCount > 1);
+ }
+ }
+ return *this;
+ }
+
+ LargeObjectAllocator(LargeObjectAllocator &&rhs) noexcept
+ : data(std::exchange(rhs.data, nullptr)) {
+    assert(!data || data->referenceCount > 0);
+ }
+
+ LargeObjectAllocator &operator=(LargeObjectAllocator &&rhs) noexcept {
+ if (data != rhs.data) {
+ releaseData();
+ data = std::exchange(rhs.data, nullptr);
+ }
+ return *this;
+ }
+
+ ~LargeObjectAllocator() noexcept { releaseData(); }
+
+ inline std::ostream &logTag(std::ostream &out) const noexcept {
+ return out << "[LOH] ";
+ }
+
+ LocationInfo getLocationInfo(Control const &control, void const *const ptr,
+ std::size_t const size) const noexcept {
+ assert(control.mapping_begin() <= ptr &&
+ reinterpret_cast<char const *>(ptr) + size < control.mapping_end() &&
+ "This property should have been ensured by the caller");
+
+ if (!data) {
+ return LocationInfo::LI_Unallocated;
+ }
+ assert(!data->regions.isEmpty());
+
+ if (reinterpret_cast<char const *>(control.mapping_begin()) + 4096 >
+ reinterpret_cast<char const *>(ptr) ||
+ reinterpret_cast<char const *>(control.mapping_end()) - 4096 <
+ reinterpret_cast<char const *>(ptr) + size) {
+ return LocationInfo::LI_AlwaysRedzone;
+ }
+
+ return data->regions.getLocationInfo(reinterpret_cast<char const *>(ptr),
+ size);
+ }
+
+ [[nodiscard]] void *allocate(Control const &control, std::size_t size) {
+ if (!data) {
+      data = static_cast<Data *>(std::malloc(
+          sizeof(Data) +
+          control.quarantineSize * sizeof(Data::QuarantineElement)));
+ assert(data && "allocation failure");
+
+ new (data) Data();
+ if (control.quarantineSize > 0) {
+ reinterpret_cast<std::size_t &>(data->quarantine[0]) = 1;
+ std::memset(&data->quarantine[1], 0,
+ (control.quarantineSize - 1) *
+ sizeof(Data::QuarantineElement));
+ }
+
+ data->regions.insert(static_cast<char *>(control.baseAddress),
+ control.size);
+ } else {
+ acquireData(control);
+ }
+ assert(data->referenceCount == 1);
+
+ auto const quantizedSize = roundUpToMultipleOf4096(size);
+ traceLine("Allocating ", size, " (", quantizedSize, ") bytes");
+ assert(size > 0);
+ assert(quantizedSize % 4096 == 0);
+ traceContents(control);
+
+ size = quantizedSize;
+
+ assert(!data->regions.isEmpty());
+ auto const &node = data->regions.getLargestRegion();
+ std::size_t const rangeSize = node->getSize();
+    assert(rangeSize >= size + 2 * 4096 && "Zero (or below) red zone size!");
+ char *const rangePos = node->getBaseAddress();
+
+ auto offset = (rangeSize - size) / 2;
+ offset = roundUpToMultipleOf4096(offset);
+
+ // left subrange
+ data->regions.resizeLargestRegion(offset);
+
+ // right subrange
+ data->regions.insert(rangePos + offset + size, rangeSize - offset - size);
+
+ auto const result = static_cast<void *>(rangePos + offset);
+ traceLine("Allocation result: ", result);
+ return result;
+ }
+
+ void deallocate(Control const &control, void *ptr, std::size_t size) {
+ assert(!!data &&
+ "Deallocations can only happen if allocations happened beforehand");
+
+ if (control.unlimitedQuarantine) {
+      traceLine("Quarantining ", ptr, " forever");
+ } else {
+ auto quantizedSize = roundUpToMultipleOf4096(size);
+ traceLine("Quarantining ", ptr, " with size ", size, " (", quantizedSize,
+ ") for ", (control.quarantineSize - 1) / 2, " deallocations");
+ assert(size > 0);
+ assert(quantizedSize % 4096 == 0);
+
+ acquireData(control); // we will need quarantine and/or region ownership
+ std::tie(ptr, size) = quarantine(control, ptr, size);
+ if (!ptr) {
+ return;
+ }
+
+ quantizedSize = roundUpToMultipleOf4096(size);
+ traceLine("Deallocating ", ptr, " with size ", size, " (", quantizedSize,
+ ")");
+
+ assert(data->referenceCount == 1);
+ assert(size > 0);
+ assert(quantizedSize % 4096 == 0);
+ size = quantizedSize;
+ traceContents(control);
+ traceLine("Merging region at ",
+ static_cast<void *>(static_cast<char *>(ptr) + size),
+ " with the preceding region");
+
+ data->regions.mergeRegionWithPrevious(static_cast<char *>(ptr) + size);
+ }
+ }
+
+ void traceContents(Control const &) const noexcept {
+ if (data->regions.isEmpty()) {
+ traceLine("regions is empty");
+ } else {
+#if KDALLOC_TRACE >= 2
+ traceLine("regions:");
+ data->regions.dump(std::cout);
+
+#if !defined(NDEBUG)
+ auto invariantsHold = data->regions.checkInvariants();
+ if (!invariantsHold.first) {
+        traceLine("TREE INVARIANT DOES NOT HOLD!");
+ }
+ if (!invariantsHold.second) {
+        traceLine("HEAP INVARIANT DOES NOT HOLD!");
+ }
+ assert(invariantsHold.first && invariantsHold.second);
+#endif
+#else
+ traceLine("regions is not empty");
+#endif
+ }
+ }
+};
+} // namespace klee::kdalloc::suballocators
+
+#endif
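
A usage sketch for the allocator (not part of the commit), assuming a sufficiently large page-aligned mapping was reserved elsewhere (e.g. via mmap):

  #include "klee/KDAlloc/suballocators/loh.h"

  #include <cstddef>

  void lohExample(void *mapping, std::size_t mappingSize) {
    using namespace klee::kdalloc::suballocators;

    LargeObjectAllocator::Control control;
    control.initialize(mapping, mappingSize,
                       /*unlimitedQuarantine=*/false,
                       /*quarantineSize=*/8); // delay reuse by 8 deallocations

    LargeObjectAllocator loh;
    void *p = loh.allocate(control, 1 << 20); // 1 MiB, aligned to 4096 bytes
    loh.deallocate(control, p, 1 << 20);      // quarantined before reuse
  }

Freed regions re-enter the treap only when their quarantine slot is evicted, which delays address reuse and thereby helps surface use-after-free accesses.
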
diff --git a/include/klee/KDAlloc/suballocators/sized_regions.h b/include/klee/KDAlloc/suballocators/sized_regions.h
new file mode 100644
index 00000000..7adeea45
--- /dev/null
+++ b/include/klee/KDAlloc/suballocators/sized_regions.h
@@ -0,0 +1,589 @@
+//===-- sized_regions.h -----------------------------------------*- C++ -*-===//
+//
+// The KLEE Symbolic Virtual Machine
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+
+#ifndef KDALLOC_UTIL_SIZED_REGIONS_H
+#define KDALLOC_UTIL_SIZED_REGIONS_H
+
+#include "../define.h"
+#include "../location_info.h"
+#include "cow_ptr.h"
+
+#include <cassert>
+#include <cstddef>
+#include <cstdint>
+#include <limits>
+#include <string>
+#include <utility>
+
+namespace klee::kdalloc::suballocators {
+class SizedRegions;
+
+class Node final {
+ friend class SizedRegions;
+
+ CoWPtr<Node> lhs;
+ CoWPtr<Node> rhs;
+
+ // std::size_t _precomputed_hash;
+ char *baseAddress;
+ std::size_t size;
+
+ inline std::uintptr_t hash() const noexcept {
+ // This hash function implements a very simple hash based only on the base
+ // address. The hash is (only) used to break ties w.r.t. the heap property.
+ // Note that multiplication with an odd number (as done here) modulo a power
+ // of two (the domain of std::size_t) is a permutation, meaning that the
+ // hash is guaranteed to break any ties (as tree-keys are unique in the
+ // treap). The constant is chosen in such a way that the high bits of the
+    // result depend on each byte of input while still enabling a fast computation
+ // due to the low number of 1-bits involved. It is also chosen odd to ensure
+ // co-primality with powers of two.
+
+ static_assert(
+ std::numeric_limits<std::uintptr_t>::digits <= 64,
+ "Please choose a more appropriate constant in the line below.");
+ return reinterpret_cast<std::uintptr_t>(baseAddress) *
+ static_cast<std::uintptr_t>(0x8080'8080'8080'8081u);
+ }
+
+public:
+ char *const &getBaseAddress() const noexcept { return baseAddress; }
+ void setBaseAddress(char *const newBaseAddress) noexcept {
+ baseAddress = newBaseAddress;
+ }
+
+ std::size_t const &getSize() const noexcept { return size; }
+ void setSize(std::size_t const newSize) noexcept { size = newSize; }
+
+ Node(char *const baseAddress, std::size_t const size) noexcept
+ : baseAddress(baseAddress), size(size) {}
+};
+
+class SizedRegions {
+ CoWPtr<Node> root;
+
+ static void insertRec(CoWPtr<Node> &treap, CoWPtr<Node> &&region) {
+ assert(treap && "target subtreap must exist (insertion at the call "
+ "site is trivial otherwise)");
+ assert(region && "region to be inserted must exist");
+ assert(treap->getBaseAddress() != region->getBaseAddress() &&
+ "multiple insertions of the same tree-key are not supported");
+
+ auto &node = treap.acquire();
+ if (region->getBaseAddress() < node.getBaseAddress()) {
+ if (node.lhs) {
+ insertRec(node.lhs, std::move(region));
+ } else {
+ node.lhs = std::move(region);
+ }
+ if (node.lhs->getSize() > node.getSize() ||
+ (node.lhs->getSize() == node.getSize() &&
+ node.lhs->hash() > node.hash())) {
+ auto temp = std::move(node.lhs);
+ auto &nodeTemp = temp.acquire();
+ node.lhs = std::move(nodeTemp.rhs);
+ nodeTemp.rhs = std::move(treap);
+ treap = std::move(temp);
+ }
+ } else {
+ if (node.rhs) {
+ insertRec(node.rhs, std::move(region));
+ } else {
+ node.rhs = std::move(region);
+ }
+ if (node.rhs->getSize() > node.getSize() ||
+ (node.rhs->getSize() == node.getSize() &&
+ node.rhs->hash() > node.hash())) {
+ auto temp = std::move(node.rhs);
+ auto &nodeTemp = temp.acquire();
+ node.rhs = std::move(nodeTemp.lhs);
+ nodeTemp.lhs = std::move(treap);
+ treap = std::move(temp);
+ }
+ }
+ }
+
+ static void mergeTreaps(CoWPtr<Node> *target, CoWPtr<Node> lhs,
+ CoWPtr<Node> rhs) {
+ assert(!*target && "empty the target first");
+ assert(
+ (lhs || rhs) &&
+ "merging two empty subtreaps should be handled by hand, as it does not "
+ "require "
+ "acquisition of the object holding `*target` (usually a parent node)");
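+    // Standard treap merge: repeatedly promote whichever root has the higher
+    // priority (larger size, hash as tie-breaker) and descend along the side
+    // that faces the other subtreap.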
+ for (;;) {
+ assert(!*target);
+ if (!lhs) {
+ *target = std::move(rhs);
+ break;
+ }
+ if (!rhs) {
+ *target = std::move(lhs);
+ break;
+ }
+ assert(lhs->getBaseAddress() < rhs->getBaseAddress() &&
+ "tree property violated");
+ if (lhs->getSize() > rhs->getSize() ||
+ (lhs->getSize() == rhs->getSize() && lhs->hash() > rhs->hash())) {
+ // Ties w.r.t. the heap property (size) are explicitly broken using a
+ // hash derived from the tree key. At the time of writing, the "hash"
+ // function is a permutation (if `std::size_t` and `std::uintptr_t` are
+ // of the same size), which means that hash collisions are not just
+ // unlikely, but rather impossible.
+ Node &lhsNode = lhs.acquire();
+ *target = std::move(lhs);
+ lhs = std::move(lhsNode.rhs);
+ target = &lhsNode.rhs;
+ } else {
+ Node &rhsNode = rhs.acquire();
+ *target = std::move(rhs);
+ rhs = std::move(rhsNode.lhs);
+ target = &rhsNode.lhs;
+ }
+ }
+ assert(!lhs && "lhs must have been consumed");
+ assert(!rhs && "rhs must have been consumed");
+ }
+
+public:
+ constexpr SizedRegions() = default;
+ SizedRegions(SizedRegions const &other) = default;
+ SizedRegions &operator=(SizedRegions const &other) = default;
+ SizedRegions(SizedRegions &&other) = default;
+ SizedRegions &operator=(SizedRegions &&other) = default;
+
+ [[nodiscard]] bool isEmpty() const noexcept { return !root; }
+
+ /// Computes the LocationInfo. This functionality really belongs to the
+ /// `LargeObjectAllocator`, as it assumes that this treap contains free
+ /// regions in between allocations. It also knows that there is a redzone at
+ /// the beginning and end and in between any two allocations.
+ inline LocationInfo getLocationInfo(char const *const address,
+ std::size_t const size) const noexcept {
+ assert(root && "Cannot compute location_info for an empty treap");
+
+ Node const *currentNode = &*root;
+ Node const *closestPredecessor = nullptr;
+ for (;;) {
+ if (currentNode->getBaseAddress() <= address) {
+ if (address < currentNode->getBaseAddress() + currentNode->getSize()) {
+ if (address + size <=
+ currentNode->getBaseAddress() + currentNode->getSize()) {
+ return LocationInfo::LI_Unallocated;
+ } else {
+ return LocationInfo::LI_Unaligned;
+ }
+ } else {
+ closestPredecessor = currentNode;
+ if (!currentNode->rhs) {
+ return {LocationInfo::LI_AllocatedOrQuarantined,
+ closestPredecessor->getBaseAddress() +
+ closestPredecessor->getSize()};
+ }
+ currentNode = &*currentNode->rhs;
+ }
+ } else {
+ assert(closestPredecessor &&
+ "If there is no closest predecessor, there must not be a "
+ "predecessor at all. Since regions are only ever split, "
+ "this would mean that the lookup is outside the valid "
+ "range, which has to be handled by the caller.");
+ if (currentNode->getBaseAddress() < address + size) {
+ return LocationInfo::LI_Unaligned;
+ }
+ if (!currentNode->lhs) {
+ return {LocationInfo::LI_AllocatedOrQuarantined,
+ closestPredecessor->getBaseAddress() +
+ closestPredecessor->getSize()};
+ }
+ currentNode = &*currentNode->lhs;
+ }
+ }
+ }
+
+ void insert(char *const baseAddress, std::size_t const size) {
+ assert(size > 0 && "region to be inserted must contain at least one byte");
+ insert(CoWPtr<Node>(CoWPtr<Node>::in_place_t{}, baseAddress, size));
+ }
+
+ void insert(CoWPtr<Node> region) {
+ assert(region && "region to be inserted must exist");
+ assert(region->getSize() > 0 &&
+ "region to be inserted must contain at least one byte");
+ if (region->lhs || region->rhs) {
+ if (region.isOwned()) {
+ auto &node = region.acquire();
+ node.lhs.release();
+ node.rhs.release();
+ } else {
+ // If region is not owned, then acquiring it would copy the `lhs` and
+ // `rhs` members, thereby incrementing and decrementing the refcounts in
+ // unrelated objects. To avoid this, we simply recreate the region.
+ region = CoWPtr<Node>(CoWPtr<Node>::in_place_t{},
+ region->getBaseAddress(), region->getSize());
+ }
+ }
+ assert(!region->lhs);
+ assert(!region->rhs);
+
+ auto *target = &root;
+ while (*target && ((*target)->getSize() > region->getSize() ||
+ ((*target)->getSize() == region->getSize() &&
+ (*target)->hash() > region->hash()))) {
+ auto &node = target->acquire();
+ assert(node.getBaseAddress() != region->getBaseAddress() &&
+ "multiple insertions of the same tree-key are not supported");
+ target = region->getBaseAddress() < node.getBaseAddress() ? &node.lhs
+ : &node.rhs;
+ }
+ if (!*target) {
+ *target = std::move(region);
+ } else {
+ insertRec(*target, std::move(region));
+ }
+ }
+
+ [[nodiscard]] CoWPtr<Node> const &getLargestRegion() const noexcept {
+ assert(root && "querying the largest region only makes sense if the "
+ "treap is not empty");
+ return root;
+ }
+
+ CoWPtr<Node> extractLargestRegion() {
+ assert(root && "cannot extract the largest region of an empty treap");
+
+ auto result = std::move(root);
+ if (result->lhs || result->rhs) {
+ auto &node = result.acquire();
+ mergeTreaps(&root, std::move(node.lhs), std::move(node.rhs));
+ }
+ return result;
+ }
+
+private:
+ static void pushDownRightHeap(CoWPtr<Node> *target, CoWPtr<Node> update,
+ CoWPtr<Node> rhs, std::size_t const newSize,
+ std::uintptr_t const newHash) {
+ while (rhs && (newSize < rhs->getSize() ||
+ (newSize == rhs->getSize() && newHash < rhs->hash()))) {
+ // optimization opportunity: once lhs does not exist anymore, it will
+ // never exist in the future
+ *target = std::move(rhs);
+ auto &targetNode = target->acquire();
+ rhs = std::move(targetNode.lhs);
+ target = &targetNode.lhs;
+ }
+
+ update.getOwned().rhs = std::move(rhs);
+ *target = std::move(update);
+ }
+
+ static void pushDownLeftHeap(CoWPtr<Node> *target, CoWPtr<Node> update,
+ CoWPtr<Node> lhs, std::size_t const newSize,
+ std::uintptr_t const newHash) {
+ while (lhs && (newSize < lhs->getSize() ||
+ (newSize == lhs->getSize() && newHash < lhs->hash()))) {
+ // optimization opportunity: once lhs does not exist anymore, it will
+ // never exist in the future
+ *target = std::move(lhs);
+ auto &targetNode = target->acquire();
+ lhs = std::move(targetNode.rhs);
+ target = &targetNode.rhs;
+ }
+
+ update.getOwned().lhs = std::move(lhs);
+ *target = std::move(update);
+ }
+
+public:
+ void resizeLargestRegion(std::size_t const newSize) {
+ assert(root && "updating the largest region only makes sense if the "
+ "treap is not empty");
+
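+    // The root's priority (its size) may have decreased, so sift the updated
+    // node down until the max-heap property over sizes is restored.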
+ auto update = std::move(root);
+ auto &node = update.acquire();
+ node.setSize(newSize);
+ auto const newHash = node.hash();
+ auto lhs = std::move(node.lhs);
+ auto rhs = std::move(node.rhs);
+
+ auto *target = &root;
+
+ if (!lhs || !(newSize < lhs->getSize() ||
+ (newSize == lhs->getSize() && newHash < lhs->hash()))) {
+ node.lhs = std::move(lhs);
+ pushDownRightHeap(target, std::move(update), std::move(rhs), newSize,
+ newHash);
+ } else if (!rhs ||
+ !(newSize < rhs->getSize() ||
+ (newSize == rhs->getSize() && newHash < rhs->hash()))) {
+ node.rhs = std::move(rhs);
+ pushDownLeftHeap(target, std::move(update), std::move(lhs), newSize,
+ newHash);
+ } else {
+ for (;;) {
+ assert(lhs && (newSize < lhs->getSize() ||
+ (newSize == lhs->getSize() && newHash < lhs->hash())));
+ assert(rhs && (newSize < rhs->getSize() ||
+ (newSize == rhs->getSize() && newHash < rhs->hash())));
+
+ if (lhs->getSize() < rhs->getSize() ||
+ (lhs->getSize() == rhs->getSize() && lhs->hash() < rhs->hash())) {
+ *target = std::move(rhs);
+ auto &targetNode = target->acquire();
+ rhs = std::move(targetNode.lhs);
+ target = &targetNode.lhs;
+
+ if (!rhs || !(newSize < rhs->getSize() ||
+ (newSize == rhs->getSize() && newHash < rhs->hash()))) {
+ node.rhs = std::move(rhs);
+ pushDownLeftHeap(target, std::move(update), std::move(lhs), newSize,
+ newHash);
+ return;
+ }
+ } else {
+ *target = std::move(lhs);
+ auto &targetNode = target->acquire();
+ lhs = std::move(targetNode.rhs);
+ target = &targetNode.rhs;
+
+ if (!lhs || !(newSize < lhs->getSize() ||
+ (newSize == lhs->getSize() && newHash < lhs->hash()))) {
+ node.lhs = std::move(lhs);
+ pushDownRightHeap(target, std::move(update), std::move(rhs),
+ newSize, newHash);
+ return;
+ }
+ }
+ }
+ }
+ }
+
+ CoWPtr<Node> extractRegion(char const *const baseAddress) {
+ CoWPtr<Node> *target = &root;
+ for (;;) {
+ assert(*target && "cannot extract region that is not part of the treap");
+ if ((*target)->getBaseAddress() < baseAddress) {
+ target = &target->acquire().rhs;
+ } else if ((*target)->getBaseAddress() > baseAddress) {
+ target = &target->acquire().lhs;
+ } else {
+ assert((*target)->getBaseAddress() == baseAddress);
+ auto result = std::move(*target);
+ if (result->lhs || result->rhs) {
+ auto &node = result.acquire();
+ mergeTreaps(target, std::move(node.lhs), std::move(node.rhs));
+ }
+ return result;
+ }
+ }
+ }
+
+private:
+ static CoWPtr<Node> mergeRegionWithPreviousRec(CoWPtr<Node> &treap,
+ char *const baseAddress) {
+ assert(treap && "cannot extract region that is not part of the treap");
+ if (baseAddress < treap->getBaseAddress()) {
+ auto &node = treap.acquire();
+ if (auto greater = mergeRegionWithPreviousRec(node.lhs, baseAddress)) {
+ if (node.getBaseAddress() < greater->getBaseAddress()) {
+ auto newSize = static_cast<std::size_t>(greater->getBaseAddress() -
+ node.getBaseAddress() +
+ greater->getSize());
+ assert(
+ newSize > node.getSize() &&
+ "Sizes only get greater by adding `greater - smaller` to them");
+ node.setSize(newSize);
+ return {};
+ } else {
+ return greater;
+ }
+ } else {
+ if (node.lhs->getSize() > node.getSize() ||
+ (node.lhs->getSize() == node.getSize() &&
+ node.lhs->hash() > node.hash())) {
+ auto temp = std::move(node.lhs);
+ auto &nodeTemp = temp.getOwned();
+ node.lhs = std::move(nodeTemp.rhs);
+ nodeTemp.rhs = std::move(treap);
+ treap = std::move(temp);
+ }
+ return {};
+ }
+ } else if (treap->getBaseAddress() < baseAddress) {
+ auto &node = treap.acquire();
+ if (auto greater = mergeRegionWithPreviousRec(node.rhs, baseAddress)) {
+ if (node.getBaseAddress() < greater->getBaseAddress()) {
+ auto newSize = static_cast<std::size_t>(greater->getBaseAddress() -
+ node.getBaseAddress() +
+ greater->getSize());
+ assert(
+ newSize > node.getSize() &&
+ "Sizes only get greater by adding `greater - smaller` to them");
+ node.setSize(newSize);
+ return {};
+ } else {
+ return greater;
+ }
+ } else {
+ if (node.rhs->getSize() > node.getSize() ||
+ (node.rhs->getSize() == node.getSize() &&
+ node.rhs->hash() > node.hash())) {
+ auto temp = std::move(node.rhs);
+ auto &nodeTemp = temp.getOwned();
+ node.rhs = std::move(nodeTemp.lhs);
+ nodeTemp.lhs = std::move(treap);
+ treap = std::move(temp);
+ }
+ return greater;
+ }
+ } else {
+ assert(treap->getBaseAddress() == baseAddress);
+      // `treap` now holds the greater (w.r.t. the tree key) of the two
+      // regions we are looking for
+ if (treap->lhs) {
+ CoWPtr<Node> lhs, rhs;
+ if (treap.isOwned()) {
+ auto &node = treap.acquire();
+ lhs = std::move(node.lhs);
+ rhs = std::move(node.rhs);
+ } else {
+ lhs = treap->lhs;
+ rhs = treap->rhs;
+ }
+ auto const greaterBaseAddress = treap->getBaseAddress();
+ auto const greaterSize = treap->getSize();
+
+ auto *target = &lhs;
+ while ((*target)->rhs) {
+ target = &target->acquire().rhs;
+ }
+ treap = std::move(*target);
+ auto &node = treap.acquire();
+ *target = std::move(node.lhs);
+
+ assert(greaterBaseAddress > node.getBaseAddress());
+ auto newSize = static_cast<std::size_t>(
+ greaterBaseAddress - node.getBaseAddress() + greaterSize);
+ assert(newSize > node.getSize() &&
+ "Sizes only get greater by adding `greater - smaller` to them");
+ node.setSize(newSize);
+
+ assert(!node.rhs && "We looked for a node that has no rhs");
+ assert((!rhs || node.getBaseAddress() < rhs->getBaseAddress()) &&
+ "`lesser` comes from the left subtree of `greater`, while "
+ "rhs is the right subtree of `greater`");
+ assert((!rhs || node.getSize() > rhs->getSize()) &&
+ "The old size of `greater` was >= that of its right subtree, "
+ "so the new size (of `lesser`) is strictly greater");
+ node.rhs = std::move(rhs);
+
+ assert(!node.lhs && "The lhs of this node has already been removed");
+ assert((!lhs || lhs->getBaseAddress() < node.getBaseAddress()) &&
+ "`lesser` is the greatest child of the `lhs` subtree");
+ assert((!lhs || node.getSize() > lhs->getSize()) &&
+ "The old size of `greater` was >= that of its left subtree, "
+ "so the new size (of `lesser`) is strictly greater");
+ node.lhs = std::move(lhs);
+ return {};
+ } else {
+ auto greater = std::move(treap);
+ if (greater->rhs) {
+ if (greater.isOwned()) {
+ treap = std::move(greater.acquire().rhs);
+ } else {
+ treap = greater->rhs;
+ }
+ }
+ return greater; // no move to enable copy elision
+ }
+ }
+ }
+
+public:
+ /// This function merges the region starting at the given address, with the
+ /// region immediately preceding it. Both regions must exist. The resulting
+ /// region has the same base address as the lesser region with its size large
+ /// enough to cover the space to the end of the greater region (filling any
+ /// holes in the process).
+ inline void mergeRegionWithPrevious(char *const baseAddress) {
+ assert(root && "An empty treap holds no regions to merge");
+ mergeRegionWithPreviousRec(root, baseAddress);
+ }
+
+private:
+ template <typename OutStream>
+ static void dumpRec(OutStream &out, std::string &prefix,
+ CoWPtr<Node> const &treap) {
+ if (treap) {
+ out << "[" << static_cast<void *>(treap->getBaseAddress()) << "; "
+ << static_cast<void *>(treap->getBaseAddress() + treap->getSize())
+ << ") of size " << treap->getSize() << "\n";
+
+ auto oldPrefix = prefix.size();
+
+ out << prefix << "├";
+ prefix += "│";
+ dumpRec(out, prefix, treap->lhs);
+
+ prefix.resize(oldPrefix);
+
+ out << prefix << "╰";
+ prefix += " ";
+ dumpRec(out, prefix, treap->rhs);
+ } else {
+ out << "<nil>\n";
+ }
+ }
+
+public:
+ template <typename OutStream> OutStream &dump(OutStream &out) {
+ std::string prefix;
+ dumpRec(out, prefix, root);
+ return out;
+ }
+
+private:
+ static void checkInvariants(std::pair<bool, bool> &result,
+ CoWPtr<Node> const &treap) {
+ if (!result.first && !result.second) {
+ return;
+ }
+
+ if (treap->lhs) {
+ if (treap->lhs->getBaseAddress() >= treap->getBaseAddress()) {
+ result.first = false;
+ }
+ if (treap->lhs->getSize() > treap->getSize()) {
+ result.second = false;
+ }
+ checkInvariants(result, treap->lhs);
+ }
+ if (treap->rhs) {
+ if (treap->rhs->getBaseAddress() <= treap->getBaseAddress()) {
+ result.first = false;
+ }
+ if (treap->rhs->getSize() > treap->getSize()) {
+ result.second = false;
+ }
+ checkInvariants(result, treap->rhs);
+ }
+ }
+
+public:
+ std::pair<bool, bool> checkInvariants() const {
+ std::pair<bool, bool> result = {true, true};
+ if (root) {
+ checkInvariants(result, root);
+ }
+ return result;
+ }
+};
+} // namespace klee::kdalloc::suballocators
+
+#endif
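
Finally, a sketch (not part of the commit) of how the LOH drives the treap during one allocate/free cycle; the base address is fabricated purely for illustration, as the treap compares addresses without ever dereferencing them:

  #include "klee/KDAlloc/suballocators/sized_regions.h"

  #include <cassert>

  void sizedRegionsExample() {
    using klee::kdalloc::suballocators::SizedRegions;

    char *base = reinterpret_cast<char *>(0x10000000); // never dereferenced

    SizedRegions regions;
    regions.insert(base, 16 * 4096); // one free region spanning 16 pages

    // Carve out page [7, 8) the way LargeObjectAllocator::allocate does:
    regions.resizeLargestRegion(7 * 4096);     // left remainder: pages [0, 7)
    regions.insert(base + 8 * 4096, 8 * 4096); // right remainder: pages [8, 16)

    // Freeing that page merges the right remainder into its predecessor,
    // filling the hole and restoring one free region spanning pages [0, 16).
    regions.mergeRegionWithPrevious(base + 8 * 4096);
    assert(regions.getLargestRegion()->getSize() == 16 * 4096);
  }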