-rw-r--r--  lib/Core/AddressSpace.cpp    |  28
-rw-r--r--  lib/Core/AddressSpace.h      |   6
-rw-r--r--  lib/Core/ExecutionState.cpp  |  24
-rw-r--r--  lib/Core/ExecutionState.h    |  12
-rw-r--r--  lib/Core/Executor.cpp        | 141
-rw-r--r--  lib/Core/Executor.h          |   9
-rw-r--r--  lib/Core/Memory.h            |   5
-rw-r--r--  lib/Core/MemoryManager.cpp   | 321
-rw-r--r--  lib/Core/MemoryManager.h     |  27
-rw-r--r--  test/Feature/VarArg.c        |   2
10 files changed, 459 insertions(+), 116 deletions(-)
diff --git a/lib/Core/AddressSpace.cpp b/lib/Core/AddressSpace.cpp
index f3462ca1..ab3bbb8c 100644
--- a/lib/Core/AddressSpace.cpp
+++ b/lib/Core/AddressSpace.cpp
@@ -297,19 +297,25 @@ bool AddressSpace::resolve(ExecutionState &state, TimingSolver *solver,
 // transparently avoid screwing up symbolics (if the byte is symbolic
 // then its concrete cache byte isn't being used) but is just a hack.
 
-void AddressSpace::copyOutConcretes() {
-  for (MemoryMap::iterator it = objects.begin(), ie = objects.end(); 
-       it != ie; ++it) {
-    const MemoryObject *mo = it->first;
-
-    if (!mo->isUserSpecified) {
-      const auto &os = it->second;
-      auto address = reinterpret_cast<std::uint8_t*>(mo->address);
-
-      if (!os->readOnly)
-        memcpy(address, os->concreteStore, mo->size);
+std::size_t AddressSpace::copyOutConcretes() {
+  std::size_t numPages{};
+  for (const auto &object : objects) {
+    auto &mo = object.first;
+    auto &os = object.second;
+    if (!mo->isUserSpecified && !os->readOnly && os->size != 0) {
+      auto size = std::max(os->size, mo->alignment);
+      numPages +=
+          (size + MemoryManager::pageSize - 1) / MemoryManager::pageSize;
+      copyOutConcrete(mo, os.get());
     }
   }
+  return numPages;
+}
+
+void AddressSpace::copyOutConcrete(const MemoryObject *mo,
+                                   const ObjectState *os) const {
+  auto address = reinterpret_cast<std::uint8_t *>(mo->address);
+  std::memcpy(address, os->concreteStore, mo->size);
 }
 
 bool AddressSpace::copyInConcretes() {
diff --git a/lib/Core/AddressSpace.h b/lib/Core/AddressSpace.h
index 4df8d5f0..37ae76f9 100644
--- a/lib/Core/AddressSpace.h
+++ b/lib/Core/AddressSpace.h
@@ -126,7 +126,11 @@ namespace klee {
 
     /// Copy the concrete values of all managed ObjectStates into the
     /// actual system memory location they were allocated at.
-    void copyOutConcretes();
+    /// Returns the (hypothetical) number of pages needed provided each written
+    /// object occupies (at least) a single page.
+    std::size_t copyOutConcretes();
+
+    void copyOutConcrete(const MemoryObject *mo, const ObjectState *os) const;
 
     /// Copy the concrete values of all managed ObjectStates back from
     /// the actual system memory location they were allocated
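(Note, not part of the patch: the page count that copyOutConcretes now returns is the hypothetical number of pages needed if every written object started on its own page. A minimal standalone sketch of that rounding, with illustrative object sizes and a 4 KiB page:)

#include <algorithm>
#include <cstddef>
#include <iostream>
#include <utility>
#include <vector>

// Mirrors the per-object rounding in AddressSpace::copyOutConcretes above:
// each written object is assumed to occupy at least one whole page.
std::size_t hypotheticalPages(
    const std::vector<std::pair<std::size_t, std::size_t>> &objects, // (size, alignment)
    std::size_t pageSize) {
  std::size_t numPages = 0;
  for (const auto &obj : objects) {
    std::size_t bytes = std::max(obj.first, obj.second); // same max(size, alignment)
    numPages += (bytes + pageSize - 1) / pageSize;       // round up to whole pages
  }
  return numPages;
}

int main() {
  // three illustrative objects of 24, 4096 and 5000 bytes with 8/16-byte alignment
  std::vector<std::pair<std::size_t, std::size_t>> objects{{24, 8}, {4096, 16}, {5000, 8}};
  std::cout << hypotheticalPages(objects, 4096) << '\n'; // prints 4 (= 1 + 1 + 2)
}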
diff --git a/lib/Core/ExecutionState.cpp b/lib/Core/ExecutionState.cpp
index 5231f7fb..1c1f477c 100644
--- a/lib/Core/ExecutionState.cpp
+++ b/lib/Core/ExecutionState.cpp
@@ -70,10 +70,14 @@ StackFrame::~StackFrame() {
 
 /***/
 
-ExecutionState::ExecutionState(KFunction *kf)
+ExecutionState::ExecutionState(KFunction *kf, MemoryManager *mm)
     : pc(kf->instructions), prevPC(pc) {
   pushFrame(nullptr, kf);
   setID();
+  if (mm->stackFactory && mm->heapFactory) {
+    stackAllocator = mm->stackFactory.makeAllocator();
+    heapAllocator = mm->heapFactory.makeAllocator();
+  }
 }
 
 ExecutionState::~ExecutionState() {
@@ -91,6 +95,8 @@ ExecutionState::ExecutionState(const ExecutionState& state):
     incomingBBIndex(state.incomingBBIndex),
     depth(state.depth),
     addressSpace(state.addressSpace),
+    stackAllocator(state.stackAllocator),
+    heapAllocator(state.heapAllocator),
     constraints(state.constraints),
     pathOS(state.pathOS),
     symPathOS(state.symPathOS),
@@ -127,11 +133,25 @@ void ExecutionState::pushFrame(KInstIterator caller, KFunction *kf) {
 
 void ExecutionState::popFrame() {
   const StackFrame &sf = stack.back();
-  for (const auto * memoryObject : sf.allocas)
+  for (const auto *memoryObject : sf.allocas) {
+    deallocate(memoryObject);
     addressSpace.unbindObject(memoryObject);
+  }
   stack.pop_back();
 }
 
+void ExecutionState::deallocate(const MemoryObject *mo) {
+  if (!stackAllocator || !heapAllocator)
+    return;
+
+  auto address = reinterpret_cast<void *>(mo->address);
+  if (mo->isLocal) {
+    stackAllocator.free(address, std::max(mo->size, mo->alignment));
+  } else {
+    heapAllocator.free(address, std::max(mo->size, mo->alignment));
+  }
+}
+
 void ExecutionState::addSymbolic(const MemoryObject *mo, const Array *array) {
   symbolics.emplace_back(ref<const MemoryObject>(mo), array);
 }
diff --git a/lib/Core/ExecutionState.h b/lib/Core/ExecutionState.h
index 49e232dc..6d6336dd 100644
--- a/lib/Core/ExecutionState.h
+++ b/lib/Core/ExecutionState.h
@@ -11,12 +11,14 @@
 #define KLEE_EXECUTIONSTATE_H
 
 #include "AddressSpace.h"
+#include "MemoryManager.h"
 #include "MergeHandler.h"
 
 #include "klee/ADT/ImmutableSet.h"
 #include "klee/ADT/TreeStream.h"
 #include "klee/Expr/Constraints.h"
 #include "klee/Expr/Expr.h"
+#include "klee/KDAlloc/kdalloc.h"
 #include "klee/Module/KInstIterator.h"
 #include "klee/Solver/Solver.h"
 #include "klee/System/Time.h"
@@ -180,6 +182,12 @@ public:
   /// @brief Address space used by this state (e.g. Global and Heap)
   AddressSpace addressSpace;
 
+  /// @brief Stack allocator (used with deterministic allocation)
+  kdalloc::StackAllocator stackAllocator;
+
+  /// @brief Heap allocator (used with deterministic allocation)
+  kdalloc::Allocator heapAllocator;
+
   /// @brief Constraints collected so far
   ConstraintSet constraints;
 
@@ -246,7 +254,7 @@ public:
   ExecutionState() = default;
 #endif
   // only to create the initial state
-  explicit ExecutionState(KFunction *kf);
+  explicit ExecutionState(KFunction *kf, MemoryManager *mm);
   // no copy assignment, use copy constructor
   ExecutionState &operator=(const ExecutionState &) = delete;
   // no move ctor
@@ -261,6 +269,8 @@ public:
   void pushFrame(KInstIterator caller, KFunction *kf);
   void popFrame();
 
+  void deallocate(const MemoryObject *mo);
+
   void addSymbolic(const MemoryObject *mo, const Array *array);
 
   void addConstraint(ref<Expr> e);
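(Note, not part of the patch: a minimal sketch of how the per-state allocators introduced here are used, assuming klee/KDAlloc/kdalloc.h from this change is on the include path. Only calls that appear in the patch are used; the mapping size and quarantine value are illustrative.)

#include "klee/KDAlloc/kdalloc.h"

#include <cstddef>
#include <cstdio>

int main() {
  // one factory per segment; start address 0 lets the factory pick one,
  // 1 GiB mapping, quarantine queue size 8 (illustrative values)
  kdalloc::AllocatorFactory heapFactory(0, std::size_t(1) << 30, 8);

  kdalloc::Allocator heapA = heapFactory.makeAllocator(); // allocator of the initial state
  kdalloc::Allocator heapB = heapA; // a forked state copies its parent's allocator

  void *p = heapA.allocate(64);
  std::printf("allocated %p in state A\n", p);
  heapA.free(p, 64); // the size is passed back, as ExecutionState::deallocate does
  (void)heapB;       // state B's allocator evolves independently from the fork point
}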
diff --git a/lib/Core/Executor.cpp b/lib/Core/Executor.cpp
index 1187654d..013f01c2 100644
--- a/lib/Core/Executor.cpp
+++ b/lib/Core/Executor.cpp
@@ -9,6 +9,7 @@
 
 #include "Executor.h"
 
+#include "AddressSpace.h"
 #include "Context.h"
 #include "CoreStats.h"
 #include "ExecutionState.h"
@@ -35,6 +36,7 @@
 #include "klee/Expr/ExprPPrinter.h"
 #include "klee/Expr/ExprSMTLIBPrinter.h"
 #include "klee/Expr/ExprUtil.h"
+#include "klee/KDAlloc/kdalloc.h"
 #include "klee/Module/Cell.h"
 #include "klee/Module/InstructionInfoTable.h"
 #include "klee/Module/KCallable.h"
@@ -81,6 +83,7 @@ typedef unsigned TypeSize;
 #include <algorithm>
 #include <cassert>
 #include <cerrno>
+#include <cstdint>
 #include <cstring>
 #include <cxxabi.h>
 #include <fstream>
@@ -90,6 +93,7 @@ typedef unsigned TypeSize;
 #include <sstream>
 #include <string>
 #include <sys/mman.h>
+#include <sys/resource.h>
 #include <vector>
 
 using namespace llvm;
@@ -208,6 +212,14 @@ cl::opt<bool> AllExternalWarnings(
              "as opposed to once per function (default=false)"),
     cl::cat(ExtCallsCat));
 
+cl::opt<std::size_t> ExternalPageThreshold(
+    "kdalloc-external-page-threshold", cl::init(1024),
+    cl::desc(
+        "Threshold for garbage collecting pages used by external calls. If "
+        "there is a significant number of infrequently used pages resident in "
+        "memory, these will only be cleaned up if the total number of pages "
+        "used for external calls is above the given threshold (default=1024)."),
+    cl::cat(ExtCallsCat));
 
 /*** Seeding options ***/
 
@@ -684,7 +696,7 @@ void Executor::allocateGlobalObjects(ExecutionState &state) {
       // We allocate an object to represent each function,
       // its address can be used for function pointers.
       // TODO: Check whether the object is accessed?
-      auto mo = memory->allocate(8, false, true, &f, 8);
+      auto mo = memory->allocate(8, false, true, &state, &f, 8);
       addr = Expr::createPointer(mo->address);
       legalFunctions.emplace(mo->address, &f);
     }
@@ -763,7 +775,8 @@ void Executor::allocateGlobalObjects(ExecutionState &state) {
     }
 
     MemoryObject *mo = memory->allocate(size, /*isLocal=*/false,
-                                        /*isGlobal=*/true, /*allocSite=*/&v,
+                                        /*isGlobal=*/true, /*state=*/nullptr,
+                                        /*allocSite=*/&v,
                                         /*alignment=*/globalObjectAlignment);
     if (!mo)
       klee_error("out of memory");
@@ -813,9 +826,6 @@ void Executor::initializeGlobalAliases() {
 void Executor::initializeGlobalObjects(ExecutionState &state) {
   const Module *m = kmodule->module.get();
 
-  // remember constant objects to initialise their counter part for external
-  // calls
-  std::vector<ObjectState *> constantObjects;
   for (const GlobalVariable &v : m->globals()) {
     MemoryObject *mo = globalObjects.find(&v)->second;
     ObjectState *os = bindObjectInState(state, mo, false);
@@ -838,22 +848,15 @@ void Executor::initializeGlobalObjects(ExecutionState &state) {
       }
     } else if (v.hasInitializer()) {
       initializeGlobalObject(state, os, v.getInitializer(), 0);
-      if (v.isConstant())
-        constantObjects.emplace_back(os);
+      if (v.isConstant()) {
+        os->setReadOnly(true);
+        // initialise constant memory that may be used with external calls
+        state.addressSpace.copyOutConcrete(mo, os);
+      }
     } else {
       os->initializeToRandom();
     }
   }
-
-  // initialise constant memory that is potentially used with external calls
-  if (!constantObjects.empty()) {
-    // initialise the actual memory with constant values
-    state.addressSpace.copyOutConcretes();
-
-    // mark constant objects as read-only
-    for (auto obj : constantObjects)
-      obj->setReadOnly(true);
-  }
 }
 
 
@@ -1537,7 +1540,7 @@ MemoryObject *Executor::serializeLandingpad(ExecutionState &state,
   }
 
   MemoryObject *mo =
-      memory->allocate(serialized.size(), true, false, nullptr, 1);
+      memory->allocate(serialized.size(), true, false, &state, nullptr, 1);
   ObjectState *os = bindObjectInState(state, mo, false);
   for (unsigned i = 0; i < serialized.size(); i++) {
     os->write8(i, serialized[i]);
@@ -1987,7 +1990,7 @@ void Executor::executeCall(ExecutionState &state, KInstruction *ki, Function *f,
 
       StackFrame &sf = state.stack.back();
       MemoryObject *mo = sf.varargs =
-          memory->allocate(size, true, false, state.prevPC->inst,
+          memory->allocate(size, true, false, &state, state.prevPC->inst,
                            (requires16ByteAlignment ? 16 : 8));
       if (!mo && size) {
         terminateStateOnExecError(state, "out of memory (varargs)");
@@ -2054,7 +2057,7 @@ void Executor::transferToBasicBlock(BasicBlock *dst, BasicBlock *src,
 
 /// Compute the true target of a function call, resolving LLVM aliases
 /// and bitcasts.
-Function* Executor::getTargetFunction(Value *calledVal, ExecutionState &state) {
+Function *Executor::getTargetFunction(Value *calledVal) {
   SmallPtrSet<const GlobalValue*, 3> Visited;
 
   Constant *c = dyn_cast<Constant>(calledVal);
@@ -2418,7 +2421,7 @@ void Executor::executeInstruction(ExecutionState &state, KInstruction *ki) {
     const CallBase &cb = cast<CallBase>(*i);
     Value *fp = cb.getCalledOperand();
     unsigned numArgs = cb.arg_size();
-    Function *f = getTargetFunction(fp, state);
+    Function *f = getTargetFunction(fp);
 
     // evaluate arguments
     std::vector< ref<Expr> > arguments;
@@ -3873,7 +3876,35 @@ void Executor::callExternalFunction(ExecutionState &state,
   }
 
   // Prepare external memory for invoking the function
-  state.addressSpace.copyOutConcretes();
+  static std::size_t residentPages = 0;
+  double avgNeededPages = 0;
+  if (MemoryManager::isDeterministic) {
+    auto const minflt = [] {
+      struct rusage ru = {};
+      int ret = getrusage(RUSAGE_SELF, &ru);
+      assert(!ret && "getrusage failed");
+      assert(ru.ru_minflt >= 0);
+      return ru.ru_minflt;
+    };
+
+    auto tmp = minflt();
+    std::size_t neededPages = state.addressSpace.copyOutConcretes();
+    auto newPages = minflt() - tmp;
+    assert(newPages >= 0);
+    residentPages += newPages;
+    assert(residentPages >= neededPages &&
+           "allocator too full, assumption that each object occupies its own "
+           "page is no longer true");
+
+    // average of pages needed for an external function call
+    static double avgNeededPages_ = residentPages;
+    // exponential moving average with alpha = 1/4
+    avgNeededPages_ = (3.0 * avgNeededPages_ + neededPages) / 4.0;
+    avgNeededPages = avgNeededPages_;
+  } else {
+    state.addressSpace.copyOutConcretes();
+  }
+
 #ifndef WINDOWS
   // Update external errno state with local state value
   int *errno_addr = getErrnoLocation(state);
@@ -3926,6 +3957,13 @@ void Executor::callExternalFunction(ExecutionState &state,
     return;
   }
 
+  if (MemoryManager::isDeterministic && residentPages > ExternalPageThreshold &&
+      residentPages > 2 * avgNeededPages) {
+    if (memory->markMappingsAsUnneeded()) {
+      residentPages = 0;
+    }
+  }
+
 #ifndef WINDOWS
   // Update errno memory object with the errno value from the call
   int error = externalDispatcher->getLastErrno();
@@ -4002,7 +4040,7 @@ void Executor::executeAlloc(ExecutionState &state,
     }
     MemoryObject *mo =
         memory->allocate(CE->getZExtValue(), isLocal, /*isGlobal=*/false,
-                         allocSite, allocationAlignment);
+                         &state, allocSite, allocationAlignment);
     if (!mo) {
       bindLocal(target, state, 
                 ConstantExpr::alloc(0, Context::get().getPointerWidth()));
@@ -4019,7 +4057,9 @@ void Executor::executeAlloc(ExecutionState &state,
         unsigned count = std::min(reallocFrom->size, os->size);
         for (unsigned i=0; i<count; i++)
           os->write(i, reallocFrom->read8(i));
-        state.addressSpace.unbindObject(reallocFrom->getObject());
+        const MemoryObject *reallocObject = reallocFrom->getObject();
+        state.deallocate(reallocObject);
+        state.addressSpace.unbindObject(reallocObject);
       }
     }
   } else {
@@ -4134,6 +4174,7 @@ void Executor::executeFree(ExecutionState &state,
                               StateTerminationType::Free,
                               getAddressInfo(*it->second, address));
       } else {
+        it->second->deallocate(mo);
         it->second->addressSpace.unbindObject(mo);
         if (target)
           bindLocal(target, *it->second, Expr::createPointer(0));
@@ -4168,6 +4209,19 @@ void Executor::resolveExact(ExecutionState &state,
   }
 
   if (unbound) {
+    auto CE = dyn_cast<ConstantExpr>(p);
+    if (MemoryManager::isDeterministic && CE) {
+      using kdalloc::LocationInfo;
+      auto ptr = reinterpret_cast<void *>(CE->getZExtValue());
+      auto locinfo = unbound->heapAllocator.location_info(ptr, 1);
+      if (locinfo == LocationInfo::LI_AllocatedOrQuarantined &&
+          locinfo.getBaseAddress() == ptr && name == "free") {
+        terminateStateOnError(*unbound, "memory error: double free",
+                              StateTerminationType::Ptr,
+                              getAddressInfo(*unbound, p));
+        return;
+      }
+    }
     terminateStateOnError(*unbound, "memory error: invalid pointer: " + name,
                           StateTerminationType::Ptr, getAddressInfo(*unbound, p));
   }
@@ -4293,6 +4347,32 @@ void Executor::executeMemoryOperation(ExecutionState &state,
     if (incomplete) {
       terminateStateOnSolverError(*unbound, "Query timed out (resolve).");
     } else {
+      if (auto CE = dyn_cast<ConstantExpr>(address)) {
+        std::uintptr_t ptrval = CE->getZExtValue();
+        auto ptr = reinterpret_cast<void *>(ptrval);
+        if (ptrval < MemoryManager::pageSize) {
+          terminateStateOnError(*unbound, "memory error: null page access",
+                                StateTerminationType::Ptr,
+                                getAddressInfo(*unbound, address));
+          return;
+        } else if (MemoryManager::isDeterministic) {
+          using kdalloc::LocationInfo;
+          auto li = unbound->heapAllocator.location_info(ptr, bytes);
+          if (li == LocationInfo::LI_AllocatedOrQuarantined) {
+            // In case there is no size mismatch (checked by resolving for base
+            // address), the object is quarantined.
+            auto base = reinterpret_cast<std::uintptr_t>(li.getBaseAddress());
+            auto baseExpr = Expr::createPointer(base);
+            ObjectPair op;
+            if (!unbound->addressSpace.resolveOne(baseExpr, op)) {
+              terminateStateOnError(*unbound, "memory error: use after free",
+                                    StateTerminationType::Ptr,
+                                    getAddressInfo(*unbound, address));
+              return;
+            }
+          }
+        }
+      }
       terminateStateOnError(*unbound, "memory error: out of bound pointer",
                             StateTerminationType::Ptr,
                             getAddressInfo(*unbound, address));
@@ -4404,10 +4484,10 @@ void Executor::runFunctionAsMain(Function *f,
     arguments.push_back(ConstantExpr::alloc(argc, Expr::Int32));
     if (++ai!=ae) {
       Instruction *first = &*(f->begin()->begin());
-      argvMO =
-          memory->allocate((argc + 1 + envc + 1 + 1) * NumPtrBytes,
-                           /*isLocal=*/false, /*isGlobal=*/true,
-                           /*allocSite=*/first, /*alignment=*/8);
+      argvMO = memory->allocate((argc + 1 + envc + 1 + 1) * NumPtrBytes,
+                                /*isLocal=*/false, /*isGlobal=*/true,
+                                /*state=*/nullptr, /*allocSite=*/first,
+                                /*alignment=*/8);
 
       if (!argvMO)
         klee_error("Could not allocate memory for function arguments");
@@ -4424,7 +4504,7 @@ void Executor::runFunctionAsMain(Function *f,
     }
   }
 
-  ExecutionState *state = new ExecutionState(kmodule->functionMap[f]);
+  ExecutionState *state = new ExecutionState(kmodule->functionMap[f], memory);
 
   if (pathWriter) 
     state->pathOS = pathWriter->open();
@@ -4452,7 +4532,8 @@ void Executor::runFunctionAsMain(Function *f,
 
         MemoryObject *arg =
             memory->allocate(len + 1, /*isLocal=*/false, /*isGlobal=*/true,
-                             /*allocSite=*/state->pc->inst, /*alignment=*/8);
+                             state, /*allocSite=*/state->pc->inst,
+                             /*alignment=*/8);
         if (!arg)
           klee_error("Could not allocate memory for function arguments");
         ObjectState *os = bindObjectInState(*state, arg, false);
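(Note, not part of the patch: a self-contained sketch of the page-reclamation heuristic added to callExternalFunction above, using made-up per-call page counts. The threshold corresponds to -kdalloc-external-page-threshold.)

#include <cstddef>
#include <cstdio>

int main() {
  const std::size_t externalPageThreshold = 1024; // --kdalloc-external-page-threshold
  std::size_t residentPages = 0; // pages faulted in by copyOutConcretes since last cleanup
  double avgNeededPages = 0.0;
  bool first = true;

  const std::size_t neededPerCall[] = {300, 280, 900, 40, 35}; // made-up page counts
  for (std::size_t needed : neededPerCall) {
    residentPages += needed; // simplification: assume every needed page is newly resident
    if (first) {
      avgNeededPages = residentPages; // seeded like the static avgNeededPages_ above
      first = false;
    }
    avgNeededPages = (3.0 * avgNeededPages + needed) / 4.0; // EMA with alpha = 1/4

    if (residentPages > externalPageThreshold && residentPages > 2 * avgNeededPages) {
      std::printf("reclaim mappings (resident=%zu, avg=%.1f)\n", residentPages,
                  avgNeededPages);
      residentPages = 0; // as if markMappingsAsUnneeded() succeeded
    }
  }
}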
diff --git a/lib/Core/Executor.h b/lib/Core/Executor.h
index 279d8bee..21d0d081 100644
--- a/lib/Core/Executor.h
+++ b/lib/Core/Executor.h
@@ -138,13 +138,13 @@ private:
   /// happens with other states (that don't satisfy the seeds) depends
   /// on as-yet-to-be-determined flags.
   std::map<ExecutionState*, std::vector<SeedInfo> > seedMap;
-  
+
   /// Map of globals to their representative memory object.
   std::map<const llvm::GlobalValue*, MemoryObject*> globalObjects;
 
   /// Map of globals to their bound address. This also includes
-  /// globals that have no representative object (i.e. functions).
-  std::map<const llvm::GlobalValue*, ref<ConstantExpr> > globalAddresses;
+  /// globals that have no representative object (e.g. aliases).
+  std::map<const llvm::GlobalValue*, ref<ConstantExpr>> globalAddresses;
 
   /// Map of legal function addresses to the corresponding Function.
   /// Used to validate and dereference function pointers.
@@ -212,8 +212,7 @@ private:
   /// Return the typeid corresponding to a certain `type_info`
   ref<ConstantExpr> getEhTypeidFor(ref<Expr> type_info);
 
-  llvm::Function* getTargetFunction(llvm::Value *calledVal,
-                                    ExecutionState &state);
+  llvm::Function* getTargetFunction(llvm::Value *calledVal);
   
   void executeInstruction(ExecutionState &state, KInstruction *ki);
 
diff --git a/lib/Core/Memory.h b/lib/Core/Memory.h
index 7e1f097a..9a936746 100644
--- a/lib/Core/Memory.h
+++ b/lib/Core/Memory.h
@@ -50,6 +50,7 @@ public:
 
   /// size in bytes
   unsigned size;
+  unsigned alignment;
   mutable std::string name;
 
   bool isLocal;
@@ -76,18 +77,20 @@ public:
     : id(counter++),
       address(_address),
       size(0),
+      alignment(0),
       isFixed(true),
       parent(NULL),
       allocSite(0) {
   }
 
-  MemoryObject(uint64_t _address, unsigned _size, 
+  MemoryObject(uint64_t _address, unsigned _size, unsigned _alignment,
                bool _isLocal, bool _isGlobal, bool _isFixed,
                const llvm::Value *_allocSite,
                MemoryManager *_parent)
     : id(counter++),
       address(_address),
       size(_size),
+      alignment(_alignment),
       name("unnamed"),
       isLocal(_isLocal),
       isGlobal(_isGlobal),
diff --git a/lib/Core/MemoryManager.cpp b/lib/Core/MemoryManager.cpp
index f15c0db9..0e47500a 100644
--- a/lib/Core/MemoryManager.cpp
+++ b/lib/Core/MemoryManager.cpp
@@ -10,76 +10,261 @@
 #include "MemoryManager.h"
 
 #include "CoreStats.h"
+#include "ExecutionState.h"
 #include "Memory.h"
 
 #include "klee/Expr/Expr.h"
 #include "klee/Support/ErrorHandling.h"
 
+#include "llvm/IR/GlobalVariable.h"
 #include "llvm/Support/CommandLine.h"
 #include "llvm/Support/MathExtras.h"
+#if LLVM_VERSION_CODE >= LLVM_VERSION(10, 0)
+#include "llvm/Support/Alignment.h"
+#else
+#include "llvm/Support/MathExtras.h"
+#endif
 
-#include <inttypes.h>
+#include <cinttypes>
+#include <algorithm>
 #include <sys/mman.h>
+#include <tuple>
+#include <string>
 
 using namespace klee;
 
-namespace {
+namespace klee {
+std::uint32_t MemoryManager::quarantine;
+
+std::size_t MemoryManager::pageSize = sysconf(_SC_PAGE_SIZE);
 
+bool MemoryManager::isDeterministic;
+} // namespace klee
+
+namespace {
 llvm::cl::OptionCategory MemoryCat("Memory management options",
                                    "These options control memory management.");
 
-llvm::cl::opt<bool> DeterministicAllocation(
-    "allocate-determ",
+llvm::cl::opt<bool, true> DeterministicAllocation(
+    "kdalloc",
     llvm::cl::desc("Allocate memory deterministically (default=false)"),
-    llvm::cl::init(false), llvm::cl::cat(MemoryCat));
+    llvm::cl::location(MemoryManager::isDeterministic), llvm::cl::init(false),
+    llvm::cl::cat(MemoryCat));
+
+llvm::cl::opt<bool> DeterministicAllocationMarkAsUnneeded(
+    "kdalloc-mark-as-unneeded",
+    llvm::cl::desc("Mark allocations as unneeded after external function calls "
+                   "(default=true)"),
+    llvm::cl::init(true), llvm::cl::cat(MemoryCat));
+
+llvm::cl::opt<unsigned> DeterministicAllocationGlobalsSize(
+    "kdalloc-globals-size",
+    llvm::cl::desc("Reserved memory for globals in GiB (default=10)"),
+    llvm::cl::init(10), llvm::cl::cat(MemoryCat));
+
+llvm::cl::opt<unsigned> DeterministicAllocationConstantsSize(
+    "kdalloc-constants-size",
+    llvm::cl::desc("Reserved memory for constant globals in GiB (default=10)"),
+    llvm::cl::init(10), llvm::cl::cat(MemoryCat));
+
+llvm::cl::opt<unsigned> DeterministicAllocationHeapSize(
+    "kdalloc-heap-size",
+    llvm::cl::desc("Reserved memory for heap in GiB (default=1024)"),
+    llvm::cl::init(1024), llvm::cl::cat(MemoryCat));
+
+llvm::cl::opt<unsigned> DeterministicAllocationStackSize(
+    "kdalloc-stack-size",
+    llvm::cl::desc("Reserved memory for stack in GiB (default=100)"),
+    llvm::cl::init(100), llvm::cl::cat(MemoryCat));
 
-llvm::cl::opt<unsigned> DeterministicAllocationSize(
-    "allocate-determ-size",
+llvm::cl::opt<std::uintptr_t> DeterministicAllocationGlobalsStartAddress(
+    "kdalloc-globals-start-address",
     llvm::cl::desc(
-        "Preallocated memory for deterministic allocation in MB (default=100)"),
-    llvm::cl::init(100), llvm::cl::cat(MemoryCat));
+        "Start address for globals segment (has to be page aligned)"),
+    llvm::cl::cat(MemoryCat));
+
+llvm::cl::opt<std::uintptr_t> DeterministicAllocationConstantsStartAddress(
+    "kdalloc-constants-start-address",
+    llvm::cl::desc(
+        "Start address for constant globals segment (has to be page aligned)"),
+    llvm::cl::cat(MemoryCat));
+
+llvm::cl::opt<std::uintptr_t> DeterministicAllocationHeapStartAddress(
+    "kdalloc-heap-start-address",
+    llvm::cl::desc("Start address for heap segment (has to be page aligned)"),
+    llvm::cl::cat(MemoryCat));
+
+llvm::cl::opt<std::uintptr_t> DeterministicAllocationStackStartAddress(
+    "kdalloc-stack-start-address",
+    llvm::cl::desc("Start address for stack segment (has to be page aligned)"),
+    llvm::cl::cat(MemoryCat));
+
+struct QuarantineSizeParser : public llvm::cl::parser<std::uint32_t> {
+  explicit QuarantineSizeParser(llvm::cl::Option &O)
+      : llvm::cl::parser<std::uint32_t>(O) {}
+
+  bool parse(llvm::cl::Option &O, llvm::StringRef ArgName, llvm::StringRef Arg,
+             std::uint32_t &Val) {
+    if (Arg == "-1") {
+      Val = kdalloc::Allocator::unlimitedQuarantine;
+    } else if (Arg.getAsInteger(0, Val)) {
+      return O.error("'" + Arg + "' value invalid!");
+    }
+
+    return false;
+  }
+};
+
+llvm::cl::opt<std::uint32_t, true, QuarantineSizeParser>
+    DeterministicAllocationQuarantineSize(
+        "kdalloc-quarantine",
+        llvm::cl::desc("Size of quarantine queues in allocator (default=8, "
+                       "disabled=0, unlimited=-1)"),
+        llvm::cl::location(klee::MemoryManager::quarantine),
+        llvm::cl::value_desc("size"), llvm::cl::init(8),
+        llvm::cl::cat(MemoryCat));
 
 llvm::cl::opt<bool> NullOnZeroMalloc(
     "return-null-on-zero-malloc",
     llvm::cl::desc("Returns NULL if malloc(0) is called (default=false)"),
     llvm::cl::init(false), llvm::cl::cat(MemoryCat));
-
-llvm::cl::opt<unsigned> RedzoneSize(
-    "redzone-size",
-    llvm::cl::desc("Set the size of the redzones to be added after each "
-                   "allocation (in bytes). This is important to detect "
-                   "out-of-bounds accesses (default=10)"),
-    llvm::cl::init(10), llvm::cl::cat(MemoryCat));
-
-llvm::cl::opt<unsigned long long> DeterministicStartAddress(
-    "allocate-determ-start-address",
-    llvm::cl::desc("Start address for deterministic allocation. Has to be page "
-                   "aligned (default=0x7ff30000000)"),
-    llvm::cl::init(0x7ff30000000), llvm::cl::cat(MemoryCat));
 } // namespace
 
 /***/
 MemoryManager::MemoryManager(ArrayCache *_arrayCache)
-    : arrayCache(_arrayCache), deterministicSpace(0), nextFreeSlot(0),
-      spaceSize(DeterministicAllocationSize.getValue() * 1024 * 1024) {
+    : arrayCache(_arrayCache) {
   if (DeterministicAllocation) {
-    // Page boundary
-    void *expectedAddress = (void *)DeterministicStartAddress.getValue();
+    if (DeterministicAllocationQuarantineSize ==
+        kdalloc::Allocator::unlimitedQuarantine) {
+      klee_message("Deterministic allocator: Using unlimited quarantine");
+    } else if (DeterministicAllocationQuarantineSize != 0) {
+      klee_message("Deterministic allocator: Using quarantine queue size %u",
+                   DeterministicAllocationQuarantineSize.getValue());
+    }
 
-    char *newSpace =
-        (char *)mmap(expectedAddress, spaceSize, PROT_READ | PROT_WRITE,
-                     MAP_ANONYMOUS | MAP_PRIVATE, -1, 0);
+    std::vector<std::tuple<std::string,
+                           std::uintptr_t, // start address (0 if none
+                                           // requested)
+                           std::size_t,    // size of segment
+                           std::reference_wrapper<kdalloc::AllocatorFactory>,
+                           kdalloc::Allocator *>>
+        requestedSegments;
+    requestedSegments.emplace_back(
+        "globals",
+        DeterministicAllocationGlobalsStartAddress
+            ? DeterministicAllocationGlobalsStartAddress.getValue()
+            : 0,
+        static_cast<std::size_t>(
+            DeterministicAllocationGlobalsSize.getValue()) *
+            1024 * 1024 * 1024,
+        globalsFactory, &globalsAllocator);
+    requestedSegments.emplace_back(
+        "constants",
+        DeterministicAllocationConstantsStartAddress
+            ? DeterministicAllocationConstantsStartAddress.getValue()
+            : 0,
+        static_cast<std::size_t>(
+            DeterministicAllocationConstantsSize.getValue()) *
+            1024 * 1024 * 1024,
+        constantsFactory, &constantsAllocator);
+    requestedSegments.emplace_back(
+        "heap",
+        DeterministicAllocationHeapStartAddress
+            ? DeterministicAllocationHeapStartAddress.getValue()
+            : 0,
+        static_cast<std::size_t>(DeterministicAllocationHeapSize.getValue()) *
+            1024 * 1024 * 1024,
+        heapFactory, nullptr);
+    requestedSegments.emplace_back(
+        "stack",
+        DeterministicAllocationStackStartAddress
+            ? DeterministicAllocationStackStartAddress.getValue()
+            : 0,
+        static_cast<std::size_t>(DeterministicAllocationStackSize.getValue()) *
+            1024 * 1024 * 1024,
+        stackFactory, nullptr);
 
-    if (newSpace == MAP_FAILED) {
-      klee_error("Couldn't mmap() memory for deterministic allocations");
-    }
-    if (expectedAddress != newSpace && expectedAddress != 0) {
-      klee_error("Could not allocate memory deterministically");
+    // check invariants
+#if LLVM_VERSION_CODE >= LLVM_VERSION(10, 0)
+    llvm::Align pageAlignment(pageSize);
+#endif
+    for (auto &requestedSegment : requestedSegments) {
+      auto &segment1 = std::get<0>(requestedSegment);
+      auto &start1 = std::get<1>(requestedSegment);
+      auto &size1 = std::get<2>(requestedSegment);
+      // check for page alignment
+      // NOTE: sizes are assumed to be page aligned due to multiplication
+#if LLVM_VERSION_CODE >= LLVM_VERSION(10, 0)
+      if (start1 != 0 && !llvm::isAligned(pageAlignment, start1)) {
+        klee_error("Deterministic allocator: Requested start address for %s "
+                   "is not page aligned (page size: %zu B)",
+                   segment1.c_str(), pageAlignment.value());
+      }
+#else
+      if (start1 != 0 && llvm::OffsetToAlignment(start1, pageSize) != 0) {
+        klee_error("Deterministic allocator: Requested start address for %s "
+                   "is not page aligned (page size: %zu B)",
+                   segment1.c_str(), pageSize);
+      }
+#endif
+
+      // check for overlap of segments
+      std::uintptr_t end1 = start1 + size1;
+      for (auto &requestedSegment : requestedSegments) {
+        auto &segment2 = std::get<0>(requestedSegment);
+        auto &start2 = std::get<1>(requestedSegment);
+        auto &size2 = std::get<2>(requestedSegment);
+        if (start1 != 0 && start2 != 0 && segment1 != segment2) {
+          std::uintptr_t end2 = start2 + size2;
+          if (!(end1 <= start2 || start1 >= end2)) {
+            klee_error("Deterministic allocator: Requested mapping for %s "
+                       "(start-address=0x%" PRIxPTR " size=%zu GiB) "
+                       "overlaps with that for %s "
+                       "(start-address=0x%" PRIxPTR " size=%zu GiB)",
+                       segment1.c_str(), start1, size1 / (1024 * 1024 * 1024),
+                       segment2.c_str(), start2, size2 / (1024 * 1024 * 1024));
+          }
+        }
+      }
     }
 
-    klee_message("Deterministic memory allocation starting from %p", newSpace);
-    deterministicSpace = newSpace;
-    nextFreeSlot = newSpace;
+    // initialize factories and allocators
+    for (auto &requestedSegment : requestedSegments) {
+      auto &segment = std::get<0>(requestedSegment);
+      auto &start = std::get<1>(requestedSegment);
+      auto &size = std::get<2>(requestedSegment);
+      auto &factory = std::get<3>(requestedSegment);
+      auto &allocator = std::get<4>(requestedSegment);
+      factory.get() = kdalloc::AllocatorFactory(
+          start, size, DeterministicAllocationQuarantineSize);
+
+      if (!factory.get()) {
+        klee_error(
+            "Deterministic allocator: Could not allocate mapping for %s: %s",
+            segment.c_str(), strerror(errno));
+      }
+      if (start && factory.get().getMapping().getBaseAddress() !=
+                       reinterpret_cast<void *>(start)) {
+        klee_error("Deterministic allocator: Could not allocate mapping for %s "
+                   "at requested address",
+                   segment.c_str());
+      }
+      if (factory.get().getMapping().getSize() != size) {
+        klee_error("Deterministic allocator: Could not allocate mapping for %s "
+                   "with the requested size",
+                   segment.c_str());
+      }
+
+      klee_message("Deterministic allocator: %s "
+                   "(start-address=0x%" PRIxPTR " size=%zu GiB)",
+                   segment.c_str(),
+                   reinterpret_cast<std::uintptr_t>(
+                       factory.get().getMapping().getBaseAddress()),
+                   size / (1024 * 1024 * 1024));
+      if (allocator) {
+        *allocator = factory.get().makeAllocator();
+      }
+    }
   }
 }
 
@@ -91,13 +276,10 @@ MemoryManager::~MemoryManager() {
     objects.erase(mo);
     delete mo;
   }
-
-  if (DeterministicAllocation)
-    munmap(deterministicSpace, spaceSize);
 }
 
 MemoryObject *MemoryManager::allocate(uint64_t size, bool isLocal,
-                                      bool isGlobal,
+                                      bool isGlobal, ExecutionState *state,
                                       const llvm::Value *allocSite,
                                       size_t alignment) {
   if (size > 10 * 1024 * 1024)
@@ -116,19 +298,29 @@ MemoryObject *MemoryManager::allocate(uint64_t size, bool isLocal,
 
   uint64_t address = 0;
   if (DeterministicAllocation) {
-    address = llvm::alignTo((uint64_t)nextFreeSlot + alignment - 1, alignment);
+    void *allocAddress;
 
-    // Handle the case of 0-sized allocations as 1-byte allocations.
-    // This way, we make sure we have this allocation between its own red zones
-    size_t alloc_size = std::max(size, (uint64_t)1);
-    if ((char *)address + alloc_size < deterministicSpace + spaceSize) {
-      nextFreeSlot = (char *)address + alloc_size + RedzoneSize;
+    if (isGlobal) {
+      const llvm::GlobalVariable *gv =
+          dyn_cast<llvm::GlobalVariable>(allocSite);
+      if (isa<llvm::Function>(allocSite) || (gv && gv->isConstant())) {
+        allocAddress = constantsAllocator.allocate(
+            std::max(size, static_cast<std::uint64_t>(alignment)));
+      } else {
+        allocAddress = globalsAllocator.allocate(
+            std::max(size, static_cast<std::uint64_t>(alignment)));
+      }
     } else {
-      klee_warning_once(0, "Couldn't allocate %" PRIu64
-                           " bytes. Not enough deterministic space left.",
-                        size);
-      address = 0;
+      if (isLocal) {
+        allocAddress = state->stackAllocator.allocate(
+            std::max(size, static_cast<std::uint64_t>(alignment)));
+      } else {
+        allocAddress = state->heapAllocator.allocate(
+            std::max(size, static_cast<std::uint64_t>(alignment)));
+      }
     }
+
+    address = reinterpret_cast<std::uint64_t>(allocAddress);
   } else {
     // Use malloc for the standard case
     if (alignment <= 8)
@@ -146,8 +338,8 @@ MemoryObject *MemoryManager::allocate(uint64_t size, bool isLocal,
     return 0;
 
   ++stats::allocations;
-  MemoryObject *res = new MemoryObject(address, size, isLocal, isGlobal, false,
-                                       allocSite, this);
+  MemoryObject *res = new MemoryObject(address, size, alignment, isLocal,
+                                       isGlobal, false, allocSite, this);
   objects.insert(res);
   return res;
 }
@@ -165,13 +357,11 @@ MemoryObject *MemoryManager::allocateFixed(uint64_t address, uint64_t size,
 
   ++stats::allocations;
   MemoryObject *res =
-      new MemoryObject(address, size, false, true, true, allocSite, this);
+      new MemoryObject(address, size, 0, false, true, true, allocSite, this);
   objects.insert(res);
   return res;
 }
 
-void MemoryManager::deallocate(const MemoryObject *mo) { assert(0); }
-
 void MemoryManager::markFreed(MemoryObject *mo) {
   if (objects.find(mo) != objects.end()) {
     if (!mo->isFixed && !DeterministicAllocation)
@@ -180,6 +370,21 @@ void MemoryManager::markFreed(MemoryObject *mo) {
   }
 }
 
-size_t MemoryManager::getUsedDeterministicSize() {
-  return nextFreeSlot - deterministicSpace;
+bool MemoryManager::markMappingsAsUnneeded() {
+  if (!DeterministicAllocation)
+    return false;
+
+  if (!DeterministicAllocationMarkAsUnneeded)
+    return false;
+
+  globalsFactory.getMapping().clear();
+  heapFactory.getMapping().clear();
+  stackFactory.getMapping().clear();
+
+  return true;
 }
+
+size_t MemoryManager::getUsedDeterministicSize() {
+  // TODO: implement
+  return 0;
+}
\ No newline at end of file
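(Note, not part of the patch: a sketch of the LocationInfo query that the new double-free and use-after-free checks in Executor.cpp build on. It assumes klee/KDAlloc/kdalloc.h is available and a non-zero quarantine, so a freed allocation remains identifiable for a while.)

#include "klee/KDAlloc/kdalloc.h"

#include <cstddef>
#include <cstdio>

int main() {
  // heap segment with an illustrative 1 GiB mapping and quarantine size 8
  kdalloc::AllocatorFactory heapFactory(0, std::size_t(1) << 30, 8);
  kdalloc::Allocator heap = heapFactory.makeAllocator();

  void *p = heap.allocate(16);
  heap.free(p, 16); // p is now quarantined rather than immediately reusable

  using kdalloc::LocationInfo;
  auto li = heap.location_info(p, 1);
  if (li == LocationInfo::LI_AllocatedOrQuarantined && li.getBaseAddress() == p) {
    // the situation resolveExact() now reports as a double free when the
    // called function is free()
    std::printf("%p points at an allocated or quarantined heap object\n", p);
  }
}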
diff --git a/lib/Core/MemoryManager.h b/lib/Core/MemoryManager.h
index f75c82fb..71a70183 100644
--- a/lib/Core/MemoryManager.h
+++ b/lib/Core/MemoryManager.h
@@ -10,6 +10,8 @@
 #ifndef KLEE_MEMORYMANAGER_H
 #define KLEE_MEMORYMANAGER_H
 
+#include "klee/KDAlloc/kdalloc.h"
+
 #include <cstddef>
 #include <set>
 #include <cstdint>
@@ -19,8 +21,9 @@ class Value;
 }
 
 namespace klee {
-class MemoryObject;
 class ArrayCache;
+class ExecutionState;
+class MemoryObject;
 
 class MemoryManager {
 private:
@@ -28,24 +31,36 @@ private:
   objects_ty objects;
   ArrayCache *const arrayCache;
 
-  char *deterministicSpace;
-  char *nextFreeSlot;
-  size_t spaceSize;
+  kdalloc::AllocatorFactory globalsFactory;
+  kdalloc::Allocator globalsAllocator;
+
+  kdalloc::AllocatorFactory constantsFactory;
+  kdalloc::Allocator constantsAllocator;
 
 public:
   MemoryManager(ArrayCache *arrayCache);
   ~MemoryManager();
 
+  kdalloc::AllocatorFactory heapFactory;
+  kdalloc::StackAllocatorFactory stackFactory;
+
+  static std::uint32_t quarantine;
+
+  static std::size_t pageSize;
+
+  static bool isDeterministic;
+
   /**
    * Returns memory object which contains a handle to real virtual process
    * memory.
    */
   MemoryObject *allocate(uint64_t size, bool isLocal, bool isGlobal,
-                         const llvm::Value *allocSite, size_t alignment);
+                         ExecutionState *state, const llvm::Value *allocSite,
+                         size_t alignment);
   MemoryObject *allocateFixed(uint64_t address, uint64_t size,
                               const llvm::Value *allocSite);
-  void deallocate(const MemoryObject *mo);
   void markFreed(MemoryObject *mo);
+  bool markMappingsAsUnneeded();
   ArrayCache *getArrayCache() const { return arrayCache; }
 
   /*
diff --git a/test/Feature/VarArg.c b/test/Feature/VarArg.c
index 0b8b6698..67807a11 100644
--- a/test/Feature/VarArg.c
+++ b/test/Feature/VarArg.c
@@ -7,7 +7,7 @@
 
 // RUN: %clang %s -emit-llvm %O0opt -c -g -o %t1.bc
 // RUN: rm -rf %t.klee-out
-// RUN: %klee --output-dir=%t.klee-out --allocate-determ=true --allocate-determ-start-address=0x0 %t1.bc | FileCheck %s
+// RUN: %klee --output-dir=%t.klee-out --kdalloc %t1.bc | FileCheck %s
 // RUN: test -f %t.klee-out/test000001.ptr.err
 
 #include <stdarg.h>