Diffstat (limited to 'src/libexpr/eval-inline.hh')
-rw-r--r--    src/libexpr/eval-inline.hh    91
1 file changed, 78 insertions, 13 deletions
diff --git a/src/libexpr/eval-inline.hh b/src/libexpr/eval-inline.hh
index aef1f6351..08a419923 100644
--- a/src/libexpr/eval-inline.hh
+++ b/src/libexpr/eval-inline.hh
@@ -24,6 +24,81 @@ LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s, const
 }
 
 
+/* Note: Various places expect the allocated memory to be zeroed. */
+[[gnu::always_inline]]
+inline void * allocBytes(size_t n)
+{
+    void * p;
+#if HAVE_BOEHMGC
+    p = GC_MALLOC(n);
+#else
+    p = calloc(n, 1);
+#endif
+    if (!p) throw std::bad_alloc();
+    return p;
+}
+
+
+[[gnu::always_inline]]
+Value * EvalState::allocValue()
+{
+#if HAVE_BOEHMGC
+    /* We use the boehm batch allocator to speed up allocations of Values (of which there are many).
+       GC_malloc_many returns a linked list of objects of the given size, where the first word
+       of each object is also the pointer to the next object in the list. This also means that we
+       have to explicitly clear the first word of every object we take. */
+    if (!*valueAllocCache) {
+        *valueAllocCache = GC_malloc_many(sizeof(Value));
+        if (!*valueAllocCache) throw std::bad_alloc();
+    }
+
+    /* GC_NEXT is a convenience macro for accessing the first word of an object.
+       Take the first list item, advance the list to the next item, and clear the next pointer. */
+    void * p = *valueAllocCache;
+    *valueAllocCache = GC_NEXT(p);
+    GC_NEXT(p) = nullptr;
+#else
+    void * p = allocBytes(sizeof(Value));
+#endif
+
+    nrValues++;
+    return (Value *) p;
+}
+
+
+[[gnu::always_inline]]
+Env & EvalState::allocEnv(size_t size)
+{
+    nrEnvs++;
+    nrValuesInEnvs += size;
+
+    Env * env;
+
+#if HAVE_BOEHMGC
+    if (size == 1) {
+        /* see allocValue for explanations. */
+        if (!*env1AllocCache) {
+            *env1AllocCache = GC_malloc_many(sizeof(Env) + sizeof(Value *));
+            if (!*env1AllocCache) throw std::bad_alloc();
+        }
+
+        void * p = *env1AllocCache;
+        *env1AllocCache = GC_NEXT(p);
+        GC_NEXT(p) = nullptr;
+        env = (Env *) p;
+    } else
+#endif
+        env = (Env *) allocBytes(sizeof(Env) + size * sizeof(Value *));
+
+    env->type = Env::Plain;
+
+    /* We assume that env->values has been cleared by the allocator; maybeThunk() and lookupVar fromWith expect this. */
+
+    return *env;
+}
+
+
+[[gnu::always_inline]]
 void EvalState::forceValue(Value & v, const Pos & pos)
 {
     forceValue(v, [&]() { return pos; });
@@ -52,6 +127,7 @@ void EvalState::forceValue(Value & v, Callable getPos)
 }
 
 
+[[gnu::always_inline]]
 inline void EvalState::forceAttrs(Value & v, const Pos & pos)
 {
     forceAttrs(v, [&]() { return pos; });
@@ -59,6 +135,7 @@ inline void EvalState::forceAttrs(Value & v, const Pos & pos)
 
 
 template <typename Callable>
+[[gnu::always_inline]]
 inline void EvalState::forceAttrs(Value & v, Callable getPos)
 {
     forceValue(v, getPos);
@@ -67,6 +144,7 @@ inline void EvalState::forceAttrs(Value & v, Callable getPos)
 }
 
 
+[[gnu::always_inline]]
 inline void EvalState::forceList(Value & v, const Pos & pos)
 {
     forceValue(v, pos);
@@ -74,18 +152,5 @@ inline void EvalState::forceList(Value & v, const Pos & pos)
         throwTypeError(pos, "value is %1% while a list was expected", v);
 }
 
-/* Note: Various places expect the allocated memory to be zeroed. */
-inline void * allocBytes(size_t n)
-{
-    void * p;
-#if HAVE_BOEHMGC
-    p = GC_MALLOC(n);
-#else
-    p = calloc(n, 1);
-#endif
-    if (!p) throw std::bad_alloc();
-    return p;
-}
-
 }
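
For readers unfamiliar with the Boehm batch allocator that the new allocValue and allocEnv code relies on, the following is a minimal, self-contained sketch of the same pattern outside of Nix. The struct Cell, the cache variable, the allocCell helper and the <gc/gc.h> header path are illustrative assumptions, not part of this patch or of the Nix code base; only GC_malloc_many, GC_NEXT and GC_INIT are real Boehm GC APIs (declared in gc.h). Compile with something like `g++ sketch.cc -lgc`.

#include <gc/gc.h>   // Boehm GC: GC_INIT, GC_malloc_many, GC_NEXT (path may be <gc.h> on some systems)
#include <cstdio>
#include <new>       // std::bad_alloc

/* Hypothetical stand-in for nix::Value, used only for illustration. */
struct Cell
{
    void * link;     // first word: reused by GC_malloc_many as the free-list pointer
    double payload;
};

/* Free-list cache playing the role of EvalState::valueAllocCache. */
static void * cache = nullptr;

static Cell * allocCell()
{
    if (!cache) {
        /* GC_malloc_many returns a linked list of objects of the requested size;
           the first word of each object points to the next object in the list. */
        cache = GC_malloc_many(sizeof(Cell));
        if (!cache) throw std::bad_alloc();
    }
    void * p = cache;
    cache = GC_NEXT(p);     // advance the cache to the next free object
    GC_NEXT(p) = nullptr;   // clear the link word so the object starts out zeroed
    return static_cast<Cell *>(p);
}

int main()
{
    GC_INIT();
    for (int i = 0; i < 5; ++i) {
        Cell * c = allocCell();
        c->payload = i;
        std::printf("cell %d at %p\n", i, static_cast<void *>(c));
    }
    return 0;
}

As the patch comments note, the pattern amortizes the cost of the allocator call across a whole batch of objects, at the price of having to clear the link word of each object before handing it out, which is what `GC_NEXT(p) = nullptr` does both here and in the patch.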