From ca2a8fe95396760e7bda4c447530203a87359698 Mon Sep 17 00:00:00 2001
From: Aiden Grossman
Date: Mon, 23 Mar 2026 07:24:41 -0700
Subject: [PATCH] Fix undefined behavior in schedule.cc operator delete

schedule.cc currently makes use of a custom slab allocator by having some
structs specify a custom operator new/operator delete that call into the
slab allocator. However, this setup currently relies on C++ UB, namely
that writes that happen in operator delete are persisted afterwards. The
slab allocator inside the free_slab function uses the memory of the
object being freed to store allocator metadata, which is not allowed
given the rules around operator delete.

This patch changes the internal storage of the slab allocator to a struct
rather than a union so that allocation returns only the actual object
storage, and each object carries a header that the allocator can use for
metadata without writes to it being UB.
---
 vvp/slab.h | 23 ++++++++++++-----------
 1 file changed, 12 insertions(+), 11 deletions(-)

diff --git a/vvp/slab.h b/vvp/slab.h
index e9c3e1a3f..5cc6e2953 100644
--- a/vvp/slab.h
+++ b/vvp/slab.h
@@ -25,8 +25,8 @@ template <unsigned SLAB_SIZE, unsigned CHUNK_COUNT> class slab_t {
 
-      union item_cell_u {
-            item_cell_u*next;
+      struct item_cell_s {
+            item_cell_s*next;
             char space[SLAB_SIZE];
       };
 
@@ -44,11 +44,11 @@ template <unsigned SLAB_SIZE, unsigned CHUNK_COUNT> class slab_t {
       unsigned long pool;

     private:
-      item_cell_u*heap_;
-      item_cell_u initial_chunk_[CHUNK_COUNT];
+      item_cell_s*heap_;
+      item_cell_s initial_chunk_[CHUNK_COUNT];
 
 #ifdef CHECK_WITH_VALGRIND
       // Each slab needs a pointer to the allocated space.
-      item_cell_u**slab_pool;
+      item_cell_s**slab_pool;
       unsigned slab_pool_count;
 #endif
 };
@@ -73,11 +73,11 @@ template <unsigned SLAB_SIZE, unsigned CHUNK_COUNT>
 inline void* slab_t<SLAB_SIZE,CHUNK_COUNT>::alloc_slab()
 {
       if (heap_ == 0) {
-            item_cell_u*chunk = new item_cell_u[CHUNK_COUNT];
+            item_cell_s*chunk = new item_cell_s[CHUNK_COUNT];
 #ifdef CHECK_WITH_VALGRIND
             slab_pool_count += 1;
-            slab_pool = static_cast<item_cell_u**>(realloc(slab_pool,
-                                  slab_pool_count*sizeof(item_cell_u **)));
+            slab_pool = static_cast<item_cell_s**>(realloc(slab_pool,
+                                  slab_pool_count*sizeof(item_cell_s **)));
             slab_pool[slab_pool_count-1] = chunk;
 #endif
             for (unsigned idx = 0 ; idx < CHUNK_COUNT ; idx += 1) {
@@ -87,15 +87,16 @@ inline void* slab_t<SLAB_SIZE,CHUNK_COUNT>::alloc_slab()
             pool += CHUNK_COUNT;
       }

-      item_cell_u*cur = heap_;
+      item_cell_s*cur = heap_;
       heap_ = heap_->next;
-      return cur;
+      return cur->space;
 }

 template <unsigned SLAB_SIZE, unsigned CHUNK_COUNT>
 inline void slab_t<SLAB_SIZE,CHUNK_COUNT>::free_slab(void*ptr)
 {
-      item_cell_u*cur = reinterpret_cast<item_cell_u*> (ptr);
+      char*with_header = reinterpret_cast<char*>(ptr) - sizeof(item_cell_s*);
+      item_cell_s*cur = reinterpret_cast<item_cell_s*> (with_header);
       cur->next = heap_;
       heap_ = cur;
 }