@@ -13,18 +13,17 @@ namespace lf {
/**
 * @brief A slab_stack is a user-space stack backed by a single fixed-size slab of memory.
 *
 * The ctrl metadata and usable stack space are fused into a single allocation: the first
 * node of the slab is the header, and the remaining `size` nodes are the usable stack
 * space. There is no segmentation, caching, or geometric growth — if the slab is full,
 * push throws.
 *
 * For this to conform to `worker_stack` the allocator's void pointer type must be `void *`
 */
export template <allocator_of<std::byte> Allocator = std::allocator<std::byte>>
class slab_stack {

  // Forward declaration so the type aliases below can reference node before its definition.
  struct node;

  // Allocator machinery rebound from the user's byte allocator to node-sized units.
  using node_traits = std::allocator_traits<Allocator>::template rebind_traits<node>;
  using node_alloc_t = node_traits::allocator_type;
@@ -33,22 +32,19 @@ class slab_stack {
  using size_int = node_traits::size_type;
  using diff_int = node_traits::difference_type;

  // Fused ctrl+node: the first element of every slab allocation.
  // node_alloc, sp_cache, and size live here; the `size` nodes that follow are
  // the usable stack space. Mirrors how geometric_stack stores ctrl data in its
  // first node — no reinterpret_cast is ever needed.
  struct alignas (k_new_align) node {
    [[no_unique_address]]
    node_alloc_t node_alloc; // Propagated to new owners on acquire.
    node_ptr sp_cache;       // Stack pointer saved across release/acquire.
    diff_int size;           // Usable node count following this header.
  };

  // Default capacity: one page of usable space (header occupies the first node).
  // NOTE(review): alignas only sets a minimum alignment — sizeof(node) may exceed
  // k_new_align once node_alloc/sp_cache/size are included, which changes the
  // per-node granularity relative to the old fixed-size node. Presumably intended;
  // confirm the capacity math (k_page_size / sizeof(node)) matches expectations.
  static constexpr diff_int k_default_nodes = safe_cast<diff_int>(k_page_size / sizeof(node)) - 1;

  static_assert (k_default_nodes > 0);
@@ -63,12 +59,13 @@ class slab_stack {
6359
   private:
    friend slab_stack;
    // Only slab_stack itself can mint checkpoints.
    explicit constexpr checkpoint_t (node_ptr ptr) noexcept : m_ctrl (ptr) {}
    node_ptr m_ctrl = nullptr; // Header node of the slab captured at checkpoint time.
  };
6965
 public:
  // Default construction: default allocator, default slab capacity.
  constexpr slab_stack () : slab_stack(Allocator{}) {}
  // Size-only convenience: default allocator, caller-chosen usable node count.
  explicit constexpr slab_stack (diff_int num_nodes) : slab_stack(Allocator{}, num_nodes) {}
7269 explicit constexpr slab_stack (Allocator const &alloc, diff_int num_nodes = k_default_nodes)
7370 : m_alloc(alloc) {
7471 init_slab (num_nodes);
@@ -82,7 +79,7 @@ class slab_stack {
8279
  // Destruction requires the stack to be empty; frees the fused header+slab allocation.
  constexpr ~slab_stack () noexcept {
    LF_ASSUME (empty ());
    free_ctrl (m_ctrl);
  }
8784
8885 /* *
@@ -98,7 +95,7 @@ class slab_stack {
9895 */
  [[nodiscard]]
  constexpr auto checkpoint () noexcept -> checkpoint_t {
    // Hand out the current header node; a later acquire() resumes from it.
    return checkpoint_t {m_ctrl};
  }
103100
104101 /* *
@@ -142,18 +139,18 @@ class slab_stack {
142139
143140 [[nodiscard]]
144141 constexpr auto prepare_release () const noexcept -> release_t {
145- // Guard against null release (failed prior allocation).
146- if (m_slab != nullptr ) {
147- m_slab ->sp_cache = m_sp;
142+ // Guard against null ctrl (failed prior allocation in release() ).
143+ if (m_ctrl != nullptr ) {
144+ m_ctrl ->sp_cache = m_sp;
148145 }
149146 return release_t {key ()};
150147 }
151148
152149 constexpr void release ([[maybe_unused]] release_t ) noexcept {
153- diff_int next_size = (m_slab != nullptr ) ? m_slab ->size : k_default_nodes;
150+ diff_int next_size = (m_ctrl != nullptr ) ? m_ctrl ->size : k_default_nodes;
154151
155152 // Hand off the current slab to whoever holds the checkpoint; clear local state.
156- m_slab = nullptr ;
153+ m_ctrl = nullptr ;
157154 m_lo = nullptr ;
158155 m_sp = nullptr ;
159156 m_hi = nullptr ;
@@ -169,20 +166,20 @@ class slab_stack {
  // Adopt the slab captured by `ckpt`, discarding any slab currently held.
  constexpr void acquire (checkpoint_t ckpt) noexcept {
    LF_ASSUME (empty ());

    // A null checkpoint means there is nothing to resume; keep our own slab.
    if (ckpt.m_ctrl == nullptr) {
      return;
    }

    // Discard the fresh empty slab we prepared during release() (may be null on alloc failure).
    free_ctrl (m_ctrl);

    m_ctrl = ckpt.m_ctrl;

    // When allocators can compare unequal, adopt the allocator stored in the header so
    // future (de)allocations use the allocator that produced this slab.
    if constexpr (!node_traits::is_always_equal::value) {
      m_alloc = node_alloc_t {std::as_const (m_ctrl->node_alloc)};
    }

    LF_ASSUME (m_ctrl != nullptr);

    // Restore m_lo/m_sp/m_hi from the adopted header.
    load_local ();
  }
@@ -191,46 +188,44 @@ class slab_stack {
  [[no_unique_address]]
  node_alloc_t m_alloc; // Rebound node allocator; may be adopted from a header on acquire().

  node_ptr m_ctrl = nullptr; // Header node (fused ctrl+first-node of the slab).
  node_ptr m_lo = nullptr;   // Base of usable space (m_ctrl + 1).
  node_ptr m_sp = nullptr;   // Stack pointer for the current slab.
  node_ptr m_hi = nullptr;   // One-past-the-end of usable space.
198195
199- // Restore local pointers from the slab header, taking sp from the cache.
196+ // Restore local pointers from the header node , taking sp from the cache.
200197 constexpr void load_local () noexcept {
201- LF_ASSUME (m_slab != nullptr );
202- node_ptr base = reinterpret_cast <node_ptr>(m_slab) + k_header_nodes;
203- m_lo = base;
204- m_hi = base + m_slab->size ;
205- m_sp = m_slab->sp_cache ;
198+ LF_ASSUME (m_ctrl != nullptr );
199+ m_lo = m_ctrl + 1 ;
200+ m_hi = m_lo + m_ctrl->size ;
201+ m_sp = m_ctrl->sp_cache ;
206202 }
207203
  // Allocate and construct a fresh slab with num_nodes usable nodes.
  // On construction failure the raw allocation is returned and m_ctrl is reset to
  // null, so the destructor / free_ctrl observe a consistent empty state.
  constexpr void init_slab (diff_int num_nodes) {
    LF_ASSUME (num_nodes > 0);

    // One extra node at the front holds the fused header.
    size_int total = safe_cast<size_int>(1 + num_nodes);
    m_ctrl = node_traits::allocate (m_alloc, total);

    LF_TRY {
      // Construct the header in-place: propagated allocator, empty sp cache, capacity.
      node_traits::construct (m_alloc, m_ctrl, m_alloc, nullptr, num_nodes);
    } LF_CATCH_ALL {
      node_traits::deallocate (m_alloc, m_ctrl, total);
      m_ctrl = nullptr;
      LF_RETHROW;
    }

    // Fresh slab: stack pointer starts at the base of usable space.
    m_lo = m_sp = m_ctrl + 1;
    m_hi = m_lo + num_nodes;
  }
226222
227223 // Destroy and deallocate a slab (no-op if null).
228- constexpr void free_slab (slab *s) noexcept {
229- if (s != nullptr ) {
230- size_int total = safe_cast<size_int>(k_header_nodes + s->size );
231- node_ptr raw = reinterpret_cast <node_ptr>(s);
232- std::destroy_at (s);
233- node_traits::deallocate (m_alloc, raw, total);
224+ constexpr void free_ctrl (node_ptr ctrl) noexcept {
225+ if (ctrl != nullptr ) {
226+ size_int total = safe_cast<size_int>(1 + ctrl->size );
227+ node_traits::destroy (m_alloc, ctrl);
228+ node_traits::deallocate (m_alloc, ctrl, total);
234229 }
235230 }
236231
0 commit comments