Skip to content

Commit c4f2820

Browse files
author
Conor
committed
constexpred
1 parent 9d7b00b commit c4f2820

2 files changed

Lines changed: 133 additions & 66 deletions

File tree

src/batteries/slab_stack.cxx

Lines changed: 49 additions & 54 deletions
Original file line numberDiff line numberDiff line change
@@ -13,18 +13,17 @@ namespace lf {
1313
/**
1414
* @brief A slab_stack is a user-space stack backed by a single fixed-size slab of memory.
1515
*
16-
* The ctrl metadata and usable stack space are fused into a single allocation: a header
17-
* at the front of the slab is followed immediately by the usable nodes. There is no
18-
* segmentation, caching, or geometric growth — if the slab is full, push throws.
16+
* The ctrl metadata and usable stack space are fused into a single allocation: the first
17+
* node of the slab is the header, and the remaining `size` nodes are the usable stack
18+
* space. There is no segmentation, caching, or geometric growth — if the slab is full,
19+
* push throws.
1920
*
2021
* For this to conform to `worker_stack` the allocators void pointer type must be `void *`
2122
*/
2223
export template <allocator_of<std::byte> Allocator = std::allocator<std::byte>>
2324
class slab_stack {
2425

25-
// Alignment unit — all allocations are a multiple of this size.
26-
struct alignas(k_new_align) node {};
27-
static_assert(sizeof(node) == k_new_align);
26+
struct node; // Forward declaration so type aliases can reference node before its definition.
2827

2928
using node_traits = std::allocator_traits<Allocator>::template rebind_traits<node>;
3029
using node_alloc_t = node_traits::allocator_type;
@@ -33,22 +32,19 @@ class slab_stack {
3332
using size_int = node_traits::size_type;
3433
using diff_int = node_traits::difference_type;
3534

36-
// Fused ctrl+node header — lives at the very start of every slab allocation.
37-
// The usable stack space (size nodes) follows directly after the header region.
38-
struct slab {
35+
// Fused ctrl+node: the first element of every slab allocation.
36+
// node_alloc, sp_cache, and size live here; the `size` nodes that follow are
37+
// the usable stack space. Mirrors how geometric_stack stores ctrl data in its
38+
// first node — no reinterpret_cast is ever needed.
39+
struct alignas(k_new_align) node {
3940
[[no_unique_address]]
4041
node_alloc_t node_alloc; // Propagated to new owners on acquire.
4142
node_ptr sp_cache; // Stack pointer saved across release/acquire.
42-
diff_int size; // Usable node count in this slab.
43+
diff_int size; // Usable node count following this header.
4344
};
4445

45-
// Number of node-sized units occupied by the header at the front of each allocation.
46-
static constexpr diff_int k_header_nodes =
47-
safe_cast<diff_int>((sizeof(slab) + sizeof(node) - 1) / sizeof(node));
48-
49-
// Default capacity: fill one page minus the header.
50-
static constexpr diff_int k_default_nodes =
51-
safe_cast<diff_int>(k_page_size / sizeof(node)) - k_header_nodes;
46+
// Default capacity: fill exactly one page in total — the first node is the header, the remaining nodes are usable.
47+
static constexpr diff_int k_default_nodes = safe_cast<diff_int>(k_page_size / sizeof(node)) - 1;
5248

5349
static_assert(k_default_nodes > 0);
5450

@@ -63,12 +59,13 @@ class slab_stack {
6359

6460
private:
6561
friend slab_stack;
66-
explicit constexpr checkpoint_t(slab *ptr) noexcept : m_slab(ptr) {}
67-
slab *m_slab = nullptr;
62+
explicit constexpr checkpoint_t(node_ptr ptr) noexcept : m_ctrl(ptr) {}
63+
node_ptr m_ctrl = nullptr;
6864
};
6965

7066
public:
7167
constexpr slab_stack() : slab_stack(Allocator{}) {}
68+
explicit constexpr slab_stack(diff_int num_nodes) : slab_stack(Allocator{}, num_nodes) {}
7269
explicit constexpr slab_stack(Allocator const &alloc, diff_int num_nodes = k_default_nodes)
7370
: m_alloc(alloc) {
7471
init_slab(num_nodes);
@@ -82,7 +79,7 @@ class slab_stack {
8279

8380
constexpr ~slab_stack() noexcept {
8481
LF_ASSUME(empty());
85-
free_slab(m_slab);
82+
free_ctrl(m_ctrl);
8683
}
8784

8885
/**
@@ -98,7 +95,7 @@ class slab_stack {
9895
*/
9996
[[nodiscard]]
10097
constexpr auto checkpoint() noexcept -> checkpoint_t {
101-
return checkpoint_t{m_slab};
98+
return checkpoint_t{m_ctrl};
10299
}
103100

104101
/**
@@ -142,18 +139,18 @@ class slab_stack {
142139

143140
[[nodiscard]]
144141
constexpr auto prepare_release() const noexcept -> release_t {
145-
// Guard against null release (failed prior allocation).
146-
if (m_slab != nullptr) {
147-
m_slab->sp_cache = m_sp;
142+
// Guard against null ctrl (failed prior allocation in release()).
143+
if (m_ctrl != nullptr) {
144+
m_ctrl->sp_cache = m_sp;
148145
}
149146
return release_t{key()};
150147
}
151148

152149
constexpr void release([[maybe_unused]] release_t) noexcept {
153-
diff_int next_size = (m_slab != nullptr) ? m_slab->size : k_default_nodes;
150+
diff_int next_size = (m_ctrl != nullptr) ? m_ctrl->size : k_default_nodes;
154151

155152
// Hand off the current slab to whoever holds the checkpoint; clear local state.
156-
m_slab = nullptr;
153+
m_ctrl = nullptr;
157154
m_lo = nullptr;
158155
m_sp = nullptr;
159156
m_hi = nullptr;
@@ -169,20 +166,20 @@ class slab_stack {
169166
constexpr void acquire(checkpoint_t ckpt) noexcept {
170167
LF_ASSUME(empty());
171168

172-
if (ckpt.m_slab == nullptr) {
169+
if (ckpt.m_ctrl == nullptr) {
173170
return;
174171
}
175172

176173
// Discard the fresh empty slab we prepared during release() (may be null on alloc failure).
177-
free_slab(m_slab);
174+
free_ctrl(m_ctrl);
178175

179-
m_slab = ckpt.m_slab;
176+
m_ctrl = ckpt.m_ctrl;
180177

181178
if constexpr (!node_traits::is_always_equal::value) {
182-
m_alloc = node_alloc_t{std::as_const(m_slab->node_alloc)};
179+
m_alloc = node_alloc_t{std::as_const(m_ctrl->node_alloc)};
183180
}
184181

185-
LF_ASSUME(m_slab != nullptr);
182+
LF_ASSUME(m_ctrl != nullptr);
186183

187184
load_local();
188185
}
@@ -191,46 +188,44 @@ class slab_stack {
191188
[[no_unique_address]]
192189
node_alloc_t m_alloc;
193190

194-
slab *m_slab = nullptr;
195-
node_ptr m_lo = nullptr; // Base of usable space in the current slab.
196-
node_ptr m_sp = nullptr; // Stack pointer for the current slab.
197-
node_ptr m_hi = nullptr; // One-past-the-end of usable space in the current slab.
191+
node_ptr m_ctrl = nullptr; // Header node (fused ctrl+first-node of the slab).
192+
node_ptr m_lo = nullptr; // Base of usable space (m_ctrl + 1).
193+
node_ptr m_sp = nullptr; // Stack pointer for the current slab.
194+
node_ptr m_hi = nullptr; // One-past-the-end of usable space.
198195

199-
// Restore local pointers from the slab header, taking sp from the cache.
196+
// Restore local pointers from the header node, taking sp from the cache.
200197
constexpr void load_local() noexcept {
201-
LF_ASSUME(m_slab != nullptr);
202-
node_ptr base = reinterpret_cast<node_ptr>(m_slab) + k_header_nodes;
203-
m_lo = base;
204-
m_hi = base + m_slab->size;
205-
m_sp = m_slab->sp_cache;
198+
LF_ASSUME(m_ctrl != nullptr);
199+
m_lo = m_ctrl + 1;
200+
m_hi = m_lo + m_ctrl->size;
201+
m_sp = m_ctrl->sp_cache;
206202
}
207203

208204
// Allocate and construct a fresh slab with num_nodes usable nodes.
209205
constexpr void init_slab(diff_int num_nodes) {
210206
LF_ASSUME(num_nodes > 0);
211207

212-
size_int total = safe_cast<size_int>(k_header_nodes + num_nodes);
213-
node_ptr raw = node_traits::allocate(m_alloc, total);
208+
size_int total = safe_cast<size_int>(1 + num_nodes);
209+
m_ctrl = node_traits::allocate(m_alloc, total);
214210

215211
LF_TRY {
216-
m_slab = std::construct_at(reinterpret_cast<slab *>(std::to_address(raw)), m_alloc, nullptr, num_nodes);
212+
node_traits::construct(m_alloc, m_ctrl, m_alloc, nullptr, num_nodes);
217213
} LF_CATCH_ALL {
218-
node_traits::deallocate(m_alloc, raw, total);
214+
node_traits::deallocate(m_alloc, m_ctrl, total);
215+
m_ctrl = nullptr;
219216
LF_RETHROW;
220217
}
221218

222-
node_ptr base = raw + k_header_nodes;
223-
m_lo = m_sp = base;
224-
m_hi = base + num_nodes;
219+
m_lo = m_sp = m_ctrl + 1;
220+
m_hi = m_lo + num_nodes;
225221
}
226222

227223
// Destroy and deallocate a slab (no-op if null).
228-
constexpr void free_slab(slab *s) noexcept {
229-
if (s != nullptr) {
230-
size_int total = safe_cast<size_int>(k_header_nodes + s->size);
231-
node_ptr raw = reinterpret_cast<node_ptr>(s);
232-
std::destroy_at(s);
233-
node_traits::deallocate(m_alloc, raw, total);
224+
constexpr void free_ctrl(node_ptr ctrl) noexcept {
225+
if (ctrl != nullptr) {
226+
size_int total = safe_cast<size_int>(1 + ctrl->size);
227+
node_traits::destroy(m_alloc, ctrl);
228+
node_traits::deallocate(m_alloc, ctrl, total);
234229
}
235230
}
236231

test/src/stack.cpp

Lines changed: 84 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -54,11 +54,12 @@ constexpr void check_non_empty(auto const &stack) {
5454

5555
} // namespace
5656

57-
TEMPLATE_TEST_CASE("Concept", "[stacks]", lf::geometric_stack<>, lf::adaptor_stack<>) {
57+
TEMPLATE_TEST_CASE("Concept", "[stacks]", lf::geometric_stack<>, lf::adaptor_stack<>, lf::slab_stack<>) {
5858
STATIC_REQUIRE(worker_stack<TestType>); //
5959
}
6060

61-
TEMPLATE_TEST_CASE("Basic push and pop", "[stacks]", lf::geometric_stack<>, lf::adaptor_stack<>) {
61+
TEMPLATE_TEST_CASE("Basic push and pop", "[stacks]", lf::geometric_stack<>, lf::adaptor_stack<>,
62+
lf::slab_stack<>) {
6263
TEST_CONSTEXPR([]() -> bool {
6364
TestType stack;
6465
check_empty(stack);
@@ -81,7 +82,8 @@ TEMPLATE_TEST_CASE("Basic push and pop", "[stacks]", lf::geometric_stack<>, lf::
8182
});
8283
}
8384

84-
TEMPLATE_TEST_CASE("Checkpoint and Acquire/Release", "[stacks]", lf::geometric_stack<>, lf::adaptor_stack<>) {
85+
TEMPLATE_TEST_CASE("Checkpoint and Acquire/Release", "[stacks]", lf::geometric_stack<>, lf::adaptor_stack<>,
86+
lf::slab_stack<>) {
8587
TEST_CONSTEXPR([]() -> bool {
8688
TestType stack1;
8789
void *p1 = stack1.push(100);
@@ -104,7 +106,7 @@ TEMPLATE_TEST_CASE("Checkpoint and Acquire/Release", "[stacks]", lf::geometric_s
104106
});
105107
}
106108

107-
TEMPLATE_TEST_CASE("Single pass", "[stacks]", lf::geometric_stack<>, lf::adaptor_stack<>) {
109+
TEMPLATE_TEST_CASE("Single pass", "[stacks]", lf::geometric_stack<>, lf::adaptor_stack<>, lf::slab_stack<>) {
108110
for (int k = 0; k < 10; ++k) {
109111

110112
TestType stack;
@@ -122,16 +124,21 @@ TEMPLATE_TEST_CASE("Single pass", "[stacks]", lf::geometric_stack<>, lf::adaptor
122124
std::vector<entry> entries;
123125
const std::size_t depth = depth_dist(rng);
124126

125-
// Push phase
127+
// Push phase — break early if slab_stack exhausts its fixed capacity
126128
for (std::size_t j = 0; j < depth; ++j) {
127129
std::size_t s = size_dist(rng);
128-
void *p = stack.push(s);
130+
void *p = nullptr;
131+
try {
132+
p = stack.push(s);
133+
} catch (std::bad_alloc const &) {
134+
break;
135+
}
129136
check_alignment(p);
130137
entries.push_back({.ptr = p, .size = s});
131138
}
132139

133-
// Pop phase (FILO)
134-
for (std::size_t j = depth; j > 0; --j) {
140+
// Pop phase (FILO) — use entries.size() in case push exited early
141+
for (std::size_t j = entries.size(); j > 0; --j) {
135142
auto const &e = entries[j - 1];
136143
stack.pop(e.ptr, e.size);
137144
}
@@ -141,7 +148,8 @@ TEMPLATE_TEST_CASE("Single pass", "[stacks]", lf::geometric_stack<>, lf::adaptor
141148
}
142149
}
143150

144-
TEMPLATE_TEST_CASE("Randomized push/pop", "[stacks]", lf::geometric_stack<>, lf::adaptor_stack<>) {
151+
TEMPLATE_TEST_CASE("Randomized push/pop", "[stacks]", lf::geometric_stack<>, lf::adaptor_stack<>,
152+
lf::slab_stack<>) {
145153
TestType stack;
146154
std::mt19937_64 rng{std::random_device{}()};
147155
std::bernoulli_distribution push_dist{0.51};
@@ -163,7 +171,12 @@ TEMPLATE_TEST_CASE("Randomized push/pop", "[stacks]", lf::geometric_stack<>, lf:
163171

164172
if (entries.empty() || push_dist(rng)) {
165173
std::size_t s = size_dist(rng);
166-
void *p = stack.push(s);
174+
void *p = nullptr;
175+
try {
176+
p = stack.push(s);
177+
} catch (std::bad_alloc const &) {
178+
break; // slab_stack exhausted; clean up and finish
179+
}
167180
check_alignment(p);
168181
entries.push_back({.ptr = p, .size = s});
169182
total_pushed++;
@@ -184,7 +197,8 @@ TEMPLATE_TEST_CASE("Randomized push/pop", "[stacks]", lf::geometric_stack<>, lf:
184197
check_empty(stack);
185198
}
186199

187-
TEMPLATE_TEST_CASE("Spikey randomized push/pop", "[stacks]", lf::geometric_stack<>, lf::adaptor_stack<>) {
200+
TEMPLATE_TEST_CASE("Spikey randomized push/pop", "[stacks]", lf::geometric_stack<>, lf::adaptor_stack<>,
201+
lf::slab_stack<>) {
188202
TestType stack;
189203
std::mt19937_64 rng{std::random_device{}()};
190204

@@ -217,7 +231,12 @@ TEMPLATE_TEST_CASE("Spikey randomized push/pop", "[stacks]", lf::geometric_stack
217231

218232
if (do_push) {
219233
std::size_t s = size_dist(rng);
220-
void *p = stack.push(s);
234+
void *p = nullptr;
235+
try {
236+
p = stack.push(s);
237+
} catch (std::bad_alloc const &) {
238+
break; // slab_stack exhausted; clean up and finish
239+
}
221240
check_alignment(p);
222241
entries.push_back({.ptr = p, .size = s});
223242
total_pushed++;
@@ -238,3 +257,56 @@ TEMPLATE_TEST_CASE("Spikey randomized push/pop", "[stacks]", lf::geometric_stack
238257
}
239258
check_empty(stack);
240259
}
260+
261+
// ---- slab_stack specific ----
262+
//
263+
// Tests that exercise behaviour unique to slab_stack's fixed-size design.
264+
265+
TEST_CASE("slab_stack - throws when full", "[stacks]") {
266+
// Use a tiny slab (2 usable nodes) to exercise the overflow path precisely.
267+
lf::slab_stack<> stack(2);
268+
269+
void *p1 = stack.push(k_new_align);
270+
void *p2 = stack.push(k_new_align);
271+
REQUIRE_THROWS_AS(stack.push(k_new_align), std::bad_alloc);
272+
273+
stack.pop(p2, k_new_align);
274+
stack.pop(p1, k_new_align);
275+
check_empty(stack);
276+
}
277+
278+
TEST_CASE("slab_stack - single pass", "[stacks]") {
279+
for (int k = 0; k < 10; ++k) {
280+
// Slab sized to hold the worst-case live footprint without early exit:
281+
// depth_max (5000) * roundup(size_max (200), k_new_align=16) / k_new_align
282+
// = 5000 * 13 = 65 000 nodes, with headroom.
283+
lf::slab_stack<> stack(70'000);
284+
std::mt19937_64 rng{std::random_device{}()};
285+
std::uniform_int_distribution<std::size_t> size_dist{1, 200};
286+
std::uniform_int_distribution<std::size_t> depth_dist{5, 5000};
287+
288+
struct entry {
289+
void *ptr;
290+
std::size_t size;
291+
};
292+
293+
for (int i = 0; i < 2; ++i) {
294+
std::vector<entry> entries;
295+
const std::size_t depth = depth_dist(rng);
296+
297+
for (std::size_t j = 0; j < depth; ++j) {
298+
std::size_t s = size_dist(rng);
299+
void *p = stack.push(s);
300+
check_alignment(p);
301+
entries.push_back({.ptr = p, .size = s});
302+
}
303+
304+
for (std::size_t j = depth; j > 0; --j) {
305+
auto const &e = entries[j - 1];
306+
stack.pop(e.ptr, e.size);
307+
}
308+
309+
check_empty(stack);
310+
}
311+
}
312+
}

0 commit comments

Comments
 (0)