1010
1111namespace fast_alloc
1212{
13- ThreadSafePoolAllocator::ThreadSafePoolAllocator (const std::size_t block_size, const std::size_t block_count)
13+ ThreadSafePoolAllocator::ThreadSafePoolAllocator (std::size_t block_size, std::size_t block_count)
1414 : block_size_(block_size)
1515 , block_count_(block_count)
1616 , allocated_count_(0 )
1717 , memory_(nullptr )
18- , free_list_({ nullptr , 0 } )
18+ , free_list_(nullptr )
1919 {
2020 assert (block_size >= sizeof (void *) && " Block size must be at least pointer size" );
2121 assert (block_count > 0 && " Block count must be greater than zero" );
@@ -29,21 +29,21 @@ namespace fast_alloc
2929 assert (memory_ && " Failed to allocate memory pool" );
3030
3131 // Initialise free list - each block points to the next
32- auto * block = static_cast <std::byte*>(memory_);
32+ std::byte * block = static_cast <std::byte*>(memory_);
3333
3434 for (std::size_t i = 0 ; i < block_count_ - 1 ; ++i)
3535 {
36- const auto current = reinterpret_cast <void **>(block);
36+ void ** current = reinterpret_cast <void **>(block);
3737 block += block_size_;
3838 *current = block;
3939 }
4040
4141 // Last block points to nullptr
42- const auto last = reinterpret_cast <void **>(block);
42+ void ** last = reinterpret_cast <void **>(block);
4343 *last = nullptr ;
4444
4545 // Set initial free list head
46- free_list_.store ({ memory_, 0 }, std::memory_order_relaxed );
46+ free_list_.store (memory_, std::memory_order_release );
4747 }
4848
4949 ThreadSafePoolAllocator::~ThreadSafePoolAllocator ()
@@ -60,25 +60,25 @@ namespace fast_alloc
6060
6161 void * ThreadSafePoolAllocator::allocate ()
6262 {
63- TaggedPointer old_head = free_list_.load (std::memory_order_acquire);
63+ void * old_head = free_list_.load (std::memory_order_acquire);
6464
65- while (old_head. ptr != nullptr )
65+ while (old_head != nullptr )
6666 {
6767 // Read next pointer from the block
68- void * next = *static_cast <void **>(old_head. ptr );
68+ void * next = *static_cast <void **>(old_head);
6969
70- // Try to update head to next with incremented tag
71-
72- // CAS: if free_list_ still equals old_head, update to new_head
73- if (const TaggedPointer new_head = {next, old_head. tag + 1 }; free_list_.compare_exchange_weak (
70+ // Try to update head to next
71+ // WARNING: without the version tag this CAS has the classic ABA race — if this
72+ // block is freed and re-allocated by other threads between the load of `next`
73+ // and the CAS, the CAS succeeds with a stale `next` and corrupts the free
74+ // list. Block *reuse* (not OS-level freeing) is what triggers ABA, so "never
75+ // freed back to OS" does not make the untagged pointer safe.
73+ if (free_list_.compare_exchange_weak (
7474 old_head,
75- new_head ,
75+ next ,
7676 std::memory_order_release,
7777 std::memory_order_acquire))
7878 {
7979 // Successfully allocated
8080 allocated_count_.fetch_add (1 , std::memory_order_relaxed);
81- return old_head. ptr ;
81+ return old_head;
8282 }
8383
8484 // CAS failed, old_head now contains the new value, retry
@@ -95,22 +95,18 @@ namespace fast_alloc
9595 return ;
9696 }
9797
98- TaggedPointer old_head = free_list_.load (std::memory_order_acquire);
99- TaggedPointer new_head{};
98+ void * old_head = free_list_.load (std::memory_order_acquire);
10099
101100 do
102101 {
103102 // Make this block point to current head
104- *static_cast <void **>(ptr) = old_head.ptr ;
105-
106- // New head points to this block with incremented tag
107- new_head = {ptr, old_head.tag + 1 };
103+ *static_cast <void **>(ptr) = old_head;
108104
109- // CAS: if free_list_ still equals old_head, update to new_head
105+ // Try to make this block the new head
110106 }
111107 while (!free_list_.compare_exchange_weak (
112108 old_head,
113- new_head ,
109+ ptr ,
114110 std::memory_order_release,
115111 std::memory_order_acquire));
116112
0 commit comments