12 #include "allocator_base.hpp"
13 #include "internal/noatomic.hpp"
14 #include "internal/reallocator.hpp"
19 inline namespace v_100 {
// NOTE(review): fragment of a move-only list node. The embedded numbers
// (37, 41, ...) are original line numbers from the extraction source; the
// gaps between them show most of the struct body is missing from this view.
// The node appears to bundle an Allocator instance, an atomic `next` link
// (seen used elsewhere as `next.load()`), and the byte size of the block
// the node itself lives in — presumably so erase_node() can free it.
// TODO: recover the full struct before editing.
37 , allocatedThisSize{ 0 }
41 Node(Node &&x) noexcept
// Move assignment: steals the allocator and recorded size, then zeroes the
// source's size so the moved-from node no longer claims ownership.
46 Node &operator=(Node &&x) noexcept
48 allocator = std::move(x.allocator);
50 allocatedThisSize = x.allocatedThisSize;
53 x.allocatedThisSize = 0;
// Size of the memory block this Node occupies (the node is allocated out of
// its own `allocator` — see create_node).
60 size_t allocatedThisSize;
// NOTE(review): fragment. Walks the node list starting at root_ and asks each
// node's allocator for `n` bytes; the loop body, the success check, and the
// advance to p->next are missing from this view (line numbers jump 65→68→70→74).
// Presumably returns an empty/null block when no existing node can satisfy the
// request — TODO confirm against the full source.
65 block allocate_no_grow(
size_t n) noexcept
68 auto p = root_.load();
70 result = p->allocator.allocate(n);
// Reached the tail without a successful allocation — caller (allocate) is
// expected to grow the list. Branch body not visible here.
74 if (!p->next.load()) {
// NOTE(review): fragment. Bootstraps a new Node: a temporary Node on the stack
// allocates storage for its own permanent home out of its own allocator, records
// how large that storage is, then move-assigns itself into that storage. The
// declaration of `nodeOnStack`, null-checks on `nodeBlock`, and the return are
// not visible in this extraction (lines 83-87, 89, 92-100 missing).
82 Node *create_node() noexcept
88 auto nodeBlock = nodeOnStack.allocator.allocate(
sizeof(Node));
// Remember the real block length so erase_node can hand the exact block back.
90 nodeOnStack.allocatedThisSize = nodeBlock.length;
91 auto result =
static_cast<Node *
>(nodeBlock.ptr);
// Move the fully-initialized stack node into its self-allocated storage.
101 *result = std::move(nodeOnStack);
// NOTE(review): fragment. Recursively tears down the list tail-first: recurse
// into n->next, then move the node back onto the stack (its allocator comes
// with it), reconstruct the block the node occupied, and let that allocator
// free it. Destroying tail-first is required because freeing a node destroys
// the allocator that owns its successors' memory — presumably; confirm with
// the missing lines (110-113, 115, 117-122, 125, 127-128).
109 void erase_node(Node *n) noexcept
114 if (n->next.load()) {
116 erase_node(n->next.load());
// Pull the node out of its own storage so its allocator survives the free below.
123 stackNode = std::move(*n);
124 block allocatedBlock(n, stackNode.allocatedThisSize);
126 stackNode.allocator.deallocate(allocatedBlock);
// NOTE(review): fragment. Releases the whole node chain starting at the root.
// Any reset of root_ to nullptr afterwards is not visible here (lines 130,
// 132-133 missing) — TODO confirm root_ is cleared in the full source.
129 void shrink() noexcept
131 erase_node(root_.load());
// NOTE(review): fragment. Linear scan from root_ for the node whose allocator
// owns block `b`; the loop structure, the advance to p->next, and the
// not-found return (presumably nullptr, given the `!= nullptr` check in owns())
// are missing from this view (lines 135, 137, 139-149).
134 Node *find_owning_node(
const block &b)
const noexcept
136 auto p = root_.load();
138 if (p->allocator.owns(b)) {
// NOTE(review): fragment — trait forwarding. The cascading wrapper simply
// re-exports the wrapped Allocator's compile-time properties and its
// good_size() rounding hint unchanged.
150 using allocator = Allocator;
152 static constexpr
bool supports_truncated_deallocation = Allocator::supports_truncated_deallocation;
153 static constexpr
unsigned alignment = Allocator::alignment;
// Delegates size rounding to the underlying allocator.
160 static constexpr
size_t good_size(
size_t n) {
161 return Allocator::good_size(n);
// NOTE(review): fragment of the move constructor / move assignment pair: the
// constructor appears to delegate to operator= (`*this = std::move(x)`), and
// assignment takes over the other instance's root pointer. Self-assignment
// guard and the reset of x.root_ are not visible (lines 167-174, 176+ missing).
166 *
this = std::move(x);
175 root_ = std::move(x.root_);
// NOTE(review): fragment of the main allocate(n) path. Strategy visible here:
// (1) try every existing node via allocate_no_grow; (2) if the list is empty,
// create the first node and CAS it into root_ — on CAS failure another thread
// won the race, so the speculatively created node is erased and the existing
// list is retried; (3) otherwise append a new node at the tail with a
// CAS-retry loop, then allocate from the grown list. Early-success returns,
// null-node checks, and the final return are missing from this extraction
// (many original lines absent between 198 and 229).
198 block result = allocate_no_grow(n);
// --- case: empty list — race to install the first node ---
204 if (root_.load() ==
nullptr) {
205 auto firstNode = create_node();
206 Node *nullNode =
nullptr;
// compare_exchange_weak may fail spuriously or because a peer installed a
// root first; either way the locally built node must be released.
208 if (!root_.compare_exchange_weak(nullNode, firstNode)) {
209 erase_node(firstNode);
212 result = allocate_no_grow(n);
// --- case: non-empty list — append a new node at the tail ---
219 auto newNode = create_node();
220 Node *nullNode =
nullptr;
221 auto p = root_.load();
// Walk to the current tail, then try to CAS the new node into next;
// repeat (do/while) until the link sticks.
224 while (p->next.load() !=
nullptr) {
227 }
while (!p->next.compare_exchange_weak(nullNode, newNode));
229 result = allocate_no_grow(n);
// NOTE(review): fragment of deallocate(b). Debug-asserts that the block is
// actually owned by one of the cascaded allocators, then routes the free to
// the owning node's allocator. The owns() guard preceding the assert and the
// null-check on p are not visible here (lines 244-246, 248 missing).
243 assert(!
"It is not wise to let me deallocate a foreign Block!");
247 auto p = find_owning_node(b);
249 p->allocator.deallocate(b);
// NOTE(review): fragment of reallocate(b, n). First lets the shared helper
// handle the trivial cases (presumably n == 0, empty block, etc. — see
// internal/reallocator.hpp), then asks the owning node's allocator to resize
// in place. The fallback path on failure (likely allocate-copy-deallocate
// across nodes) is missing from this view (lines 264-266, 268-271, 273+).
263 if (internal::is_reallocation_handled_default(*
this, b, n)) {
267 auto p = find_owning_node(b);
272 if (p->allocator.reallocate(b, n)) {
// NOTE(review): fragment. expand(b, delta) is only compiled when the wrapped
// Allocator itself has expand() — classic enable_if/has_expand SFINAE gate
// (pre-concepts style, consistent with the file's C++11/14 idioms). Forwards
// to the owning node's allocator; the signature line and null/owns guards are
// missing here (lines 288-289, 291-293).
286 template<
typename U = Allocator>
287 typename std::enable_if<traits::has_expand<U>::value,
bool>::type
290 auto p = find_owning_node(b);
294 return p->allocator.expand(b, delta);
// NOTE(review): fragment of owns(b) — a block is owned iff some node in the
// cascade claims it. Relies on find_owning_node returning nullptr on a miss.
304 return find_owning_node(b) !=
nullptr;
// NOTE(review): fragment — only the SFINAE header of deallocate_all() is
// visible; it is gated on the wrapped Allocator providing deallocate_all.
// The body (lines 315+) is entirely missing from this extraction.
313 template <
typename U = Allocator>
314 typename std::enable_if<traits::has_deallocate_all<U>::value,
void>::type
// NOTE(review): fragment — two template headers (lines 328 and 343) whose
// class definitions are missing; presumably the public shared/non-shared
// cascading allocator wrappers over cascading_allocator_base. The final line
// re-exports the inline-versioned namespace into the enclosing one.
328 template <
class Allocator>
343 template <
class Allocator>
351 using namespace v_100;
// WARNING(review): the lines below are NOT part of the source proper — they
// read as a doxygen-style member-signature index appended by whatever tool
// produced this extraction (bare declarations, no bodies, no semicolon-scoped
// context). Do not attempt to compile or edit them; recover the original
// header instead.
std::enable_if< traits::has_deallocate_all< U >::value, void >::type deallocate_all() noexcept
block allocate(size_t n) noexcept
void deallocate(block &b) noexcept
bool reallocate(block &b, size_t n) noexcept
bool reallocate_with_copy(OldAllocator &oldAllocator, NewAllocator &newAllocator, block &b, size_t n) noexcept
bool owns(const block &b) const noexcept
std::enable_if< traits::has_expand< U >::value, bool >::type expand(block &b, size_t delta) noexcept
~cascading_allocator_base()