15 #ifndef RAPIDJSON_ALLOCATORS_H_
16 #define RAPIDJSON_ALLOCATORS_H_
19 #include "internal/meta.h"
23 #if RAPIDJSON_HAS_CXX11
24 #include <type_traits>
27 RAPIDJSON_NAMESPACE_BEGIN
70 #ifndef RAPIDJSON_ALLOCATOR_DEFAULT_CHUNK_CAPACITY
71 #define RAPIDJSON_ALLOCATOR_DEFAULT_CHUNK_CAPACITY (64 * 1024)
// CrtAllocator: thin wrapper over the C runtime heap.
// NOTE(review): interior lines are elided in this extraction; comments below
// describe only what the visible lines establish.
// kNeedFree == true: callers must release memory obtained from this allocator.
84 static const bool kNeedFree =
true;
// Allocates `size` bytes (body not fully visible here).
85 void* Malloc(
size_t size) {
// Realloc takes the original size explicitly so pool-style allocators can
// share the same three-argument interface (body elided).
91 void* Realloc(
void* originalPtr,
size_t originalSize,
size_t newSize) {
// Static, stateless free — forwards straight to RAPIDJSON_FREE; never throws.
99 static void Free(
void *ptr) RAPIDJSON_NOEXCEPT {
RAPIDJSON_FREE(ptr); }
// Equality operators: the allocator carries no state, so comparison bodies
// (elided here) presumably treat all instances as interchangeable — TODO confirm.
101 bool operator==(
const CrtAllocator&)
const RAPIDJSON_NOEXCEPT {
104 bool operator!=(
const CrtAllocator&)
const RAPIDJSON_NOEXCEPT {
// MemoryPoolAllocator: chunked arena allocator; chunks come from BaseAllocator.
// NOTE(review): many interior lines (class header, SharedData definition,
// braces) are elided in this extraction — comments are limited to visible code.
128 template <
typename BaseAllocator = CrtAllocator>
// Intrusive singly-linked list of chunks; ownBaseAllocator is non-null only
// when this pool created the base allocator itself (and must destroy it).
140 ChunkHeader *chunkHead;
141 BaseAllocator* ownBaseAllocator;
// Aligned sizes of the two headers that precede user data in the buffer.
146 static const size_t SIZEOF_SHARED_DATA =
RAPIDJSON_ALIGN(
sizeof(SharedData));
147 static const size_t SIZEOF_CHUNK_HEADER =
RAPIDJSON_ALIGN(
sizeof(ChunkHeader));
// Layout helpers: the first ChunkHeader lives immediately after SharedData,
// and a chunk's payload starts immediately after its ChunkHeader.
149 static inline ChunkHeader *GetChunkHead(SharedData *shared)
151 return reinterpret_cast<ChunkHeader*>(reinterpret_cast<uint8_t*>(shared) + SIZEOF_SHARED_DATA);
153 static inline uint8_t *GetChunkBuffer(SharedData *shared)
155 return reinterpret_cast<uint8_t*>(shared->chunkHead) + SIZEOF_CHUNK_HEADER;
// kNeedFree == false: individual Free() is a no-op (see Free below);
// kRefCounted == true: copies share one pool via SharedData::refcount.
161 static const bool kNeedFree =
false;
162 static const bool kRefCounted =
true;
// Default constructor (header elided): allocates SharedData + a zero-capacity
// sentinel chunk from the base allocator; creates an owned BaseAllocator
// only when the caller passed none.
170 chunk_capacity_(chunkSize),
171 baseAllocator_(baseAllocator ? baseAllocator :
RAPIDJSON_NEW(BaseAllocator)()),
172 shared_(static_cast<SharedData*>(baseAllocator_ ? baseAllocator_->Malloc(SIZEOF_SHARED_DATA + SIZEOF_CHUNK_HEADER) : 0))
177 shared_->ownBaseAllocator = 0;
180 shared_->ownBaseAllocator = baseAllocator_;
// Initialize the sentinel chunk: empty, no capacity, end of list.
182 shared_->chunkHead = GetChunkHead(shared_);
183 shared_->chunkHead->capacity = 0;
184 shared_->chunkHead->size = 0;
185 shared_->chunkHead->next = 0;
// ownBuffer == true: the destructor must Free(shared_) when refcount drops to 0.
186 shared_->ownBuffer =
true;
187 shared_->refcount = 1;
// Constructor taking a caller-owned buffer: SharedData and the first chunk are
// carved out of `buffer` itself, so no heap allocation happens here.
// AlignBuffer adjusts `buffer`/`size` to pointer alignment first.
200 MemoryPoolAllocator(
void *buffer,
size_t size,
size_t chunkSize = kDefaultChunkCapacity, BaseAllocator* baseAllocator = 0) :
201 chunk_capacity_(chunkSize),
202 baseAllocator_(baseAllocator),
203 shared_(static_cast<SharedData*>(AlignBuffer(buffer, size)))
206 shared_->chunkHead = GetChunkHead(shared_);
// First chunk's capacity is whatever remains after the two embedded headers.
// NOTE(review): no visible check that size >= SIZEOF_SHARED_DATA +
// SIZEOF_CHUNK_HEADER — an assertion may exist in elided lines; confirm.
207 shared_->chunkHead->capacity = size - SIZEOF_SHARED_DATA - SIZEOF_CHUNK_HEADER;
208 shared_->chunkHead->size = 0;
209 shared_->chunkHead->next = 0;
210 shared_->ownBaseAllocator = 0;
// ownBuffer == false: the caller keeps ownership; destructor must not free it.
211 shared_->ownBuffer =
false;
212 shared_->refcount = 1;
// Copy constructor initializer list (header and refcount increment elided):
// copies share the same SharedData.
216 chunk_capacity_(rhs.chunk_capacity_),
217 baseAllocator_(rhs.baseAllocator_),
// Copy assignment: increment rhs's refcount BEFORE destroying our own share —
// this ordering also makes self-assignment safe.
223 MemoryPoolAllocator& operator=(
const MemoryPoolAllocator& rhs) RAPIDJSON_NOEXCEPT
226 ++rhs.shared_->refcount;
227 this->~MemoryPoolAllocator();
228 baseAllocator_ = rhs.baseAllocator_;
229 chunk_capacity_ = rhs.chunk_capacity_;
230 shared_ = rhs.shared_;
// Move operations (C++11 only): steal the shared state; the elided lines
// presumably null out rhs.shared_ — TODO confirm against full source.
234 #if RAPIDJSON_HAS_CXX11_RVALUE_REFS
235 MemoryPoolAllocator(MemoryPoolAllocator&& rhs) RAPIDJSON_NOEXCEPT :
236 chunk_capacity_(rhs.chunk_capacity_),
237 baseAllocator_(rhs.baseAllocator_),
243 MemoryPoolAllocator& operator=(MemoryPoolAllocator&& rhs) RAPIDJSON_NOEXCEPT
246 this->~MemoryPoolAllocator();
247 baseAllocator_ = rhs.baseAllocator_;
248 chunk_capacity_ = rhs.chunk_capacity_;
249 shared_ = rhs.shared_;
// Destructor (header elided): if other copies remain, just drop our reference.
263 if (shared_->refcount > 1) {
// Last owner: capture the owned base allocator before freeing SharedData
// (which holds the pointer), free the buffer only if the pool allocated it.
268 BaseAllocator *a = shared_->ownBaseAllocator;
269 if (shared_->ownBuffer) {
270 baseAllocator_->Free(shared_);
// Clear (header elided): walk the chunk list, releasing chunks back to the
// base allocator; the surviving head chunk is reset to empty.
279 ChunkHeader* c = shared_->chunkHead;
283 shared_->chunkHead = c->next;
284 baseAllocator_->Free(c);
286 shared_->chunkHead->size = 0;
// Capacity(): total bytes reserved across all chunks.
295 for (ChunkHeader* c = shared_->chunkHead; c != 0; c = c->next)
296 capacity += c->capacity;
// Size(): total bytes handed out (loop body elided; presumably sums c->size).
303 size_t Size() const RAPIDJSON_NOEXCEPT {
306 for (ChunkHeader* c = shared_->chunkHead; c != 0; c = c->next)
// Shared(): true when more than one allocator shares this pool.
316 return shared_->refcount > 1;
// Malloc (header elided): bump-pointer allocation from the current chunk;
// grows with a new chunk sized max(chunk_capacity_, size) when it won't fit.
326 if (
RAPIDJSON_UNLIKELY(shared_->chunkHead->size + size > shared_->chunkHead->capacity))
327 if (!AddChunk(chunk_capacity_ > size ? chunk_capacity_ : size))
330 void *buffer = GetChunkBuffer(shared_) + shared_->chunkHead->size;
331 shared_->chunkHead->size += size;
// Realloc: pool variant. Null pointer degenerates to Malloc; shrinking keeps
// the existing block (pool memory is never returned early).
336 void*
Realloc(
void* originalPtr,
size_t originalSize,
size_t newSize) {
337 if (originalPtr == 0)
338 return Malloc(newSize);
348 if (originalSize >= newSize)
// Fast path: if this is the most recent allocation in the current chunk,
// grow it in place by bumping the chunk's size.
352 if (originalPtr == GetChunkBuffer(shared_) + shared_->chunkHead->size - originalSize) {
353 size_t increment = static_cast<size_t>(newSize - originalSize);
354 if (shared_->chunkHead->size + increment <= shared_->chunkHead->capacity) {
355 shared_->chunkHead->size += increment;
// Slow path: fresh allocation + copy; the old block is abandoned in the pool.
361 if (
void* newBuffer = Malloc(newSize)) {
363 std::memcpy(newBuffer, originalPtr, originalSize);
// Free is a deliberate no-op: memory is reclaimed en masse by Clear()/dtor.
371 static void Free(
void *ptr) RAPIDJSON_NOEXCEPT { (void)ptr; }
// Equality: two allocators are equal iff they share the same pool.
377 return shared_ == rhs.shared_;
381 return !operator==(rhs);
// AddChunk: allocate header+payload in one block and push it onto the list.
// Lazily creates an owned BaseAllocator first if none exists (condition elided).
389 bool AddChunk(
size_t capacity) {
391 shared_->ownBaseAllocator = baseAllocator_ =
RAPIDJSON_NEW(BaseAllocator)();
392 if (ChunkHeader* chunk = static_cast<ChunkHeader*>(baseAllocator_->Malloc(SIZEOF_CHUNK_HEADER + capacity))) {
393 chunk->capacity = capacity;
395 chunk->next = shared_->chunkHead;
396 shared_->chunkHead = chunk;
// AlignBuffer: round `buf` up to pointer alignment; `size` is passed by
// reference so the elided lines can presumably subtract the alignment
// padding from it — TODO confirm against full source.
403 static inline void* AlignBuffer(
void* buf,
size_t &size)
// mask = alignment - 1; works because sizeof(void*) is a power of two.
406 const uintptr_t mask =
sizeof(
void*) - 1;
407 const uintptr_t ubuf = reinterpret_cast<uintptr_t>(buf);
409 const uintptr_t abuf = (ubuf + mask) & ~mask;
411 buf = reinterpret_cast<void*>(abuf);
// Per-instance state: chunk growth size and the (possibly shared) base allocator.
417 size_t chunk_capacity_;
418 BaseAllocator* baseAllocator_;
// IsRefCounted<T>: SFINAE trait — primary template (base elided, presumably
// FalseType) with a specialization selected when T::kRefCounted exists and
// is true (presumably TrueType) — TODO confirm bases against full source.
423 template<
typename,
typename =
void>
424 struct IsRefCounted :
428 struct IsRefCounted<T, typename internal::EnableIfCond<T::kRefCounted>
::Type> :
// Typed Realloc helper: converts element counts to byte counts for the
// allocator's untyped Realloc.
433 template<
typename T,
typename A>
434 inline T* Realloc(A& a, T* old_p,
size_t old_n,
size_t new_n)
437 return static_cast<T*>(a.Realloc(old_p, old_n *
sizeof(T), new_n *
sizeof(T)));
// Typed Malloc: expressed as a realloc from an empty block.
440 template<
typename T,
typename A>
441 inline T *Malloc(A& a,
size_t n = 1)
443 return Realloc<T, A>(a, NULL, 0, n);
// Typed Free: a realloc to zero elements; result intentionally discarded.
446 template<
typename T,
typename A>
447 inline void Free(A& a, T *p,
size_t n = 1)
449 static_cast<void>(Realloc<T, A>(a, p, n, 0));
// StdAllocator<T, BaseAllocator>: adapts a RapidJSON allocator to the
// std::allocator interface so it can back standard containers.
// NOTE(review): class header and several bodies are elided in this extraction.
454 RAPIDJSON_DIAG_OFF(effc++)
457 template <
typename T,
typename BaseAllocator = CrtAllocator>
459 public std::allocator<T>
461 typedef std::allocator<T> allocator_type;
// Under C++11 delegate member-type/construct/destroy plumbing to
// std::allocator_traits; pre-C++11 uses std::allocator members directly.
462 #if RAPIDJSON_HAS_CXX11
463 typedef std::allocator_traits<allocator_type> traits_type;
465 typedef allocator_type traits_type;
469 typedef BaseAllocator BaseAllocatorType;
// Copy / rebind-copy constructor initializer fragments (headers elided).
478 baseAllocator_(rhs.baseAllocator_)
484 baseAllocator_(rhs.baseAllocator_)
487 #if RAPIDJSON_HAS_CXX11_RVALUE_REFS
489 allocator_type(std::move(rhs)),
490 baseAllocator_(std::move(rhs.baseAllocator_))
// Propagate the allocator on container move/swap so the wrapped state follows.
493 #if RAPIDJSON_HAS_CXX11
494 using propagate_on_container_move_assignment = std::true_type;
495 using propagate_on_container_swap = std::true_type;
// Implicit wrap of a BaseAllocator (copied by value into baseAllocator_).
499 StdAllocator(
const BaseAllocator& allocator) RAPIDJSON_NOEXCEPT :
501 baseAllocator_(allocator)
// Standard allocator member types, sourced from traits_type.
512 typedef typename traits_type::size_type size_type;
513 typedef typename traits_type::difference_type difference_type;
515 typedef typename traits_type::value_type value_type;
516 typedef typename traits_type::pointer pointer;
517 typedef typename traits_type::const_pointer const_pointer;
519 #if RAPIDJSON_HAS_CXX11
521 typedef typename std::add_lvalue_reference<value_type>::type &reference;
522 typedef typename std::add_lvalue_reference<typename std::add_const<value_type>::type>::type &const_reference;
// address(): std::addressof avoids any overloaded operator& on value_type.
524 pointer address(reference r)
const RAPIDJSON_NOEXCEPT
526 return std::addressof(r);
528 const_pointer address(const_reference r)
const RAPIDJSON_NOEXCEPT
530 return std::addressof(r);
533 size_type max_size() const RAPIDJSON_NOEXCEPT
535 return traits_type::max_size(*
this);
// Perfect-forwarding construct/destroy via allocator_traits.
538 template <
typename ...Args>
539 void construct(pointer p, Args&&... args)
541 traits_type::construct(*
this, p, std::forward<Args>(args)...);
543 void destroy(pointer p)
545 traits_type::destroy(*
this, p);
548 #else // !RAPIDJSON_HAS_CXX11
// Pre-C++11 equivalents using std::allocator's own members.
550 typedef typename allocator_type::reference reference;
551 typedef typename allocator_type::const_reference const_reference;
553 pointer address(reference r)
const RAPIDJSON_NOEXCEPT
555 return allocator_type::address(r);
557 const_pointer address(const_reference r)
const RAPIDJSON_NOEXCEPT
559 return allocator_type::address(r);
562 size_type max_size() const RAPIDJSON_NOEXCEPT
564 return allocator_type::max_size();
567 void construct(pointer p, const_reference r)
569 allocator_type::construct(p, r);
571 void destroy(pointer p)
573 allocator_type::destroy(p);
576 #endif // !RAPIDJSON_HAS_CXX11
// allocate/deallocate route through the typed Malloc/Free helpers above,
// i.e. ultimately through baseAllocator_'s Realloc.
578 template <
typename U>
579 U* allocate(size_type n = 1,
const void* = 0)
581 return RAPIDJSON_NAMESPACE::Malloc<U>(baseAllocator_, n);
583 template <
typename U>
584 void deallocate(U* p, size_type n = 1)
586 RAPIDJSON_NAMESPACE::Free<U>(baseAllocator_, p, n);
589 pointer allocate(size_type n = 1,
const void* = 0)
591 return allocate<value_type>(n);
593 void deallocate(pointer p, size_type n = 1)
595 deallocate<value_type>(p, n);
598 #if RAPIDJSON_HAS_CXX11
// Stateless base allocators make all StdAllocator instances interchangeable.
599 using is_always_equal = std::is_empty<BaseAllocator>;
// Cross-type equality: delegates to the wrapped base allocators.
603 bool operator==(
const StdAllocator<U, BaseAllocator>& rhs)
const RAPIDJSON_NOEXCEPT
605 return baseAllocator_ == rhs.baseAllocator_;
608 bool operator!=(
const StdAllocator<U, BaseAllocator>& rhs)
const RAPIDJSON_NOEXCEPT
610 return !operator==(rhs);
// RapidJSON allocator concept: forward Malloc/Realloc/Free to the base.
614 static const bool kNeedFree = BaseAllocator::kNeedFree;
616 void* Malloc(
size_t size)
618 return baseAllocator_.Malloc(size);
620 void* Realloc(
void* originalPtr,
size_t originalSize,
size_t newSize)
622 return baseAllocator_.Realloc(originalPtr, originalSize, newSize);
624 static void Free(
void *ptr) RAPIDJSON_NOEXCEPT
626 BaseAllocator::Free(ptr);
// Rebound instantiations need access to each other's baseAllocator_.
630 template <
typename,
typename>
631 friend class StdAllocator;
633 BaseAllocator baseAllocator_;
// StdAllocator<void> partial specialization: only needed pre-C++17, where
// containers may still mention allocator<void>; carries no allocate/deallocate.
636 #if !RAPIDJSON_HAS_CXX17 // std::allocator<void> deprecated in C++17
637 template <
typename BaseAllocator>
639 public std::allocator<void>
641 typedef std::allocator<void> allocator_type;
644 typedef BaseAllocator BaseAllocatorType;
// Copy / rebind-copy initializer fragments (constructor headers elided).
653 baseAllocator_(rhs.baseAllocator_)
659 baseAllocator_(rhs.baseAllocator_)
// Implicit wrap of a BaseAllocator, mirroring the primary template.
663 StdAllocator(
const BaseAllocator& baseAllocator) RAPIDJSON_NOEXCEPT :
665 baseAllocator_(baseAllocator)
676 typedef typename allocator_type::value_type value_type;
// Friend declaration fragment (class keyword elided) for rebound access.
679 template <
typename,
typename>
682 BaseAllocator baseAllocator_;
690 RAPIDJSON_NAMESPACE_END
692 #endif // RAPIDJSON_ALLOCATORS_H_