|
| 1 | +#ifndef ARENA_DYN_VAR_H |
| 2 | +#define ARENA_DYN_VAR_H |
#include <cstddef>
#include <new>
#include <utility>
#include <vector>
| 4 | + |
| 5 | +namespace builder { |
| 6 | + |
using byte_t = unsigned char;
// Per-type storage: a growable list of fixed-capacity raw chunks plus a
// running count of objects placement-constructed across those chunks.
struct arena_list {
	// Raw chunk pointers; each chunk holds storage for
	// arena_objects_per_chunk objects of one type (the type itself is
	// tracked externally via the generated type id).
	std::vector<byte_t*> chunks;
	// Number of live objects across all chunks; objects fill chunks in order,
	// so object i lives in chunk i / arena_objects_per_chunk.
	size_t used_objects = 0;
};
| 12 | + |
// Fixed number of objects each allocated chunk can hold (per type).
constexpr const int arena_objects_per_chunk = 64;
| 14 | + |
| 15 | +// Registry type for types that are allocatable |
| 16 | +// Each global construct call assigns a unique id to each type |
| 17 | +struct allocatable_type_registry { |
| 18 | + // Declaration for unique counter, definition is in cpp |
| 19 | + static int type_counter; |
| 20 | + |
| 21 | + // A function that can destroy a list of objects of a type |
| 22 | + using deleter_t = void(*)(arena_list*); |
| 23 | + |
| 24 | + // This has to be a pointer because we cannot guarantee order of invocation of constructors |
| 25 | + static std::vector<deleter_t> *type_deleters; |
| 26 | + |
| 27 | + int type_id; |
| 28 | + |
| 29 | + // Grab an ID and increment |
| 30 | + allocatable_type_registry(deleter_t deleter) { |
| 31 | + if (type_deleters == nullptr) { |
| 32 | + type_deleters = new std::vector<deleter_t>(); |
| 33 | + } |
| 34 | + type_deleters->push_back(deleter); |
| 35 | + type_id = type_counter; |
| 36 | + type_counter++; |
| 37 | + } |
| 38 | + |
| 39 | + // Post global constructor, this returns the maximum |
| 40 | + // type id. Make sure this is ONLY called from main |
| 41 | + static int get_max_type_id(void) { |
| 42 | + return type_counter; |
| 43 | + } |
| 44 | +}; |
| 45 | + |
| 46 | +// This type is instantiated for each |
| 47 | +// type that is allocated in the whole binary |
| 48 | +template <typename T> |
| 49 | +struct allocatable_type_manager { |
| 50 | + static allocatable_type_registry register_type; |
| 51 | + static void delete_objects(arena_list *list) { |
| 52 | + for (size_t i = 0; i < list->used_objects; i++) { |
| 53 | + int chunk_id = i / arena_objects_per_chunk; |
| 54 | + int chunk_offset = i % arena_objects_per_chunk; |
| 55 | + |
| 56 | + auto ptr_b = list->chunks[chunk_id] + sizeof(T) * chunk_offset; |
| 57 | + auto ptr = (T*)ptr_b; |
| 58 | + ptr->~T(); |
| 59 | + } |
| 60 | + list->used_objects = 0; |
| 61 | + } |
| 62 | +}; |
| 63 | + |
// Out-of-line definition of the per-type registry object. Odr-using
// allocatable_type_manager<T> (as dyn_var_arena::grab_buffer does) forces
// this definition to be instantiated, registering T's deleter and id.
template <typename T>
allocatable_type_registry allocatable_type_manager<T>::register_type(allocatable_type_manager<T>::delete_objects);
| 66 | + |
| 67 | +class dyn_var_arena { |
| 68 | + // Arena contains a separate list for each type |
| 69 | + // indexed by a generated type id |
| 70 | + std::vector<arena_list> arena_lists; |
| 71 | + |
| 72 | + template <typename T> |
| 73 | + byte_t* grab_buffer() { |
| 74 | + // Get arena list index for this type |
| 75 | + int index = allocatable_type_manager<T>::register_type.type_id; |
| 76 | + arena_list& list = arena_lists[index]; |
| 77 | + // If list is full add another chunk |
| 78 | + if (list.used_objects == list.chunks.size() * arena_objects_per_chunk) { |
| 79 | + // Alignment and pointer and the end |
| 80 | + static_assert(alignof(T) <= alignof(std::max_align_t), |
| 81 | + "Allocated type has higher alignment requirement that std::max_align_t" |
| 82 | + "needs manual alignment"); |
| 83 | + byte_t* new_chunk = new byte_t[sizeof(T) * arena_objects_per_chunk]; |
| 84 | + list.chunks.push_back(new_chunk); |
| 85 | + } |
| 86 | + int chunk_id = list.used_objects / arena_objects_per_chunk; |
| 87 | + int chunk_offset = list.used_objects % arena_objects_per_chunk; |
| 88 | + |
| 89 | + auto ptr = list.chunks[chunk_id] + sizeof(T) * chunk_offset; |
| 90 | + list.used_objects++; |
| 91 | + return ptr; |
| 92 | + } |
| 93 | + |
| 94 | +public: |
| 95 | + |
| 96 | + dyn_var_arena() { |
| 97 | + arena_lists.resize(allocatable_type_registry::get_max_type_id()); |
| 98 | + } |
| 99 | + |
| 100 | + template <typename T, typename...Args> |
| 101 | + T* allocate(Args&&...args) { |
| 102 | + auto ptr_b = grab_buffer<T>(); |
| 103 | + auto ptr = new (ptr_b) T(std::forward<Args>(args)...); |
| 104 | + return ptr; |
| 105 | + } |
| 106 | + |
| 107 | + |
| 108 | + void reset_arena(void) { |
| 109 | + // For each type call the respective deleters on the type |
| 110 | + for (unsigned int i = 0; i < arena_lists.size(); i++) { |
| 111 | + if (arena_lists[i].used_objects > 0) { |
| 112 | + (*allocatable_type_registry::type_deleters)[i](&(arena_lists[i])); |
| 113 | + } |
| 114 | + } |
| 115 | + } |
| 116 | + |
| 117 | + ~dyn_var_arena() { |
| 118 | + reset_arena(); |
| 119 | + for (auto& a: arena_lists) { |
| 120 | + for (auto c: a.chunks) { |
| 121 | + delete[] c; |
| 122 | + } |
| 123 | + } |
| 124 | + } |
| 125 | +}; |
| 126 | + |
| 127 | +} |
| 128 | + |
| 129 | +#endif |
0 commit comments