#include "allocator.h"
#include "mem.h"
#include "types.h"
#include "zialloc_memory.hpp"

#include <atomic>
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <cstring>

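// Global statistics. Counters are relaxed atomics; the per-thread batching
// below keeps them off the hot path. g_bytes_in_use is signed so a thread
// that frees more than it allocates can carry a negative delta.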
static bool g_initialized = false;
static allocator_stats_t g_stats{};
static std::atomic<uint64_t> g_alloc_count{0};
static std::atomic<uint64_t> g_free_count{0};
static std::atomic<uint64_t> g_realloc_count{0};
static std::atomic<size_t> g_bytes_allocated{0};
static std::atomic<int64_t> g_bytes_in_use{0};

namespace {
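// Per-thread scratch space for statistics. Each thread accumulates counts
// locally and folds them into the global atomics in batches, trading a
// bounded lag in reported totals for far less cache-line contention.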
struct LocalStatsBatch {
    uint64_t alloc_count;
    uint64_t free_count;
    uint64_t realloc_count;
    size_t bytes_allocated;
    int64_t bytes_in_use_delta;
    uint32_t ops;
};

static thread_local LocalStatsBatch g_local_stats{0, 0, 0, 0, 0, 0};
static constexpr uint32_t STATS_FLUSH_INTERVAL = 1024;

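// Drain this thread's batch into the global counters and reset it. Only the
// calling thread's batch is flushed; other threads' pending counts stay
// local until they reach their own flush point.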
static inline void flush_local_stats_batch() {
    if (g_local_stats.alloc_count) {
        g_alloc_count.fetch_add(g_local_stats.alloc_count, std::memory_order_relaxed);
        g_local_stats.alloc_count = 0;
    }
    if (g_local_stats.free_count) {
        g_free_count.fetch_add(g_local_stats.free_count, std::memory_order_relaxed);
        g_local_stats.free_count = 0;
    }
    if (g_local_stats.realloc_count) {
        g_realloc_count.fetch_add(g_local_stats.realloc_count, std::memory_order_relaxed);
        g_local_stats.realloc_count = 0;
    }
    if (g_local_stats.bytes_allocated) {
        g_bytes_allocated.fetch_add(g_local_stats.bytes_allocated, std::memory_order_relaxed);
        g_local_stats.bytes_allocated = 0;
    }
    if (g_local_stats.bytes_in_use_delta != 0) {
        g_bytes_in_use.fetch_add(g_local_stats.bytes_in_use_delta, std::memory_order_relaxed);
        g_local_stats.bytes_in_use_delta = 0;
    }
    g_local_stats.ops = 0;
}

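// Called once per malloc/free/realloc; flushes every STATS_FLUSH_INTERVAL ops.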
static inline void maybe_flush_local_stats_batch() {
    g_local_stats.ops++;
    if (g_local_stats.ops >= STATS_FLUSH_INTERVAL) {
        flush_local_stats_batch();
    }
}
} // namespace

static int zialloc_init(void);
static void zialloc_teardown(void);
static void zialloc_print_stats(void);
static bool zialloc_get_stats(allocator_stats_t *stats);
static bool zialloc_validate_heap(void);
static size_t zialloc_usable_size(void *ptr);
static allocator_stats_t zialloc_snapshot_stats(void);

namespace zialloc {

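// Meyers singleton: the function-local static gives thread-safe, lazy
// construction under C++11 without an explicit lock.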
class Allocator {
public:
    static Allocator &instance() {
        static Allocator alloc;
        return alloc;
    }

    void *malloc(size_t size);
    void free(void *ptr);
    void *realloc(void *ptr, size_t size);
    void *calloc(size_t nmemb, size_t size);

private:
    Allocator() = default;
    ~Allocator() = default;
};

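// Rejects zero-size and implausibly large requests up front: sizes within
// 4 KiB of SIZE_MAX would overflow once headers and alignment are added, and
// nothing larger than the reserved heap region can ever be satisfied.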
void *Allocator::malloc(size_t size) {
    if (size == 0)
        return nullptr;
    if (size >= (SIZE_MAX - 4096))
        return nullptr;
    if (size > HEAP_RESERVED_DEFAULT)
        return nullptr;
    if (!g_initialized && zialloc_init() != 0)
        return nullptr;

    void *ptr = memory::heap_alloc(size);
    if (!ptr)
        return nullptr;

    size_t usable = memory::heap_last_alloc_usable();
    if (usable == 0) {
        usable = memory::heap_usable_size(ptr);
    }
    g_local_stats.alloc_count++;
    g_local_stats.bytes_allocated += size;
    g_local_stats.bytes_in_use_delta += static_cast<int64_t>(usable);
    maybe_flush_local_stats_batch();
    return ptr;
}

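// free() aborts on a pointer the heap does not recognize; silently ignoring
// an invalid free would corrupt the in-use accounting.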
void Allocator::free(void *ptr) {
    if (!ptr)
        return;
    IS_HEAP_INITIALIZED(g_initialized);

    size_t usable = 0;
    if (!memory::free_dispatch_with_size(ptr, &usable))
        std::abort();

    g_local_stats.free_count++;
    g_local_stats.bytes_in_use_delta -= static_cast<int64_t>(usable);
    maybe_flush_local_stats_batch();
}

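// realloc keeps the original block whenever its usable size already covers
// the request; otherwise it allocates, copies the old usable bytes (which
// are strictly fewer than the new size on this path), and frees the old block.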
void *Allocator::realloc(void *ptr, size_t size) {
    if (ptr == nullptr)
        return malloc(size);
    IS_HEAP_INITIALIZED(g_initialized);
    if (size == 0) {
        free(ptr);
        return nullptr;
    }

    size_t old_usable = memory::heap_usable_size(ptr);
    if (old_usable >= size) {
        g_local_stats.realloc_count++;
        maybe_flush_local_stats_batch();
        return ptr;
    }

    void *new_ptr = malloc(size);
    if (!new_ptr)
        return nullptr;

    std::memcpy(new_ptr, ptr, old_usable);
    free(ptr);
    g_local_stats.realloc_count++;
    maybe_flush_local_stats_batch();
    return new_ptr;
}

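// calloc guards the nmemb * size multiplication against overflow before
// allocating, then zero-fills the requested bytes.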
void *Allocator::calloc(size_t nmemb, size_t size) {
    if (nmemb != 0 && size > SIZE_MAX / nmemb)
        return nullptr;
    size_t total = nmemb * size;
    void *ptr = malloc(total);
    if (!ptr)
        return nullptr;
    std::memset(ptr, 0, total);
    return ptr;
}

} // namespace zialloc

static void *zialloc_malloc(size_t size) {
    return zialloc::Allocator::instance().malloc(size);
}

static void zialloc_free(void *ptr) {
    zialloc::Allocator::instance().free(ptr);
}

static void *zialloc_realloc(void *ptr, size_t size) {
    return zialloc::Allocator::instance().realloc(ptr, size);
}

static void *zialloc_calloc(size_t nmemb, size_t size) {
    return zialloc::Allocator::instance().calloc(nmemb, size);
}

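// Returns 0 for a null pointer, matching the usual malloc_usable_size(NULL)
// convention.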
static size_t zialloc_usable_size(void *ptr) {
    if (!ptr)
        return 0;
    return zialloc::memory::heap_usable_size(ptr);
}

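// Snapshot merges the calling thread's pending batch with the global
// atomics. Batches held by other threads are not visible here, so totals
// can lag by up to STATS_FLUSH_INTERVAL operations per thread.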
static allocator_stats_t zialloc_snapshot_stats(void) {
    flush_local_stats_batch();
    allocator_stats_t snapshot = g_stats;
    snapshot.alloc_count = g_alloc_count.load(std::memory_order_relaxed);
    snapshot.free_count = g_free_count.load(std::memory_order_relaxed);
    snapshot.realloc_count = g_realloc_count.load(std::memory_order_relaxed);
    snapshot.bytes_allocated = g_bytes_allocated.load(std::memory_order_relaxed);
    const int64_t in_use = g_bytes_in_use.load(std::memory_order_relaxed);
    snapshot.bytes_in_use = in_use > 0 ? static_cast<size_t>(in_use) : 0;
    return snapshot;
}

static void zialloc_print_stats(void) {
    allocator_stats_t snapshot = zialloc_snapshot_stats();
    std::printf("  Allocations:  %llu\n", (unsigned long long)snapshot.alloc_count);
    std::printf("  Frees:        %llu\n", (unsigned long long)snapshot.free_count);
    std::printf("  Reallocs:     %llu\n", (unsigned long long)snapshot.realloc_count);
    std::printf("  Bytes in use: %zu\n", snapshot.bytes_in_use);
    std::printf("  Bytes mapped: %zu\n", snapshot.bytes_mapped);
    std::printf("  mmap calls:   %llu\n", (unsigned long long)snapshot.mmap_count);
    std::printf("  munmap calls: %llu\n", (unsigned long long)snapshot.munmap_count);
}

static bool zialloc_get_stats(allocator_stats_t *stats) {
    if (!stats)
        return false;
    *stats = zialloc_snapshot_stats();
    return true;
}

static bool zialloc_validate_heap(void) {
    return zialloc::memory::heap_validate();
}

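// One-time setup: reserve the full heap address range, then seed one segment
// per page size class so all three are active from the start. Assumes it runs
// before any concurrent use; g_initialized is a plain bool, so racing first
// calls from multiple threads are not protected here. Note that the reserved
// region is not released on the later failure paths.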
static int zialloc_init(void) {
    if (g_initialized)
        return 0;

    std::memset(&g_stats, 0, sizeof(g_stats));
    g_alloc_count.store(0, std::memory_order_relaxed);
    g_free_count.store(0, std::memory_order_relaxed);
    g_realloc_count.store(0, std::memory_order_relaxed);
    g_bytes_allocated.store(0, std::memory_order_relaxed);
    g_bytes_in_use.store(0, std::memory_order_relaxed);
    g_local_stats = {0, 0, 0, 0, 0, 0};

    const size_t heap_reserved_size = HEAP_RESERVED_DEFAULT;
    void *reserved_base = zialloc::memory::reserve_region(heap_reserved_size);
    if (!reserved_base)
        return -1;

    if (!zialloc::memory::heap_init_reserved(reserved_base, heap_reserved_size))
        return -1;

    // Feature toggles: zero-on-free and use-after-free checks default off for speed.
    zialloc::memory::set_zero_on_free_enabled(false);
    zialloc::memory::set_uaf_check_enabled(false);

    // Keep one small, medium, and large segment active from the start.
    if (!zialloc::memory::heap_add_segment_for_class(PAGE_SM))
        return -1;
    if (!zialloc::memory::heap_add_segment_for_class(PAGE_MED))
        return -1;
    if (!zialloc::memory::heap_add_segment_for_class(PAGE_LG))
        return -1;

    g_initialized = true;
    return 0;
}

static void zialloc_teardown(void) {
    if (!g_initialized)
        return;
    zialloc::memory::heap_clear_metadata();
    zialloc::memory::set_zero_on_free_enabled(false);
    zialloc::memory::set_uaf_check_enabled(false);
    std::memset(&g_stats, 0, sizeof(g_stats));
    g_alloc_count.store(0, std::memory_order_relaxed);
    g_free_count.store(0, std::memory_order_relaxed);
    g_realloc_count.store(0, std::memory_order_relaxed);
    g_bytes_allocated.store(0, std::memory_order_relaxed);
    g_bytes_in_use.store(0, std::memory_order_relaxed);
    g_local_stats = {0, 0, 0, 0, 0, 0};
    g_initialized = false;
}

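// Exported allocator_t vtable. Optional entry points (memalign,
// aligned_alloc, free_sized, realloc_array, bulk_free) are left null.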
allocator_t zialloc_allocator = {
    .malloc = zialloc_malloc,
    .free = zialloc_free,
    .realloc = zialloc_realloc,
    .calloc = zialloc_calloc,
    .memalign = nullptr,
    .aligned_alloc = nullptr,
    .usable_size = zialloc_usable_size,
    .free_sized = nullptr,
    .realloc_array = nullptr,
    .bulk_free = nullptr,
    .print_stats = zialloc_print_stats,
    .validate_heap = zialloc_validate_heap,
    .get_stats = zialloc_get_stats,
    .init = zialloc_init,
    .teardown = zialloc_teardown,
    .name = "Zialloc",
    .author = "ZiaRashid",
    .version = "1.0.0",
    .description = "Custom memory allocator",
    .memory_backend = "mmap",
    .features =
        {
            .thread_safe = true,
            .per_thread_cache = true,
            .huge_page_support = false,
            .guard_pages = false,
            .guard_location = GUARD_NONE,
            .canaries = false,
            .quarantine = false,
            .zero_on_free = false,
            .min_alignment = 16,
            .max_alignment = 16,
        },
};

extern "C" allocator_t *get_test_allocator(void) { return &zialloc_allocator; }
extern "C" allocator_t *get_bench_allocator(void) { return &zialloc_allocator; }