1#ifndef LIBCLOD_ALLOCATOR_H
2#define LIBCLOD_ALLOCATOR_H
4#include "clod_config.h"
12#define TREE_MAX_DEPTH 5
19typedef uint32_t block_ptr;
21#define IS_BLOCK_ALIGNED(n) ((uintptr_t)(n) % BLOCK_SIZE == 0)
22#define BLOCK_PTR_NULL ((block_ptr)0)
42#define SLAB_CLASS_COUNT 13
46 if ((
char*)
node < (
char*)head)
return BLOCK_PTR_NULL;
47 if (!IS_BLOCK_ALIGNED(head) || !IS_BLOCK_ALIGNED(
node))
return BLOCK_PTR_NULL;
49 return (block_ptr)((
char*)
node - (
char*)head) + TYPE_NODE;
53 if ((
char*)span < (
char*)head)
return BLOCK_PTR_NULL;
54 if (!IS_BLOCK_ALIGNED(head) || !IS_BLOCK_ALIGNED(span))
return BLOCK_PTR_NULL;
56 return (block_ptr)((
char*)span - (
char*)head) + TYPE_SPAN;
58static block_ptr slab_ptr(
clod_allocator *head,
char *slab,
const int slab_class) {
60 if (1 > slab_class || slab_class > 13)
return BLOCK_PTR_NULL;
61 if ((
char*)slab < (
char*)head)
return BLOCK_PTR_NULL;
62 if (!IS_BLOCK_ALIGNED(head) || !IS_BLOCK_ALIGNED(slab))
return BLOCK_PTR_NULL;
64 return (block_ptr)((
char*)slab - (
char*)head + TYPE_SLAB + (slab_class << 4));
68 if ((
char*)
node < (
char*)head)
return BLOCK_PTR_NULL;
69 if (!IS_BLOCK_ALIGNED(
node) || !IS_BLOCK_ALIGNED(head))
return BLOCK_PTR_NULL;
70 if (0 > index || index >= TREE_ORDER)
return BLOCK_PTR_NULL;
72 return (block_ptr)((
char*)
node - (
char*)head) + TYPE_BRANCH + ((block_ptr)index << 3);
75static int ptr_type(
const block_ptr bptr) {
76 if ((bptr & 0xFF) == TYPE_NODE)
return TYPE_NODE;
77 if ((bptr & 0xFF) == TYPE_SPAN)
return TYPE_SPAN;
78 if ((bptr & 0x0F) == TYPE_SLAB)
return TYPE_SLAB;
79 if ((bptr & 0x07) == TYPE_BRANCH)
return TYPE_BRANCH;
82static uint32_t block_offset(
const block_ptr bptr) {
return bptr & 0xFFFFFF00; }
85 if ((bptr & 0xFF) != TYPE_NODE && (bptr & 0x7) != TYPE_BRANCH)
return nullptr;
86 return (
struct node*)((
char*)head + block_offset(bptr));
89 if ((bptr & 0xFF) != TYPE_SPAN)
return nullptr;
90 return (
char*)head + block_offset(bptr);
93 if ((bptr & 0xF) != TYPE_SLAB)
return nullptr;
94 return (
char*)head + block_offset(bptr);
96static int slab_type_get(
const block_ptr bptr) {
97 if ((bptr & 0xF) != TYPE_SLAB)
return 0;
98 int type = (bptr & 0xF0) >> 4;
100 if (1 > type || type > 13)
return 0;
104static int branch_index_get(
const block_ptr bptr) {
105 if ((bptr & 0x7) != TYPE_BRANCH)
return 0;
106 return (bptr & 0xF8) >> 3;
136 alignas(8)
struct branch branches[TREE_ORDER];
139static_assert(
sizeof(
struct node) <= 256);
141typedef struct branch **path;
142#define PATH_NEW (&(struct branch*[TREE_MAX_DEPTH + 1]){nullptr}[1])
144static struct branch *path_pop(path *path) {
return **path ==
nullptr ? nullptr : *--*path; }
145static struct branch *path_get(
const path path,
const int parent) {
146 for (
int i = 0; i < parent; i++)
147 if (path[-i] ==
nullptr)
return nullptr;
148 return path[-parent];
154 if (path_get(*path, 0) ==
nullptr) {
155 path_push(path, root_node->branches);
158 return tree_next(head, path);
161void tree_search(
clod_allocator *head,
struct node *root_node, block_ptr key, path *path);
188 alignas(BLOCK_SIZE)
struct node free;
189 alignas(BLOCK_SIZE)
struct node used;
194#define METADATA_CRC_INIT UINT32_C(0xAE11F296)
195static void crc_check(
void *block) {
196 #if CLOD_MEMORY_DEBUG
197 uint32_t crc =
clod_crc32_add(METADATA_CRC_INIT, (
char*)block +
sizeof(uint32_t), BLOCK_SIZE -
sizeof(uint32_t));
200 uint32_t crc = (uint32_t)block;
203 if (*(uint32_t*)block != crc) {
204 fatal_old(CLOD_MEMORY_DEBUG,
"Allocator metadata is corrupted.");
/* Recompute the block's checksum and store it in the leading 32-bit word.
 * The CRC covers everything after the checksum field itself. */
static void crc_update(void *block) {
#if CLOD_MEMORY_DEBUG
	uint32_t crc = clod_crc32_add(METADATA_CRC_INIT,
			(char*)block + sizeof(uint32_t),
			BLOCK_SIZE - sizeof(uint32_t));
#else
	/* Fix: casting a pointer straight to uint32_t truncates on LP64 and
	 * triggers -Wpointer-to-int-cast; go through uintptr_t first.
	 * NOTE(review): the #else/#endif placement is reconstructed from the
	 * duplicate `crc` declaration — confirm against the original file. */
	uint32_t crc = (uint32_t)(uintptr_t)block;
#endif
	*(uint32_t*)block = crc;
}
uint32_t clod_crc32_add(uint32_t crc, const void *data, size_t data_len)
Memory allocation methods.
block_ptr ptr
Either the next node in the tree or the block value.
uint32_t checksum
Checksum.
block_ptr last_slab_branch[13]
void(* free)(void *ptr, void *self)
uint32_t size
Size of backing memory.
uint32_t checksum
Checksum.