#pragma once
/* Number of entries in a zone's cache of known-free bit slots */
#define ZONE_FREE_LIST_SZ 255
/* Element count of root->zone_lookup_table, derived from SMALL_SIZE_MAX
 * (defined elsewhere) at 16-byte size granularity.
 * NOTE(review): the multiplier is sizeof(uint32_t) while the table element
 * type (zone_lookup_table_t) is uint16_t — looks like deliberate headroom or
 * a leftover; confirm against the table's users before tightening.
 * Fully parenthesized so the macro composes safely inside larger expressions. */
#define ZONE_LOOKUP_TABLE_SZ ((SMALL_SIZE_MAX >> 4) * sizeof(uint32_t))
/* Map a chunk size to its zone lookup table index (16-byte buckets).
 * Argument and expansion are parenthesized so SZ_TO_ZONE_LOOKUP_IDX(a + b)
 * evaluates as ((a + b) >> 4), not a + (b >> 4). */
#define SZ_TO_ZONE_LOOKUP_IDX(size) ((size) >> 4)
/* Byte size of the chunk-to-zone lookup table. ADDR_TO_CHUNK_TABLE below
 * yields indices in [0, 0xffff], so the table needs 65536 entries — the
 * previous 65535 left index 0xffff one entry out of bounds. */
#define CHUNK_TO_ZONE_TABLE_SZ (65536 * sizeof(uint16_t))
/* Map a user pointer to its chunk lookup table slot: bits 32..47 of the
 * address. The parameter is parenthesized before the cast so an expression
 * argument (e.g. base + off) is evaluated as a whole. */
#define ADDR_TO_CHUNK_TABLE(p) ((((uintptr_t) (p)) >> 32) & 0xffff)
/* Signed 64-bit so negative values can act as sentinels (e.g. "no free
 * slot") — TODO confirm against the code that consumes these types. */
typedef int64_t bit_slot_t;
typedef int64_t bitmap_index_t;
/* Element types for the two lookup tables stored in the allocator root */
typedef uint16_t zone_lookup_table_t;
typedef uint16_t chunk_lookup_table_t;
/* Choose the narrowest integer that can index a ZONE_FREE_LIST_SZ-entry
 * cache. With ZONE_FREE_LIST_SZ == 255, uint8_t (range 0..255) suffices. */
#if ZONE_FREE_LIST_SZ > 255
typedef uint16_t free_bit_slot_t;
#define FREE_LIST_SHF 16
#else
typedef uint8_t free_bit_slot_t;
#define FREE_LIST_SHF 8
#endif
/* Descriptor for one isolated allocation zone (one fixed chunk size class).
 * packed removes compiler padding; aligned(8) keeps the struct itself on an
 * 8-byte boundary. Fields are ordered roughly largest-first to minimize
 * misaligned members under packing — do not reorder. */
typedef struct {
void *user_pages_start; /* base of this zone's user-data pages */
void *bitmap_start; /* base of this zone's allocation-state bitmap */
int64_t next_free_bit_slot; /* cached next free bitmap slot — presumably negative means "none"; confirm */
bit_slot_t free_bit_slots[ZONE_FREE_LIST_SZ]; /* cache of recently found free bit slots */
uint64_t canary_secret; /* per-zone secret — presumably mixed into chunk canaries; confirm */
uint64_t pointer_mask; /* mask for obscuring stored pointers — confirm exact use */
bitmap_index_t max_bitmap_idx; /* upper bound for indices into the bitmap */
uint32_t chunk_size; /* size in bytes of every chunk in this zone */
uint32_t bitmap_size; /* size of the bitmap backing store */
uint32_t af_count; /* NOTE(review): meaning not visible here — confirm (alloc/free counter?) */
uint32_t chunk_count; /* number of chunks this zone can hold */
uint32_t alloc_count; /* allocation counter — confirm whether lifetime or live count */
uint16_t index; /* this zone's index within the root zones array */
uint16_t next_sz_index; /* presumably next zone serving the same size class — confirm */
free_bit_slot_t free_bit_slots_index; /* current position within free_bit_slots */
free_bit_slot_t free_bit_slots_usable; /* number of valid entries in free_bit_slots */
int8_t preallocated_bitmap_idx; /* index into root bitmaps[]; signed, so -1 can mean "none" — confirm */
#if CPU_PIN
uint8_t cpu_core; /* CPU core this zone is pinned to (CPU_PIN builds only) */
#endif
bool internal; /* zone serves allocator-internal allocations */
bool is_full; /* set when no free chunks remain */
#if MEMORY_TAGGING
bool tagged; /* memory tagging active for this zone (MEMORY_TAGGING builds only) */
#endif
} __attribute__((packed, aligned(sizeof(int64_t)))) iso_alloc_zone_t;
/* Header for a "big" (oversized) allocation, linked into singly linked
 * free/used lists rooted in iso_alloc_root. canary_a/canary_b bracket the
 * metadata so corruption on either side can be detected — confirm how the
 * expected values derive from big_zone_canary_secret. */
typedef struct iso_alloc_big_zone_t {
uint64_t canary_a; /* leading corruption-detection canary */
bool free; /* true when this entry sits on the free list — confirm */
uint64_t size; /* allocation size in bytes */
uint32_t ttl; /* time-to-live — presumably a retention countdown; confirm units */
void *user_pages_start; /* base of the user-visible mapping */
struct iso_alloc_big_zone_t *next; /* next list entry — presumably stored masked with big_zone_next_mask; confirm */
uint64_t canary_b; /* trailing corruption-detection canary */
} __attribute__((packed, aligned(sizeof(int64_t)))) iso_alloc_big_zone_t;
/* Supported preallocated bitmap sizes, in bytes */
#define BITMAP_SIZE_16 16
#define BITMAP_SIZE_32 32
#define BITMAP_SIZE_64 64
#define BITMAP_SIZE_128 128
#define BITMAP_SIZE_256 256
#define BITMAP_SIZE_512 512
#define BITMAP_SIZE_1024 1024
/* Bitmap size buckets, ordered largest-first and terminated by a 0 sentinel.
 * Sized in tandem with root->bitmaps[] (one bucket per entry, including the
 * sentinel slot). Storage-class specifier moved before the qualifier:
 * `const static` is valid but its non-leading placement is deprecated
 * (C11 6.11.5); `static const` is the conventional order. */
static const int small_bitmap_sizes[] = {
    BITMAP_SIZE_1024,
    BITMAP_SIZE_512,
    BITMAP_SIZE_256,
    BITMAP_SIZE_128,
    BITMAP_SIZE_64,
    BITMAP_SIZE_32,
    BITMAP_SIZE_16,
    0};
/* One preallocated bitmap buffer, handed out to zones of a matching
 * size bucket (see small_bitmap_sizes / root->bitmaps[]). */
typedef struct {
void *bitmap; /* backing memory for the bitmap */
uint32_t in_use; /* usage state/count — confirm exact semantics against users */
uint32_t bucket; /* which small_bitmap_sizes bucket this buffer serves — confirm */
} __attribute__((packed, aligned(sizeof(int64_t)))) iso_alloc_bitmap_t;
/* Allocator root: owns the zone array, lookup tables, chunk quarantine,
 * and the big-zone free/used lists. 8-byte aligned but, unlike the zone
 * structs, NOT packed. */
typedef struct {
iso_alloc_zone_t *zones; /* zone descriptor array (zones_size bytes; zones_used live entries) */
chunk_lookup_table_t *chunk_lookup_table; /* table indexed via ADDR_TO_CHUNK_TABLE(p) — confirm value semantics */
uintptr_t *chunk_quarantine; /* quarantined chunk addresses (chunk_quarantine_count entries) */
iso_alloc_big_zone_t *big_zone_free; /* head of the free big-zone list */
iso_alloc_big_zone_t *big_zone_used; /* head of the in-use big-zone list */
#if NO_ZERO_ALLOCATIONS
void *zero_alloc_page; /* page handed back for size-0 allocations (NO_ZERO_ALLOCATIONS builds) */
#endif
#if UAF_PTR_PAGE
void *uaf_ptr_page; /* special page — presumably for use-after-free pointer detection; confirm */
#endif
zone_lookup_table_t zone_lookup_table[ZONE_LOOKUP_TABLE_SZ]; /* size-class -> zone mapping, indexed via SZ_TO_ZONE_LOOKUP_IDX */
iso_alloc_bitmap_t bitmaps[sizeof(small_bitmap_sizes) / sizeof(int)]; /* preallocated bitmaps, one slot per small_bitmap_sizes bucket */
uint64_t zone_handle_mask; /* mask for zone handle encoding — confirm */
uint64_t big_zone_next_mask; /* mask — presumably applied to big-zone next pointers; confirm */
uint64_t big_zone_canary_secret; /* secret behind canary_a/canary_b values — confirm derivation */
uint64_t seed; /* randomness seed — source and use not visible here */
size_t chunk_quarantine_count; /* number of entries currently in chunk_quarantine */
size_t zones_size; /* allocated byte size of the zones array — confirm bytes vs entries */
#if THREAD_SUPPORT
#if USE_SPINLOCK
atomic_flag big_zone_free_flag; /* spinlock guarding the big_zone_free list */
atomic_flag big_zone_used_flag; /* spinlock guarding the big_zone_used list */
#else
pthread_mutex_t big_zone_free_mutex; /* mutex guarding the big_zone_free list */
pthread_mutex_t big_zone_used_mutex; /* mutex guarding the big_zone_used list */
#endif
#endif
uint32_t zone_retirement_shf; /* shift used by the zone retirement policy — confirm */
int32_t big_zone_free_count; /* length of the big_zone_free list */
int32_t big_zone_used_count; /* length of the big_zone_used list */
uint16_t zones_used; /* number of live entries in zones */
#if ARM_MTE
bool arm_mte_enabled; /* ARM memory tagging active (ARM_MTE builds only) */
#endif
} __attribute__((aligned(sizeof(int64_t)))) iso_alloc_root;
/* Record of a retired zone's pages held in quarantine before final release.
 * NOTE(review): purpose inferred from the name and fields — confirm against
 * the quarantine machinery. */
typedef struct {
void *user_pages_start; /* user pages of the quarantined zone */
void *bitmap_start; /* bitmap pages of the quarantined zone */
uint32_t bitmap_size; /* size of the bitmap mapping */
uint8_t ttl; /* remaining time-to-live before release — confirm units (frees? epochs?) */
} __attribute__((aligned(sizeof(int64_t)))) zone_quarantine_t;
/* Pairs a chunk size with the zone that serves it — presumably an entry in
 * a thread-local zone cache ("tzc"); confirm against users.
 * NOTE(review): the leading-underscore tag sits in the implementation's
 * reserved identifier space (C11 7.1.3); renaming would touch every user,
 * so it is left as-is. */
typedef struct {
size_t chunk_size; /* chunk size this cache entry answers for */
iso_alloc_zone_t *zone; /* zone that serves chunk_size */
} __attribute__((aligned(sizeof(int64_t)))) _tzc;