#include <alloc/common.h>
#include <alloc/asan.h>
#ifdef BPF_ARENA_ASAN
#pragma clang attribute push(__attribute__((no_sanitize("address"))), \
apply_to = function)
/* Sentinel returned by first_nonzero_byte() when the scanned shadow
 * range contains only zeroes (i.e. the region is fully addressable). */
#define SHADOW_ALL_ZEROES ((u64)-1)
/* Arena address of the most recent ASAN violation; 0 while none has
 * occurred.  volatile so readers always observe the latest store. */
volatile u64 asan_violated = 0;
/* Arena offset where the shadow map begins; set once by asan_init().
 * The name presumably matches the symbol compiler-generated ASAN
 * instrumentation references -- TODO confirm against the toolchain. */
u64 __asan_shadow_memory_dynamic_address;
/* Sticky flag: set on the first reported violation, never cleared. */
static bool reported = false;
/* Set once asan_init() has successfully placed the shadow map. */
static bool inited = false;
/*
 * Byte-wise memset over arena memory, used below to fill shadow bytes.
 * Open-coded with bpf_for so the BPF verifier can bound the loop and so
 * no (instrumented) compiler memset is emitted.
 * Always returns 0.
 */
__always_inline int asan_memset(s8a __arg_arena *dst, s8 val, size_t size)
{
	int i;

	bpf_for(i, 0, size) {
		dst[i] = val;
	}
	return 0;
}
/*
 * Check whether the single byte at @addr is poisoned.
 *
 * Shadow byte 0 means the whole granule is addressable (fast path).
 * Otherwise a positive shadow value encodes how many leading bytes of
 * the granule are valid, so the byte is poisoned when its offset within
 * the granule (ASAN_GRANULE(addr)) reaches that count -- assumes
 * ASAN_GRANULE() yields the low offset bits of the address; this mirrors
 * the KASAN generic-mode encoding.  TODO(review): confirm the macro's
 * type so the comparison also handles negative (redzone) shadow values
 * as intended.
 */
static __always_inline bool memory_is_poisoned_1(s8a *addr)
{
	s8 shadow_value = asan_shadow_value(addr);

	if (likely(!shadow_value))
		return false;
	return ASAN_GRANULE(addr) >= shadow_value;
}
/*
 * Check a naturally-sized access of 2, 4 or 8 bytes starting at @addr.
 *
 * If the last byte's offset within its granule is at least size-1, the
 * whole access fits inside one granule and checking the last byte
 * suffices.  Otherwise the access straddles a granule boundary: the
 * first granule's shadow must be fully addressable (zero) and the last
 * byte is checked individually.
 */
static __always_inline bool memory_is_poisoned_2_4_8(s8a *addr, u64 size)
{
	u64 end = (u64)addr + size - 1;

	if (likely(ASAN_GRANULE(end) >= size - 1))
		return memory_is_poisoned_1((s8a *)end);
	return asan_shadow_value(addr) || memory_is_poisoned_1((s8a *)end);
}
/*
 * Report whether the shadow marks the byte at @addr as poisoned.
 * __weak so a caller elsewhere can override it.
 *
 * NOTE(review): despite the name, this only *reads* the shadow (it
 * forwards to memory_is_poisoned_1()); it does not set anything.
 * Confirm the name matches the intended contract.
 */
__weak bool asan_shadow_set(void __arena __arg_arena *addr)
{
	return memory_is_poisoned_1(addr);
}
/*
 * Scan @size shadow bytes starting at @addr and return the address of
 * the first non-zero byte, or SHADOW_ALL_ZEROES ((u64)-1) when the
 * whole range is zero.  The can_loop condition keeps the loop bounded
 * for the BPF verifier.
 */
static __always_inline u64 first_nonzero_byte(u64 addr, size_t size)
{
	while (size && can_loop) {
		if (unlikely(*(s8a *)addr))
			return addr;
		addr += 1;
		size -= 1;
	}
	return SHADOW_ALL_ZEROES;
}
/*
 * Check an arbitrary-sized access [@addr, @addr + @size).
 *
 * Scans every shadow byte covering the range.  All zero means fully
 * addressable.  A non-zero byte anywhere but the last shadow slot means
 * poisoned.  If only the *last* shadow byte is non-zero, the access is
 * still valid when the final byte's offset within its granule is below
 * that shadow value (partial granule at the end of an allocation).
 */
static __always_inline bool memory_is_poisoned_n(s8a *addr, u64 size)
{
	u64 ret;
	u64 start;
	u64 end;

	start = (u64)mem_to_shadow(addr);
	end = (u64)mem_to_shadow(addr + size - 1);
	ret = first_nonzero_byte(start, (end - start) + 1);
	if (likely(ret == SHADOW_ALL_ZEROES))
		return false;
	return __builtin_expect(ret != end || ASAN_GRANULE(addr + size - 1) >=
				*(s8a *)end, false);
}
/*
 * Record an ASAN violation: set the sticky 'reported' flag and publish
 * the faulting arena address in asan_violated for external observers.
 * Does not abort the program.
 *
 * @addr: faulting arena address.
 * @sz, @write: size and direction of the access; currently unused.
 * Always returns 0.
 *
 * Fix: removed the malformed line "#if 0#endif" -- two preprocessor
 * directives fused onto one line, which is invalid (each directive must
 * occupy its own line, and '#' cannot appear in an #if expression).
 */
static __always_inline int asan_report(s8a __arg_arena *addr, size_t sz,
				       bool write)
{
	reported = true;
	asan_violated = (u64)addr;
	return 0;
}
/*
 * Pre-validate an access before consulting the shadow map.
 * Returns true when the outcome is already decided, storing the verdict
 * in *result; returns false (leaving *result untouched) when the caller
 * must perform a real shadow check.
 */
static __always_inline bool check_asan_args(s8a *addr, size_t size,
					    bool *result)
{
	/* A zero-sized access is trivially valid. */
	if (unlikely(size == 0)) {
		*result = true;
		return true;
	}
	/* The range wraps around the address space: definitely invalid. */
	if (unlikely(addr + size < addr)) {
		*result = false;
		return true;
	}
	/* Undecided; the shadow map has to be consulted. */
	return false;
}
/*
 * Core check for an access of @size bytes at @ptr.
 * Fast-paths naturally-sized accesses (1/2/4/8) and falls back to a
 * full shadow scan otherwise.  On a poisoned or invalid access the
 * violation is recorded via asan_report() and false is returned;
 * returns true when the access is clean.
 */
static __always_inline bool check_region_inline(void *ptr, size_t size,
						bool write)
{
	s8a *region = (s8a *)(u64)ptr;
	bool poisoned, valid;

	/* Degenerate sizes are decided without touching the shadow. */
	if (check_asan_args(region, size, &valid)) {
		if (!valid)
			asan_report(region, size, write);
		return valid;
	}

	if (size == 1)
		poisoned = memory_is_poisoned_1(region);
	else if (size == 2 || size == 4 || size == 8)
		poisoned = memory_is_poisoned_2_4_8(region, size);
	else
		poisoned = memory_is_poisoned_n(region, size);

	if (!poisoned)
		return true;
	asan_report(region, size, write);
	return false;
}
/*
 * Instantiate the compiler-ABI entry points for one fixed access size:
 * __asan_{load,store}SIZE, their _noabort variants, and the
 * corresponding __asan_report_* hooks.  The load/store variants consult
 * the shadow map through check_region_inline(); the report variants
 * record the violation unconditionally.
 *
 * NOTE(review): only __asan_store##size##_noabort is marked
 * __always_inline -- the other seven variants are not.  Looks
 * accidental; confirm whether the attribute belongs on symbols the
 * compiler emits out-of-line calls to.
 */
#define DEFINE_ASAN_LOAD_STORE(size)					\
__hidden void __asan_store##size(void *addr)				\
{									\
	check_region_inline(addr, size, true);				\
}									\
__hidden void __always_inline __asan_store##size##_noabort(void *addr)	\
{									\
	check_region_inline(addr, size, true);				\
}									\
__hidden void __asan_load##size(void *addr)				\
{									\
	check_region_inline(addr, size, false);				\
}									\
__hidden void __asan_load##size##_noabort(void *addr)			\
{									\
	check_region_inline(addr, size, false);				\
}									\
__hidden void __asan_report_store##size(void *addr)			\
{									\
	asan_report((s8a *)addr, size, true);				\
}									\
__hidden void __asan_report_store##size##_noabort(void *addr)		\
{									\
	asan_report((s8a *)addr, size, true);				\
}									\
__hidden void __asan_report_load##size(void *addr)			\
{									\
	asan_report((s8a *)addr, size, false);				\
}									\
__hidden void __asan_report_load##size##_noabort(void *addr)		\
{									\
	asan_report((s8a *)addr, size, false);				\
}
/* Hooks for the naturally-aligned access sizes ASAN special-cases. */
DEFINE_ASAN_LOAD_STORE(1);
DEFINE_ASAN_LOAD_STORE(2);
DEFINE_ASAN_LOAD_STORE(4);
DEFINE_ASAN_LOAD_STORE(8);
/* Compiler hook for a variable-sized store of @size bytes at @addr.
 * A negative ssize_t converts to a huge size_t in the callee, where the
 * wrap-around check in check_asan_args() rejects it. */
void __asan_storeN(void *addr, ssize_t size)
{
	check_region_inline(addr, size, true);
}
/* Compiler hook for a variable-sized load of @size bytes at @addr. */
void __asan_loadN(void *addr, ssize_t size)
{
	check_region_inline(addr, size, false);
}
/* Unimplemented stub: only logs that the hook was reached. */
void __asan_register_globals(void *globals, size_t n)
{
	bpf_printk("Emitted %s", __func__);
}
/* Unimplemented stub: only logs that the hook was reached. */
void __asan_unregister_globals(void *globals, size_t n)
{
	bpf_printk("Emitted %s", __func__);
}
/* Unimplemented stub: logs and returns NULL without copying.
 * NOTE(review): the real __asan_memcpy performs the copy and returns
 * @d; if the compiler ever emits calls to this, callers will break.
 * Confirm these interceptors are unreachable in this build. */
void *__asan_memcpy(void *d, const void *s, size_t n)
{
	bpf_printk("Emitted %s", __func__);
	return NULL;
}
/* Unimplemented stub: logs and returns NULL without moving (see the
 * NOTE on __asan_memcpy above -- same caveat applies). */
void *__asan_memmove(void *d, const void *s, size_t n)
{
	bpf_printk("Emitted %s", __func__);
	return NULL;
}
/* Unimplemented stub: logs and returns NULL without filling (see the
 * NOTE on __asan_memcpy above -- same caveat applies). */
void *__asan_memset(void *p, int c, size_t n)
{
	bpf_printk("Emitted %s", __func__);
	return NULL;
}
/*
 * Poison @size bytes of arena memory starting at @addr by writing @val
 * into every covering shadow byte.  Both the start address and the
 * length must be granule-aligned; returns -EINVAL otherwise, 0 on
 * success.
 */
__hidden __noasan int asan_poison(void __arena *addr, s8 val, size_t size)
{
	s8a *shadow_base;
	size_t shadow_len;

	/* Start and length must both be multiples of the granule size;
	 * OR-ing them tests both in one mask check. */
	if (unlikely(((u64)addr | size) & ASAN_GRANULE_MASK))
		return -EINVAL;

	shadow_base = mem_to_shadow(addr);
	shadow_len = size >> ASAN_SHADOW_SHIFT;
	asan_memset(shadow_base, val, shadow_len);
	return 0;
}
/*
 * Unpoison @size bytes of arena memory starting at @addr.  The start
 * must be granule-aligned (-EINVAL otherwise), but the length may end
 * mid-granule: whole granules get shadow byte 0 (fully addressable) and
 * a trailing partial granule records how many of its bytes are valid.
 * Returns 0 on success.
 */
__hidden __noasan int asan_unpoison(void __arena *addr, size_t size)
{
	size_t tail = size & ASAN_GRANULE_MASK;
	size_t full_granules;
	s8a *shadow_base;

	if (unlikely((u64)addr & ASAN_GRANULE_MASK))
		return -EINVAL;

	shadow_base = mem_to_shadow(addr);
	full_granules = size >> ASAN_SHADOW_SHIFT;
	asan_memset(shadow_base, 0, full_granules);
	if (tail)
		shadow_base[full_granules] = tail;
	return 0;
}
SEC("syscall")
__hidden __noasan int asan_init(struct asan_init_args *args)
{
u64 globals_pages = args->arena_globals_pages;
u64 all_pages = args->arena_all_pages;
u64 shadowmap, shadow_pgoff;
u64 shadow_pages;
if (inited)
return 0;
shadow_pages = all_pages >> ASAN_SHADOW_SHIFT;
if (all_pages > (1ULL << 32) >> PAGE_SHIFT) {
bpf_printk("error: arena size %lx too large", all_pages);
return -EINVAL;
}
if (globals_pages > all_pages) {
bpf_printk("error: globals %lx do not fit in arena %lx", globals_pages, all_pages);
return -EINVAL;
}
if (globals_pages + shadow_pages > all_pages) {
bpf_printk("error: globals %lx do not leave room for shadow map %lx (arena pages %lx)",
globals_pages, shadow_pages, all_pages);
return -EINVAL;
}
shadow_pgoff = all_pages - shadow_pages - globals_pages;
__asan_shadow_memory_dynamic_address = shadow_pgoff * PAGE_SIZE;
shadowmap = (u64)bpf_arena_alloc_pages(
&arena, (void __arena *)__asan_shadow_memory_dynamic_address,
shadow_pages, NUMA_NO_NODE, 0);
if (!shadowmap) {
arena_stderr("Could not allocate shadow map\n");
return -ENOMEM;
}
inited = true;
return 0;
}
#pragma clang attribute pop
#endif