use crate::{
Reachability,
func_environ::FuncEnvironment,
translate::{HeapData, TargetEnvironment},
trap::TranslateTrap,
};
use Reachability::*;
use cranelift_codegen::{
cursor::{Cursor, FuncCursor},
ir::{self, InstBuilder, RelSourceLoc, condcodes::IntCC},
};
use cranelift_frontend::FunctionBuilder;
/// Describes the kind of bounds check to perform for a heap access: either a
/// plain memory access at a static offset, or (with the `gc` feature) an
/// access to a field of an object allocated inside the heap.
#[derive(Debug)]
pub enum BoundsCheck {
    /// Check an `access_size`-byte access at `index + offset`.
    StaticOffset { offset: u32, access_size: u8 },
    /// Check a field access where the field offset, access size, and the
    /// containing object's total size are all known at compile time.
    #[cfg(feature = "gc")]
    StaticObjectField {
        // Byte offset of the field within the object.
        offset: u32,
        // Size of the access, in bytes.
        access_size: u8,
        // Total size of the containing object, in bytes; must cover
        // `offset + access_size` (asserted at the use site).
        object_size: u32,
    },
    /// Check a field access where the field offset and object size are only
    /// known dynamically, as IR values.
    #[cfg(feature = "gc")]
    DynamicObjectField {
        offset: ir::Value,
        object_size: ir::Value,
    },
}
/// Emit a bounds check for the access described by `bounds_check` and return
/// the native address to access.
///
/// Returns `Unreachable` when the access is statically known to trap
/// unconditionally; in that case the trapping code has already been emitted
/// and translation of the current instruction should stop.
pub fn bounds_check_and_compute_addr(
    builder: &mut FunctionBuilder,
    env: &mut FuncEnvironment<'_>,
    heap: &HeapData,
    index: ir::Value,
    bounds_check: BoundsCheck,
    trap: ir::TrapCode,
) -> Reachability<ir::Value> {
    match bounds_check {
        // Plain memory access: delegate directly to the general check.
        BoundsCheck::StaticOffset {
            offset,
            access_size,
        } => bounds_check_field_access(builder, env, heap, index, offset, access_size, trap),
        #[cfg(feature = "gc")]
        BoundsCheck::StaticObjectField {
            offset,
            access_size,
            object_size,
        } => {
            // The field access must lie entirely within the object; the
            // caller is responsible for upholding this invariant.
            let offset_and_access_size = offset.checked_add(access_size.into()).unwrap();
            assert!(offset_and_access_size <= object_size);
            // Fast path: when the whole object's size fits in the `u8`
            // access-size parameter, bounds-check the entire object once at
            // offset 0, then derive the field address by adding the static
            // field offset to the object's base pointer. This keeps the
            // field access itself check-free.
            if let Ok(object_size) = u8::try_from(object_size) {
                let obj_ptr = match bounds_check_field_access(
                    builder,
                    env,
                    heap,
                    index,
                    0,
                    object_size,
                    trap,
                ) {
                    Reachable(v) => v,
                    u @ Unreachable => return u,
                };
                let offset = builder.ins().iconst(env.pointer_type(), i64::from(offset));
                let field_ptr = builder.ins().iadd(obj_ptr, offset);
                return Reachable(field_ptr);
            }
            // Otherwise fall back to checking just the accessed field.
            bounds_check_field_access(builder, env, heap, index, offset, access_size, trap)
        }
        #[cfg(feature = "gc")]
        BoundsCheck::DynamicObjectField {
            offset,
            object_size,
        } => {
            // This path only supports 32-bit heaps and 32-bit operands.
            assert_eq!(heap.index_type(), ir::types::I32);
            assert_eq!(builder.func.dfg.value_type(index), ir::types::I32);
            assert_eq!(builder.func.dfg.value_type(offset), ir::types::I32);
            assert_eq!(builder.func.dfg.value_type(object_size), ir::types::I32);
            // Check the *end* of the object: `index + object_size` must be
            // in bounds (a zero-sized "access" at that point), trapping on
            // 32-bit overflow of the addition itself.
            let index_and_object_size = builder.ins().uadd_overflow_trap(index, object_size, trap);
            let ptr_just_after_obj = match bounds_check_field_access(
                builder,
                env,
                heap,
                index_and_object_size,
                0,
                0,
                trap,
            ) {
                Reachable(v) => v,
                u @ Unreachable => return u,
            };
            // Walk backwards from the end of the object to the field:
            // field_ptr = (base + index + object_size) - (object_size - offset).
            // NOTE(review): assumes `offset <= object_size` so the
            // subtraction does not wrap — confirm callers guarantee this.
            let backwards_offset = builder.ins().isub(object_size, offset);
            let backwards_offset = cast_index_to_pointer_ty(
                backwards_offset,
                ir::types::I32,
                env.pointer_type(),
                &mut builder.cursor(),
                trap,
            );
            let field_ptr = builder.ins().isub(ptr_just_after_obj, backwards_offset);
            Reachable(field_ptr)
        }
    }
}
/// Bounds-check an `access_size`-byte access at `index + offset` within
/// `heap`, and return the native address to access (or `Unreachable` when
/// the access unconditionally traps).
///
/// This is a ladder of progressively cheaper strategies: each `if` below is
/// a special case that, when it applies, emits less (or no) checking code
/// than the fully general case at the bottom.
fn bounds_check_field_access(
    builder: &mut FunctionBuilder,
    env: &mut FuncEnvironment<'_>,
    heap: &HeapData,
    index: ir::Value,
    offset: u32,
    access_size: u8,
    trap: ir::TrapCode,
) -> Reachability<ir::Value> {
    let pointer_bit_width = u16::try_from(env.pointer_type().bits()).unwrap();
    let clif_memory_traps_enabled = env.clif_memory_traps_enabled();
    // Spectre mitigations only make sense when CLIF memory traps are in use.
    let spectre_mitigations_enabled =
        env.heap_access_spectre_mitigation() && clif_memory_traps_enabled;
    let host_page_size_log2 = env.target_config().page_size_align_log2;
    let can_use_virtual_memory = heap
        .memory
        .can_use_virtual_memory(env.tunables(), host_page_size_log2)
        && clif_memory_traps_enabled;
    let can_elide_bounds_check = heap
        .memory
        .can_elide_bounds_check(env.tunables(), host_page_size_log2)
        && clif_memory_traps_enabled;
    let memory_guard_size = env.tunables().memory_guard_size;
    let memory_reservation = env.tunables().memory_reservation;
    // Total extent of the access past `index`: cannot overflow u64 since the
    // operands are u32 and u8.
    let offset_and_size = offset_plus_size(offset, access_size);
    // Must be computed before `index` is cast below, since it inspects the
    // defining instruction of the original (index-typed) value.
    let statically_in_bounds = statically_in_bounds(&builder.func, heap, index, offset_and_size);
    let index = cast_index_to_pointer_ty(
        index,
        heap.index_type(),
        env.pointer_type(),
        &mut builder.cursor(),
        trap,
    );
    // How to handle a possibly-out-of-bounds access: either branch to an
    // explicit trap, or redirect the address to null and let the resulting
    // access fault (optionally with a speculation-safe select).
    let oob_behavior = if spectre_mitigations_enabled {
        OobBehavior::ConditionallyLoadFromZero {
            select_spectre_guard: true,
        }
    } else if env.load_from_zero_allowed() {
        OobBehavior::ConditionallyLoadFromZero {
            select_spectre_guard: false,
        }
    } else {
        OobBehavior::ExplicitTrap
    };
    let make_compare =
        |builder: &mut FunctionBuilder, compare_kind: IntCC, lhs: ir::Value, rhs: ir::Value| {
            builder.ins().icmp(compare_kind, lhs, rhs)
        };
    // Case: the access is larger than the memory can ever be — it traps no
    // matter what the index is.
    if offset_and_size > heap.memory.maximum_byte_size().unwrap_or(u64::MAX) {
        env.before_unconditionally_trapping_memory_access(builder);
        env.trap(builder, trap);
        return Unreachable;
    }
    // Case: the access extent cannot even be represented in the pointer
    // type — again an unconditional trap.
    if pointer_bit_width < 64 && offset_and_size >= (1 << pointer_bit_width) {
        env.before_unconditionally_trapping_memory_access(builder);
        env.trap(builder, trap);
        return Unreachable;
    }
    // Case: a 32-bit index into a memory whose reservation plus guard region
    // covers the whole u32 index range — no bounds check needed at all;
    // out-of-bounds accesses land in the guard region and fault.
    // NOTE(review): assumes `memory_reservation + memory_guard_size >=
    // offset_and_size` so the subtraction cannot underflow — confirm the
    // earlier cases guarantee this.
    if can_elide_bounds_check
        && u64::from(u32::MAX) <= memory_reservation + memory_guard_size - offset_and_size
    {
        assert!(heap.index_type() == ir::types::I32);
        assert!(
            can_use_virtual_memory,
            "static memories require the ability to use virtual memory"
        );
        return Reachable(compute_addr(
            &mut builder.cursor(),
            heap,
            env.pointer_type(),
            index,
            offset,
        ));
    }
    // Case: the index is a constant proven in bounds — skip the check.
    if statically_in_bounds {
        return Reachable(compute_addr(
            &mut builder.cursor(),
            heap,
            env.pointer_type(),
            index,
            offset,
        ));
    }
    // Case: a non-moving memory whose full reservation is mapped — compare
    // the index against the constant `reservation - offset_and_size` instead
    // of loading the dynamic bound.
    if can_use_virtual_memory
        && heap.memory.minimum_byte_size().unwrap_or(u64::MAX) <= memory_reservation
        && !heap.memory.memory_may_move(env.tunables())
        && memory_reservation >= offset_and_size
    {
        let adjusted_bound = memory_reservation.checked_sub(offset_and_size).unwrap();
        let adjusted_bound_value = builder
            .ins()
            .iconst(env.pointer_type(), adjusted_bound as i64);
        let oob = make_compare(
            builder,
            IntCC::UnsignedGreaterThan,
            index,
            adjusted_bound_value,
        );
        return Reachable(explicit_check_oob_condition_and_compute_addr(
            env,
            builder,
            heap,
            index,
            offset,
            oob_behavior,
            oob,
            trap,
        ));
    }
    // Case: a single-byte access — in bounds iff `index < bound`, so one
    // compare against the dynamic bound suffices (not used on Pulley).
    if offset_and_size == 1 && !env.is_pulley() {
        let bound = get_dynamic_heap_bound(builder, env, heap);
        let oob = make_compare(builder, IntCC::UnsignedGreaterThanOrEqual, index, bound);
        return Reachable(explicit_check_oob_condition_and_compute_addr(
            env,
            builder,
            heap,
            index,
            offset,
            oob_behavior,
            oob,
            trap,
        ));
    }
    // Case: the access fits within the guard region — checking only
    // `index > bound` is sufficient; any overhang lands in the guard pages.
    if can_use_virtual_memory && offset_and_size <= memory_guard_size {
        let bound = get_dynamic_heap_bound(builder, env, heap);
        let oob = make_compare(builder, IntCC::UnsignedGreaterThan, index, bound);
        return Reachable(explicit_check_oob_condition_and_compute_addr(
            env,
            builder,
            heap,
            index,
            offset,
            oob_behavior,
            oob,
            trap,
        ));
    }
    // Case: the access extent is no larger than the memory's minimum size,
    // so `bound - offset_and_size` cannot underflow; compare the index
    // against that adjusted bound.
    if offset_and_size <= heap.memory.minimum_byte_size().unwrap_or(u64::MAX) {
        let bound = get_dynamic_heap_bound(builder, env, heap);
        let adjustment = offset_and_size as i64;
        let adjustment_value = builder.ins().iconst(env.pointer_type(), adjustment);
        let adjusted_bound = builder.ins().isub(bound, adjustment_value);
        let oob = make_compare(builder, IntCC::UnsignedGreaterThan, index, adjusted_bound);
        return Reachable(explicit_check_oob_condition_and_compute_addr(
            env,
            builder,
            heap,
            index,
            offset,
            oob_behavior,
            oob,
            trap,
        ));
    }
    // General case: compute `index + offset_and_size` (trapping on pointer
    // overflow) and compare that end-of-access against the dynamic bound.
    let access_size_val = builder
        .ins()
        .iconst(env.pointer_type(), offset_and_size as i64);
    let adjusted_index = env.uadd_overflow_trap(builder, index, access_size_val, trap);
    let bound = get_dynamic_heap_bound(builder, env, heap);
    let oob = make_compare(builder, IntCC::UnsignedGreaterThan, adjusted_index, bound);
    Reachable(explicit_check_oob_condition_and_compute_addr(
        env,
        builder,
        heap,
        index,
        offset,
        oob_behavior,
        oob,
        trap,
    ))
}
/// Produce this heap's current byte-length bound as a pointer-typed IR
/// value: a constant when the size is statically known, otherwise a load of
/// the heap's bound global value.
fn get_dynamic_heap_bound(
    builder: &mut FunctionBuilder,
    env: &mut FuncEnvironment<'_>,
    heap: &HeapData,
) -> ir::Value {
    if let Some(static_size) = heap.memory.static_heap_size() {
        // The bound can never change; materialize it as a constant.
        builder.ins().iconst(env.pointer_type(), static_size as i64)
    } else {
        // The bound is dynamic; read it from the heap's global value.
        builder.ins().global_value(env.pointer_type(), heap.bound)
    }
}
/// Convert `index` from `index_ty` to `pointer_ty`, emitting a trap with
/// code `trap` when the value does not fit in the pointer type.
fn cast_index_to_pointer_ty(
    index: ir::Value,
    index_ty: ir::Type,
    pointer_ty: ir::Type,
    pos: &mut FuncCursor,
    trap: ir::TrapCode,
) -> ir::Value {
    // Same type already: nothing to do.
    if index_ty == pointer_ty {
        return index;
    }

    if index_ty.bits() > pointer_ty.bits() {
        // Narrowing: only 64-bit indices on 32-bit pointers are expected.
        // Keep the low 32 bits but trap if any high bit is set, since such
        // an index can never be in bounds on a 32-bit target.
        assert_eq!(index_ty, ir::types::I64);
        assert_eq!(pointer_ty, ir::types::I32);
        let truncated = pos.ins().ireduce(pointer_ty, index);
        let shift_amt = pos.ins().iconst(pointer_ty, 32);
        let upper_half = pos.ins().ushr(index, shift_amt);
        let upper_half = pos.ins().ireduce(pointer_ty, upper_half);
        pos.ins().trapnz(upper_half, trap);
        return truncated;
    }

    // Widening: zero-extend, and register a debug value-label alias so the
    // extended value is still tracked as the original index for debugging.
    let widened = pos.ins().uextend(pointer_ty, index);
    let loc = pos.srcloc();
    let loc = RelSourceLoc::from_base_offset(pos.func.params.base_srcloc(), loc);
    pos.func
        .stencil
        .dfg
        .add_value_label_alias(widened, loc, index);
    widened
}
// What to emit when an access may be out of bounds.
enum OobBehavior {
    // Emit an explicit conditional trap on the out-of-bounds condition.
    ExplicitTrap,
    // Replace an out-of-bounds address with null instead of trapping
    // explicitly; presumably the resulting access then faults via virtual
    // memory (see `load_from_zero_allowed`) — confirm with the environment.
    ConditionallyLoadFromZero {
        // Whether to use a speculation-safe `select_spectre_guard` rather
        // than a plain `select` for the null substitution.
        select_spectre_guard: bool,
    },
}
/// Given an already-computed out-of-bounds condition, apply the requested
/// `oob_behavior` (explicit trap, or null substitution) and return the
/// address to access.
fn explicit_check_oob_condition_and_compute_addr(
    env: &mut FuncEnvironment<'_>,
    builder: &mut FunctionBuilder,
    heap: &HeapData,
    index: ir::Value,
    offset: u32,
    oob_behavior: OobBehavior,
    oob_condition: ir::Value,
    trap: ir::TrapCode,
) -> ir::Value {
    // When trapping explicitly, the trap must be emitted before the address
    // computation so OOB accesses never reach it.
    if matches!(oob_behavior, OobBehavior::ExplicitTrap) {
        env.trapnz(builder, oob_condition, trap);
    }

    let addr_ty = env.pointer_type();
    let raw_addr = compute_addr(&mut builder.cursor(), heap, addr_ty, index, offset);

    match oob_behavior {
        // Already trapped above; the address is valid here.
        OobBehavior::ExplicitTrap => raw_addr,
        OobBehavior::ConditionallyLoadFromZero {
            select_spectre_guard,
        } => {
            // Substitute null for OOB addresses; the environment must allow
            // accesses at address zero to fault instead of trapping here.
            assert!(env.load_from_zero_allowed());
            let null = builder.ins().iconst(addr_ty, 0);
            if select_spectre_guard {
                // Speculation-safe select blocks Spectre-style leaks.
                builder
                    .ins()
                    .select_spectre_guard(oob_condition, null, raw_addr)
            } else {
                builder.ins().select(oob_condition, null, raw_addr)
            }
        }
    }
}
/// Compute `heap_base + index + offset` as a pointer-typed address, eliding
/// the final add when `offset` is zero.
fn compute_addr(
    pos: &mut FuncCursor,
    heap: &HeapData,
    addr_ty: ir::Type,
    index: ir::Value,
    offset: u32,
) -> ir::Value {
    // The index must already have been cast to the pointer type.
    debug_assert_eq!(pos.func.dfg.value_type(index), addr_ty);
    let base = pos.ins().global_value(addr_ty, heap.base);
    let base_plus_index = pos.ins().iadd(base, index);
    if offset == 0 {
        return base_plus_index;
    }
    let offset_const = pos.ins().iconst(addr_ty, i64::from(offset));
    pos.ins().iadd(base_plus_index, offset_const)
}
/// Compute `offset + size` in 64 bits.
///
/// Cannot overflow: the operands are at most `u32::MAX` and `u8::MAX`, whose
/// sum is far below `u64::MAX`.
#[inline]
fn offset_plus_size(offset: u32, size: u8) -> u64 {
    // Use lossless `From` conversions instead of `as` casts so a future
    // change to the parameter types cannot silently truncate.
    u64::from(offset) + u64::from(size)
}
/// Return `true` when `index` is a constant and the whole access
/// `index + offset_and_size` provably fits within the heap's minimum byte
/// size, so the dynamic bounds check can be skipped entirely.
fn statically_in_bounds(
    func: &ir::Function,
    heap: &HeapData,
    index: ir::Value,
    offset_and_size: u64,
) -> bool {
    // The index must be produced by an instruction (not a block param).
    let inst = match func.dfg.value_def(index).inst() {
        Some(inst) => inst,
        None => return false,
    };

    // And that instruction must be an `iconst`.
    let imm = match func.dfg.insts[inst] {
        ir::InstructionData::UnaryImm {
            opcode: ir::Opcode::Iconst,
            imm,
        } => imm,
        _ => return false,
    };

    // Interpret the immediate as an unsigned value of the index's width.
    let ty = func.dfg.value_type(index);
    let start = imm.zero_extend_from_width(ty.bits()).bits().cast_unsigned();

    // An overflowing end address can never be in bounds.
    let end = match start.checked_add(offset_and_size) {
        Some(end) => end,
        None => return false,
    };
    end <= heap.memory.minimum_byte_size().unwrap_or(u64::MAX)
}