use crate::{
align::Align,
core::{exclusive_monitor::LocalMonitor, Condition, Config, Coprocessor, Irq, MonitorState},
decoder::{BasicInstructionDecoder, InstructionDecode, InstructionDecodeError},
helpers::BitAccess,
instructions::{Instruction, InstructionSize},
memory::{Env, MemoryAccessError, MemoryInterface, MemoryOpAction, RamMemory},
mpu::{v7m::MpuV7M, v8m::MemoryProtectionUnitV8M},
registers::{CoreRegisters, Mode, RegisterIndex},
system_control::SystemControl,
};
use core::panic;
use std::{
cell::RefCell,
collections::BTreeSet,
ops::{Index, Range},
rc::Rc,
};
/// One entry in the processor's memory map: a memory interface exposed at
/// `address` and spanning `size` bytes.
struct MemoryMap {
    // Base address of the mapping in the 32-bit address space.
    address: u32,
    // Mapping length in bytes (taken from the interface's `size()`).
    size: u32,
    // Backing implementation (RAM, peripheral, system control block, MPU, ...).
    iface: Rc<RefCell<dyn MemoryInterface>>,
}
/// Errors that can abort emulation.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum RunError {
    /// The decoder did not recognize the instruction encoding.
    InstructionUnknown,
    /// The instruction encoding is UNPREDICTABLE.
    InstructionUnpredictable,
    /// The instruction encoding is UNDEFINED.
    InstructionUndefined,
    /// Execution reached an architecturally UNPREDICTABLE state.
    Unpredictable,
    /// A memory read failed.
    MemRead {
        address: u32,
        /// Access width in bytes.
        size: u32,
        cause: MemoryAccessError,
    },
    /// A memory write failed.
    MemWrite {
        address: u32,
        /// Access width in bytes.
        size: u32,
        /// Value being written, zero-extended to 32 bits.
        value: u32,
        cause: MemoryAccessError,
    },
}
impl From<InstructionDecodeError> for RunError {
fn from(e: InstructionDecodeError) -> Self {
match e {
InstructionDecodeError::Unknown => RunError::InstructionUnknown,
InstructionDecodeError::Unpredictable => RunError::InstructionUnpredictable,
InstructionDecodeError::Undefined => todo!(),
}
}
}
/// Address range registered via `hook_code`; executing at any address inside
/// the range emits an `Event::Hook` instead of running the instruction.
struct CodeHook {
    range: Range<usize>,
}
/// Observable emulation events produced by `step` and returned by
/// `Emulator::next_event`.
#[derive(Clone)]
pub enum Event {
    /// Execution reached an address covered by a registered code hook.
    Hook {
        address: u32,
    },
    /// One instruction was executed.
    Instruction {
        ins: InstructionBox,
    },
    /// A memory interface requested a system reset.
    Reset,
    /// A break effect with its 8-bit payload.
    Break(u8),
    /// A debug hint with its 8-bit payload.
    DebugHint(u8),
}
/// Ordered list of memory mappings; lookups scan linearly and return the first
/// mapping whose `[address, address + size)` range contains the target.
struct MemoryMappings(Vec<MemoryMap>);
impl MemoryMappings {
    pub fn new() -> Self {
        Self(Vec::new())
    }
    /// True when `address` falls inside `mapping`'s half-open range.
    ///
    /// Range math is done in u64 so `address + size` cannot overflow even on
    /// 32-bit hosts (the previous `usize` arithmetic could panic/wrap there).
    fn covers(mapping: &MemoryMap, address: u32) -> bool {
        let start = mapping.address as u64;
        let end = start + mapping.size as u64;
        (address as u64) >= start && (address as u64) < end
    }
    /// Returns the first mapping containing `address`, if any.
    fn get(&self, address: u32) -> Option<&MemoryMap> {
        self.0.iter().find(|m| Self::covers(m, address))
    }
    /// Mutable variant of `get`.
    fn get_mut(&mut self, address: u32) -> Option<&mut MemoryMap> {
        self.0.iter_mut().find(|m| Self::covers(m, address))
    }
}
/// Architecture variant emulated by the processor.
#[derive(Clone, Copy, PartialEq, Eq)]
pub enum ArmVersion {
    V6M,
    V7M,
    V7EM,
    V8M,
}
/// An ARMv6-M/v7-M/v8-M processor core together with its memory map, system
/// control block and coprocessors.
pub struct Processor {
    pub version: ArmVersion,
    pub registers: CoreRegisters,
    /// Current execution priority used by the exception model.
    pub execution_priority: i16,
    // One flag per exception number; true while that exception is active.
    exception_active: Vec<bool>,
    // Running / waiting-for-event / waiting-for-interrupt state machine.
    state: State,
    memory_mappings: MemoryMappings,
    /// Local exclusive monitor backing exclusive (LDREX/STREX-style) accesses.
    pub local_monitor: LocalMonitor,
    pub instruction_decoder: Box<dyn InstructionDecode>,
    /// Cycle counter, incremented once per `step`.
    pub cycles: u64,
    code_hooks: Vec<CodeHook>,
    // Side effects queued by memory interfaces, drained at the end of step().
    memory_op_actions: Vec<MemoryOpAction>,
    // Pending interrupt requests; pop_first() services them in ascending order.
    interrupt_requests: BTreeSet<Irq>,
    system_control: Rc<RefCell<SystemControl>>,
    pub coprocessors: Vec<Option<Rc<RefCell<dyn Coprocessor>>>>,
    /// When set, an unaligned PC popped from an exception frame is silently
    /// word-aligned instead of returning `RunError::Unpredictable`.
    pub tolerate_pop_stack_unaligned_pc: bool,
    // Events produced by step(), consumed by next_event().
    events: Vec<Event>,
}
type InstructionBox = Rc<dyn Instruction>;
impl Processor {
    /// Builds a processor from `config`.
    ///
    /// Registers the system control block at its architectural address
    /// 0xe000e000 and, for v7-M/v7E-M/v8-M, an MPU at 0xe000ed90.
    pub fn new(config: Config) -> Self {
        let version = config.version;
        // 16 architectural exception slots plus the configured external IRQs.
        let exception_count = 16usize.checked_add(config.external_exceptions).unwrap();
        // v6-M has no coprocessor support; the other variants expose 16 slots.
        let coprocessor_count = match version {
            ArmVersion::V6M => 0,
            ArmVersion::V7M | ArmVersion::V7EM | ArmVersion::V8M => 16,
        };
        let system_control = Rc::new(RefCell::new(SystemControl::new()));
        let mut processor = Self {
            version,
            registers: CoreRegisters::new(),
            state: State::Running,
            memory_mappings: MemoryMappings::new(),
            local_monitor: LocalMonitor::new(config.exclusives_reservation_granule),
            execution_priority: 0,
            exception_active: (0..exception_count).map(|_| false).collect(),
            instruction_decoder: Box::new(BasicInstructionDecoder::new(version)),
            cycles: 0,
            code_hooks: Vec::new(),
            memory_op_actions: Vec::new(),
            interrupt_requests: BTreeSet::new(),
            system_control: system_control.clone(),
            coprocessors: (0..coprocessor_count).map(|_| None).collect(),
            tolerate_pop_stack_unaligned_pc: false,
            events: Vec::new(),
        };
        // The system control block lives at its architectural address.
        processor.map_iface(0xe000e000, system_control).unwrap();
        // Version-specific MPU model at the architectural MPU address.
        match processor.version {
            ArmVersion::V6M => {}
            ArmVersion::V7M | ArmVersion::V7EM => processor
                .map_iface(0xe000ed90, Rc::new(RefCell::new(MpuV7M::new())))
                .unwrap(),
            ArmVersion::V8M => {
                processor
                    .map_iface(
                        0xe000ed90,
                        Rc::new(RefCell::new(MemoryProtectionUnitV8M::new(16))),
                    )
                    .unwrap();
            }
        }
        processor
    }
pub fn map(
&mut self,
address: u32,
data: &[u8],
) -> Result<Rc<RefCell<RamMemory>>, MapConflict> {
let ram = Rc::new(RefCell::new(RamMemory::new_from_slice(data)));
self.map_iface(address, ram.clone())?;
Ok(ram)
}
    /// Registers `iface` at `address`, rejecting mappings that overflow the
    /// 32-bit address space or overlap an existing mapping.
    pub fn map_iface(
        &mut self,
        address: u32,
        iface: Rc<RefCell<dyn MemoryInterface>>,
    ) -> Result<(), MapConflict> {
        let size = iface.borrow().size();
        // Reject when address + size overflows u32. NOTE(review): this also
        // rejects a mapping ending exactly at the top of the address space
        // (e.g. address 0xffffff00, size 0x100) — confirm that is intended.
        if address.checked_add(size).is_none() {
            return Err(MapConflict);
        }
        // Two half-open ranges intersect iff max(starts) < min(ends). The
        // additions cannot overflow because every stored mapping (and this
        // candidate) already passed the checked_add above.
        if self.memory_mappings.0.iter().any(|m| {
            let a = m.address.max(address);
            let b = (m.address + m.size).min(address + size);
            a < b
        }) {
            return Err(MapConflict);
        }
        self.memory_mappings.0.push(MemoryMap {
            address,
            size,
            iface,
        });
        Ok(())
    }
pub fn map_ram(
&mut self,
address: u32,
size: u32,
) -> Result<Rc<RefCell<RamMemory>>, MapConflict> {
let ram = Rc::new(RefCell::new(RamMemory::new_zero(size as usize)));
self.map_iface(address, ram.clone())?;
Ok(ram)
}
    /// Installs `coprocessor` in slot `index` (0..16).
    ///
    /// Panics when `index >= 16`. NOTE(review): on v6-M the coprocessor table
    /// is empty, so any index panics despite passing the assert — confirm
    /// callers never do this on v6-M.
    pub fn set_coprocessor(&mut self, index: usize, coprocessor: Rc<RefCell<dyn Coprocessor>>) {
        assert!(index < 16);
        self.coprocessors[index] = Some(coprocessor)
    }
    /// Registers a code hook: executing at any address in `range` yields an
    /// `Event::Hook` instead of running the instruction there.
    pub fn hook_code(&mut self, range: Range<usize>) {
        self.code_hooks.push(CodeHook { range })
    }
    /// Raises a UsageFault (setting CFSR.UNALIGNED) when `address` is not
    /// aligned to `size` bytes; no-op otherwise.
    fn usage_fault_if_unaligned(&mut self, address: u32, size: usize) -> Result<(), RunError> {
        if !address.is_aligned(size) {
            self.system_control.borrow_mut().cfsr.set_unaligned(true);
            self.exception_taken(Irq::UsageFault)?;
        }
        Ok(())
    }
    /// Reads one byte at `address` with the given privilege level.
    pub fn read_u8_with_priv(&mut self, address: u32, privileged: bool) -> Result<u8, RunError> {
        self.validate_address(address, privileged, false, false);
        self.read_u8_iface(address)
    }
    /// Writes one byte at `address` with the given privilege level.
    pub fn write_u8_with_priv(
        &mut self,
        address: u32,
        value: u8,
        privileged: bool,
    ) -> Result<(), RunError> {
        self.validate_address(address, privileged, false, false);
        self.write_u8_iface(address, value)
    }
pub fn read_u16_aligned_with_priv(
&mut self,
address: u32,
privileged: bool,
) -> Result<u16, RunError> {
self.usage_fault_if_unaligned(address, 2)?;
self.validate_address(address, privileged, false, false);
let mut value = self.read_u16le_iface(address)?;
if self.system_control.borrow_mut().aircr.endianess() {
value = value.swap_bytes()
}
Ok(value)
}
pub fn write_u16_aligned_with_priv(
&mut self,
address: u32,
mut value: u16,
privileged: bool,
) -> Result<(), RunError> {
self.usage_fault_if_unaligned(address, 2)?;
self.validate_address(address, privileged, false, false);
if self.system_control.borrow_mut().aircr.endianess() {
value = value.swap_bytes()
}
self.write_u16le_iface(address, value)
}
pub fn read_u32_aligned_with_priv(
&mut self,
address: u32,
privileged: bool,
) -> Result<u32, RunError> {
self.usage_fault_if_unaligned(address, 4)?;
self.validate_address(address, privileged, false, false);
let mut value = self.read_u32le_iface(address)?;
if self.system_control.borrow_mut().aircr.endianess() {
value = value.swap_bytes()
}
Ok(value)
}
pub fn write_u32_aligned_with_priv(
&mut self,
address: u32,
mut value: u32,
privileged: bool,
) -> Result<(), RunError> {
self.usage_fault_if_unaligned(address, 4)?;
self.validate_address(address, privileged, false, false);
if self.system_control.borrow_mut().aircr.endianess() {
value = value.swap_bytes()
}
self.write_u32le_iface(address, value)
}
    /// Reads a byte at the current privilege level.
    pub fn read_u8(&mut self, address: u32) -> Result<u8, RunError> {
        self.read_u8_with_priv(address, self.is_privileged())
    }
    /// Writes a byte at the current privilege level.
    pub fn write_u8(&mut self, address: u32, value: u8) -> Result<(), RunError> {
        self.write_u8_with_priv(address, value, self.is_privileged())
    }
    /// Reads an aligned halfword at the current privilege level.
    pub fn read_u16_aligned(&mut self, address: u32) -> Result<u16, RunError> {
        self.read_u16_aligned_with_priv(address, self.is_privileged())
    }
    /// Writes an aligned halfword at the current privilege level.
    pub fn write_u16_aligned(&mut self, address: u32, value: u16) -> Result<(), RunError> {
        self.write_u16_aligned_with_priv(address, value, self.is_privileged())
    }
    /// Reads an aligned word at the current privilege level.
    pub fn read_u32_aligned(&mut self, address: u32) -> Result<u32, RunError> {
        self.read_u32_aligned_with_priv(address, self.is_privileged())
    }
    /// Writes an aligned word at the current privilege level.
    pub fn write_u32_aligned(&mut self, address: u32, value: u32) -> Result<(), RunError> {
        self.write_u32_aligned_with_priv(address, value, self.is_privileged())
    }
    /// Reads a halfword that may be unaligned.
    ///
    /// Aligned accesses take the fast path. Unaligned accesses either raise a
    /// UsageFault when CCR.UNALIGN_TRP is set (the returned 0 is a
    /// placeholder, not real data), or are assembled byte-by-byte honoring
    /// AIRCR.ENDIANESS.
    pub fn read_u16_unaligned_with_priv(
        &mut self,
        address: u32,
        privileged: bool,
    ) -> Result<u16, RunError> {
        if address.is_aligned(2) {
            self.read_u16_aligned_with_priv(address, privileged)
        } else if self.system_control.borrow().ccr.unalign_trp() {
            self.system_control.borrow_mut().cfsr.set_unaligned(true);
            self.exception_taken(Irq::UsageFault)?;
            Ok(0)
        } else {
            let v0 = self.read_u8_with_priv(address, privileged)?;
            let v1 = self.read_u8_with_priv(address.wrapping_add(1), privileged)?;
            // ENDIANESS set selects big-endian byte order.
            if self.system_control.borrow().aircr.endianess() {
                Ok(v1 as u16 | ((v0 as u16) << 8))
            } else {
                Ok(v0 as u16 | ((v1 as u16) << 8))
            }
        }
    }
    /// Writes a halfword that may be unaligned.
    ///
    /// Aligned accesses take the fast path. Unaligned accesses either raise a
    /// UsageFault when CCR.UNALIGN_TRP is set (the write is dropped), or are
    /// split into byte writes honoring AIRCR.ENDIANESS.
    pub fn write_u16_unaligned_with_priv(
        &mut self,
        address: u32,
        value: u16,
        privileged: bool,
    ) -> Result<(), RunError> {
        if address.is_aligned(2) {
            self.write_u16_aligned_with_priv(address, value, privileged)
        } else if self.system_control.borrow().ccr.unalign_trp() {
            self.system_control.borrow_mut().cfsr.set_unaligned(true);
            self.exception_taken(Irq::UsageFault)?;
            Ok(())
        } else {
            let v0 = value as u8;
            let v1 = (value >> 8) as u8;
            // ENDIANESS set selects big-endian byte order.
            if self.system_control.borrow().aircr.endianess() {
                self.write_u8_with_priv(address, v1, privileged)?;
                self.write_u8_with_priv(address.wrapping_add(1), v0, privileged)?;
            } else {
                self.write_u8_with_priv(address, v0, privileged)?;
                self.write_u8_with_priv(address.wrapping_add(1), v1, privileged)?;
            }
            Ok(())
        }
    }
    /// Reads a word that may be unaligned.
    ///
    /// Aligned accesses take the fast path. Unaligned accesses either raise a
    /// UsageFault when CCR.UNALIGN_TRP is set (the returned 0 is a
    /// placeholder, not real data), or are assembled byte-by-byte honoring
    /// AIRCR.ENDIANESS.
    pub fn read_u32_unaligned_with_priv(
        &mut self,
        address: u32,
        privileged: bool,
    ) -> Result<u32, RunError> {
        if address.is_aligned(4) {
            self.read_u32_aligned_with_priv(address, privileged)
        } else if self.system_control.borrow().ccr.unalign_trp() {
            self.system_control.borrow_mut().cfsr.set_unaligned(true);
            self.exception_taken(Irq::UsageFault)?;
            Ok(0)
        } else {
            let v0 = self.read_u8_with_priv(address, privileged)?;
            let v1 = self.read_u8_with_priv(address.wrapping_add(1), privileged)?;
            let v2 = self.read_u8_with_priv(address.wrapping_add(2), privileged)?;
            let v3 = self.read_u8_with_priv(address.wrapping_add(3), privileged)?;
            // ENDIANESS set selects big-endian byte order.
            if self.system_control.borrow().aircr.endianess() {
                Ok(v3 as u32 | ((v2 as u32) << 8) | ((v1 as u32) << 16) | ((v0 as u32) << 24))
            } else {
                Ok(v0 as u32 | ((v1 as u32) << 8) | ((v2 as u32) << 16) | ((v3 as u32) << 24))
            }
        }
    }
    /// Writes a word that may be unaligned.
    ///
    /// Aligned accesses take the fast path. Unaligned accesses either raise a
    /// UsageFault when CCR.UNALIGN_TRP is set (the write is dropped), or are
    /// split into byte writes honoring AIRCR.ENDIANESS.
    pub fn write_u32_unaligned_with_priv(
        &mut self,
        address: u32,
        value: u32,
        privileged: bool,
    ) -> Result<(), RunError> {
        if address.is_aligned(4) {
            self.write_u32_aligned_with_priv(address, value, privileged)
        } else if self.system_control.borrow().ccr.unalign_trp() {
            self.system_control.borrow_mut().cfsr.set_unaligned(true);
            self.exception_taken(Irq::UsageFault)?;
            Ok(())
        } else {
            let v0 = value as u8;
            let v1 = (value >> 8) as u8;
            let v2 = (value >> 16) as u8;
            let v3 = (value >> 24) as u8;
            // ENDIANESS set selects big-endian byte order.
            if self.system_control.borrow().aircr.endianess() {
                self.write_u8_with_priv(address, v3, privileged)?;
                self.write_u8_with_priv(address.wrapping_add(1), v2, privileged)?;
                self.write_u8_with_priv(address.wrapping_add(2), v1, privileged)?;
                self.write_u8_with_priv(address.wrapping_add(3), v0, privileged)?;
            } else {
                self.write_u8_with_priv(address, v0, privileged)?;
                self.write_u8_with_priv(address.wrapping_add(1), v1, privileged)?;
                self.write_u8_with_priv(address.wrapping_add(2), v2, privileged)?;
                self.write_u8_with_priv(address.wrapping_add(3), v3, privileged)?;
            }
            Ok(())
        }
    }
    /// Reads a possibly-unaligned halfword at the current privilege level.
    pub fn read_u16_unaligned(&mut self, address: u32) -> Result<u16, RunError> {
        self.read_u16_unaligned_with_priv(address, self.is_privileged())
    }
    /// Writes a possibly-unaligned halfword at the current privilege level.
    pub fn write_u16_unaligned(&mut self, address: u32, value: u16) -> Result<(), RunError> {
        self.write_u16_unaligned_with_priv(address, value, self.is_privileged())
    }
    /// Reads a possibly-unaligned word at the current privilege level.
    pub fn read_u32_unaligned(&mut self, address: u32) -> Result<u32, RunError> {
        self.read_u32_unaligned_with_priv(address, self.is_privileged())
    }
    /// Writes a possibly-unaligned word at the current privilege level.
    pub fn write_u32_unaligned(&mut self, address: u32, value: u32) -> Result<(), RunError> {
        self.write_u32_unaligned_with_priv(address, value, self.is_privileged())
    }
pub fn read_u8_iface(&mut self, address: u32) -> Result<u8, RunError> {
let mut env = Env::new(self.cycles, self.is_privileged());
let mapping = self
.memory_mappings
.get_mut(address)
.ok_or(RunError::MemRead {
address,
size: 1,
cause: MemoryAccessError::InvalidAddress,
})?;
let read = mapping
.iface
.borrow_mut()
.read_u8(address - mapping.address, &mut env);
self.memory_op_actions.extend(env.actions);
match read {
Ok(val) => Ok(val),
Err(e) => Err(RunError::MemRead {
address,
size: 1,
cause: e,
}),
}
}
    /// Raw bus write of one byte; no alignment/MPU checks, no endianness swap.
    pub fn write_u8_iface(&mut self, address: u32, value: u8) -> Result<(), RunError> {
        let mut env = Env::new(self.cycles, self.is_privileged());
        let mapping = self
            .memory_mappings
            .get_mut(address)
            .ok_or(RunError::MemWrite {
                address,
                size: 1,
                value: value as u32,
                cause: MemoryAccessError::InvalidAddress,
            })?;
        // Interfaces see mapping-relative offsets.
        let write = mapping
            .iface
            .borrow_mut()
            .write_u8(address - mapping.address, value, &mut env);
        // Queue any side effects the interface requested during the access.
        self.memory_op_actions.extend(env.actions);
        match write {
            Ok(()) => Ok(()),
            Err(e) => Err(RunError::MemWrite {
                address,
                size: 1,
                value: value as u32,
                cause: e,
            }),
        }
    }
    /// Raw bus write of a little-endian halfword; no alignment/MPU checks.
    pub fn write_u16le_iface(&mut self, address: u32, value: u16) -> Result<(), RunError> {
        let mut env = Env::new(self.cycles, self.is_privileged());
        let mapping = self
            .memory_mappings
            .get_mut(address)
            .ok_or(RunError::MemWrite {
                address,
                size: 2,
                value: value as u32,
                cause: MemoryAccessError::InvalidAddress,
            })?;
        // Interfaces see mapping-relative offsets.
        let write =
            mapping
                .iface
                .borrow_mut()
                .write_u16le(address - mapping.address, value, &mut env);
        // Queue any side effects the interface requested during the access.
        self.memory_op_actions.extend(env.actions);
        match write {
            Ok(()) => Ok(()),
            Err(e) => Err(RunError::MemWrite {
                address,
                size: 2,
                value: value as u32,
                cause: e,
            }),
        }
    }
pub fn read_u16le_iface(&mut self, address: u32) -> Result<u16, RunError> {
let mut env = Env::new(self.cycles, self.is_privileged());
if let Some(mapping) = self.memory_mappings.get_mut(address) {
let read = mapping
.iface
.borrow_mut()
.read_u16le(address - mapping.address, &mut env);
self.memory_op_actions.extend(env.actions);
match read {
Ok(val) => Ok(val),
Err(e) => Err(RunError::MemRead {
address,
size: 2,
cause: e,
}),
}
} else {
Err(RunError::MemRead {
address,
size: 2,
cause: MemoryAccessError::InvalidAddress,
})
}
}
    /// Raw bus read of a little-endian word; no alignment/MPU checks.
    pub fn read_u32le_iface(&mut self, address: u32) -> Result<u32, RunError> {
        let mut env = Env::new(self.cycles, self.is_privileged());
        let mapping = self
            .memory_mappings
            .get_mut(address)
            .ok_or(RunError::MemRead {
                address,
                size: 4,
                cause: MemoryAccessError::InvalidAddress,
            })?;
        // Interfaces see mapping-relative offsets.
        let read = mapping
            .iface
            .borrow_mut()
            .read_u32le(address - mapping.address, &mut env);
        // Queue any side effects the interface requested during the access.
        self.memory_op_actions.extend(env.actions);
        match read {
            Ok(val) => Ok(val),
            Err(e) => Err(RunError::MemRead {
                address,
                size: 4,
                cause: e,
            }),
        }
    }
    /// Raw bus write of a little-endian word; no alignment/MPU checks.
    pub fn write_u32le_iface(&mut self, address: u32, value: u32) -> Result<(), RunError> {
        let mut env = Env::new(self.cycles, self.is_privileged());
        let mapping = self
            .memory_mappings
            .get_mut(address)
            .ok_or(RunError::MemWrite {
                address,
                size: 4,
                value,
                cause: MemoryAccessError::InvalidAddress,
            })?;
        // Interfaces see mapping-relative offsets.
        let write =
            mapping
                .iface
                .borrow_mut()
                .write_u32le(address - mapping.address, value, &mut env);
        // Queue any side effects the interface requested during the access.
        self.memory_op_actions.extend(env.actions);
        write.map_err(|e| RunError::MemWrite {
            address,
            size: 4,
            value,
            cause: e,
        })
    }
    /// Access-validation hook.
    ///
    /// NOTE(review): currently a no-op that ignores all parameters —
    /// presumably the intended place for privilege/MPU permission checks;
    /// confirm before relying on access protection.
    fn validate_address(
        &self,
        _address: u32,
        _is_priv: bool,
        _is_write: bool,
        _is_instr_fetch: bool,
    ) {
    }
pub fn read_bytes_iface(&mut self, address: u32, size: u32) -> Result<Vec<u8>, RunError> {
let mut result = Vec::new();
result.reserve_exact(size as usize);
for a in address..address.checked_add(size).unwrap() {
result.push(self.read_u8_iface(a)?);
}
Ok(result)
}
pub fn write_bytes_iface(&mut self, address: u32, data: &[u8]) -> Result<(), RunError> {
for i in 0..data.len() as u32 {
self.write_u8_iface(address.checked_add(i).unwrap(), data[i as usize])?
}
Ok(())
}
    /// Current stack pointer (r13).
    pub fn sp(&self) -> u32 {
        self.registers[13]
    }
    /// Sets a core register by index.
    pub fn set(&mut self, index: RegisterIndex, value: u32) {
        self.registers.set(index, value)
    }
    /// Sets the active stack pointer.
    pub fn set_sp(&mut self, value: u32) {
        *self.registers.sp_mut() = value
    }
    /// Link register (r14).
    pub fn lr(&self) -> u32 {
        self.registers[14]
    }
    /// Sets the link register.
    pub fn set_lr(&mut self, value: u32) {
        self.registers.lr = value
    }
    /// Program counter.
    pub fn pc(&self) -> u32 {
        self.registers.pc
    }
    /// Sets the program counter.
    pub fn set_pc(&mut self, value: u32) {
        self.registers.pc = value;
    }
    /// Fetches and decodes the instruction at `address`.
    ///
    /// The first halfword determines the encoding width; for 32-bit
    /// encodings it occupies the high 16 bits of the value handed to the
    /// decoder, with the second halfword in the low 16 bits.
    fn decode_instruction(
        &mut self,
        address: u32,
    ) -> Result<(InstructionBox, InstructionSize), RunError> {
        let hw = self.read_u16le_iface(address)?;
        // The IT block state is passed through because it can affect decoding.
        let it_state = self.registers.psr.it_state();
        let size = InstructionSize::from_halfword(hw);
        let ins = match size {
            InstructionSize::Ins16 => {
                self.instruction_decoder
                    .try_decode(hw as u32, InstructionSize::Ins16, it_state)?
            }
            InstructionSize::Ins32 => {
                // NOTE(review): `address + 2` can panic on overflow in debug
                // builds for a fetch at the very top of memory — confirm
                // acceptable.
                let hw2 = self.read_u16le_iface(address + 2)?;
                self.instruction_decoder.try_decode(
                    ((hw as u32) << 16) + hw2 as u32,
                    InstructionSize::Ins32,
                    it_state,
                )?
            }
        };
        Ok((ins, size))
    }
fn step(&mut self) -> Result<(), RunError> {
if let Some(irq) = self.interrupt_requests.pop_first() {
let max_num = self.exception_active.len();
let num = irq.number();
assert!(
(num as usize) < max_num,
"Exception number too high: got {}, max is {}",
num,
max_num - 1
);
if self.state == State::WaitingForInterrupt {
self.state = State::Running;
}
if !self.exception_active[irq.number() as usize] {
self.exception_entry(irq)?;
}
}
let pc = self.pc();
if self
.code_hooks
.iter()
.any(|ch| ch.range.contains(&(pc as usize)))
{
self.events.push(Event::Hook { address: pc });
return Ok(());
}
match self.state {
State::Running => {
let (ins, effect) = self.execute_next_instruction()?;
self.events.push(Event::Instruction { ins });
match effect {
Effect::None => {}
Effect::Branch => {}
Effect::Break(i) => self.events.push(Event::Break(i)),
Effect::DebugHint(i) => self.events.push(Event::DebugHint(i)),
Effect::WaitForEvent => self.state = State::WaitingForEvent,
Effect::WaitForInterrupt => self.state = State::WaitingForInterrupt,
}
}
State::WaitingForEvent => {
if self.registers.event {
self.registers.event = false;
self.state = State::Running;
}
}
State::WaitingForInterrupt => {}
}
for action in self.memory_op_actions.iter() {
match action {
MemoryOpAction::Reset => self.events.push(Event::Reset),
MemoryOpAction::Irq(irq) => {
self.interrupt_requests.insert(*irq);
}
MemoryOpAction::Update(_) => panic!(), }
}
self.memory_op_actions.clear();
self.update_peripherals();
self.cycles += 1;
Ok(())
}
    /// Decodes and executes one instruction at PC, returning it and its
    /// effect.
    ///
    /// PC handling: PC is first advanced by 4 (the value instructions
    /// observe), then pulled back by 2 afterwards when the instruction was
    /// 16-bit and did not branch.
    fn execute_next_instruction(&mut self) -> Result<(InstructionBox, Effect), RunError> {
        let (ins, size) = self.decode_instruction(self.pc())?;
        self.set_pc(self.pc() + 4);
        let mut it_state = self.registers.psr.it_state();
        // Condition comes from the instruction itself or the current IT
        // block, defaulting to Always.
        let condition = ins
            .condition()
            .or(it_state.current_condition())
            .unwrap_or(Condition::Always);
        // The IT state advances whether or not the instruction executes.
        it_state.advance();
        self.registers.psr.set_it_state(it_state);
        let effect = if self.registers.psr.test(condition) {
            ins.execute(self)?
        } else {
            // Condition failed: the instruction is skipped.
            Effect::None
        };
        if effect != Effect::Branch && size == InstructionSize::Ins16 {
            self.set_pc(self.pc() - 2)
        }
        Ok((ins, effect))
    }
pub fn update_peripherals(&mut self) {
let mut env = Env::new(self.cycles, self.is_privileged());
for mapping in self.memory_mappings.0.iter_mut() {
mapping.iface.borrow_mut().update(&mut env);
}
self.memory_op_actions.extend(env.actions);
}
    /// Queues an interrupt request; it is serviced at the start of a later
    /// step.
    pub fn request_interrupt(&mut self, irq: Irq) {
        self.interrupt_requests.insert(irq);
    }
    /// Full exception entry: stack the caller-saved frame, then switch to
    /// the handler for `number`.
    fn exception_entry(&mut self, number: Irq) -> Result<(), RunError> {
        self.push_stack()?;
        self.exception_taken(number)?;
        Ok(())
    }
    /// Handles an exception return triggered by loading an EXC_RETURN value
    /// into the PC while in Handler mode.
    fn exception_return(&mut self, exc_return: u32) -> Result<(), RunError> {
        assert_eq!(self.registers.mode, Mode::Handler);
        let number = self.registers.psr.exception_number();
        // NOTE(review): nested_activation is hard-coded to false, so the
        // CCR.NONBASETHRDENA branches below are currently unreachable —
        // confirm whether nesting detection is still to be implemented.
        let nested_activation = false;
        // Returning from an exception that is not marked active: fault path
        // not implemented yet.
        if !self.exception_active[number as usize] {
            self.deactivate(number);
            todo!();
        }
        // The low nibble of EXC_RETURN selects the return mode and stack.
        let frame_ptr = match exc_return & 0xf {
            // Return to Handler mode, main stack.
            0b0001 => {
                self.registers.mode = Mode::Handler;
                self.registers.control.set_spsel(false);
                self.registers.msp
            }
            // Return to Thread mode, main stack.
            0b1001 => {
                if nested_activation && !self.system_control.borrow().ccr.nonbasethrdena() {
                    todo!()
                } else {
                    self.registers.mode = Mode::Thread;
                    self.registers.control.set_spsel(false);
                    self.registers.msp
                }
            }
            // Return to Thread mode, process stack.
            0b1101 => {
                if nested_activation && !self.system_control.borrow().ccr.nonbasethrdena() {
                    todo!()
                } else {
                    self.registers.mode = Mode::Thread;
                    self.registers.control.set_spsel(true);
                    self.registers.psp
                }
            }
            // Any other EXC_RETURN encoding: fault path not implemented.
            _ => {
                self.deactivate(number);
                todo!()
            }
        };
        self.deactivate(number);
        self.pop_stack(frame_ptr, exc_return)?;
        // Consistency checks between the restored IPSR and the new mode;
        // both fault paths are not implemented yet.
        if self.registers.mode == Mode::Handler && self.registers.psr.ipsr() == 0 {
            todo!();
        }
        if self.registers.mode == Mode::Thread && self.registers.psr.ipsr() != 0 {
            todo!();
        }
        // Exception return sets the event register (wakes a pending WFE).
        self.registers.event = true;
        Ok(())
    }
    /// Marks exception `number` inactive and clears FAULTMASK unless the
    /// current exception number is 2.
    fn deactivate(&mut self, number: u16) {
        self.exception_active[number as usize] = false;
        if self.registers.psr.exception_number() != 2 {
            self.registers.faultmask.set_pm(false);
        }
    }
    /// Pushes the 8-word exception frame (r0-r3, r12, lr, return address,
    /// xPSR) and loads LR with the EXC_RETURN value for the current
    /// mode/stack selection.
    fn push_stack(&mut self) -> Result<(), RunError> {
        let frame_size = 0x20;
        // With CCR.STKALIGN set, the frame is 8-byte aligned and the
        // original SP bit 2 is stashed in xPSR bit 9 so the pop can undo
        // the adjustment.
        let force_align = self.system_control.borrow().ccr.stkalign();
        let sp_mask = !((force_align as u32) << 2);
        let frame_ptr_align = self.sp().bit(2) && force_align;
        let frame_ptr = (self.sp() - frame_size) & sp_mask;
        self.set_sp(frame_ptr);
        let return_address = self.pc();
        self.write_u32le_iface(frame_ptr, self.registers.r0)?;
        self.write_u32le_iface(frame_ptr + 0x04, self.registers.r1)?;
        self.write_u32le_iface(frame_ptr + 0x08, self.registers.r2)?;
        self.write_u32le_iface(frame_ptr + 0x0c, self.registers.r3)?;
        self.write_u32le_iface(frame_ptr + 0x10, self.registers.r12)?;
        self.write_u32le_iface(frame_ptr + 0x14, self.registers.lr)?;
        self.write_u32le_iface(frame_ptr + 0x18, return_address)?;
        let mut xpsr = self.registers.psr.get();
        xpsr.set_bit(9, frame_ptr_align);
        self.write_u32le_iface(frame_ptr + 0x1c, xpsr)?;
        // EXC_RETURN: handler (0xfffffff1), thread/MSP (0xfffffff9),
        // thread/PSP (0xfffffffd).
        let lr = match self.registers.mode {
            Mode::Handler => 0xfffffff1,
            Mode::Thread => {
                if !self.registers.control.spsel() {
                    0xfffffff9
                } else {
                    0xfffffffd
                }
            }
        };
        self.set_lr(lr);
        Ok(())
    }
    /// Pops the 8-word exception frame at `frame_ptr`, restoring registers,
    /// PC and xPSR, and readjusts SP (including the STKALIGN correction).
    fn pop_stack(&mut self, frame_ptr: u32, exc_return: u32) -> Result<(), RunError> {
        let frame_size = 0x20;
        let force_align = self.system_control.borrow().ccr.stkalign();
        self.registers.r0 = self.read_u32le_iface(frame_ptr)?;
        self.registers.r1 = self.read_u32le_iface(frame_ptr + 0x04)?;
        self.registers.r2 = self.read_u32le_iface(frame_ptr + 0x08)?;
        self.registers.r3 = self.read_u32le_iface(frame_ptr + 0x0c)?;
        self.registers.r12 = self.read_u32le_iface(frame_ptr + 0x10)?;
        self.registers.lr = self.read_u32le_iface(frame_ptr + 0x14)?;
        let mut pc = self.read_u32le_iface(frame_ptr + 0x18)?;
        // A stacked return address with bit 0 set is treated as
        // unpredictable; optionally tolerate it by forcing alignment.
        if pc % 2 != 0 {
            if !self.tolerate_pop_stack_unaligned_pc {
                return Err(RunError::Unpredictable);
            }
            pc &= 0xfffffffe;
        };
        self.registers.pc = pc;
        let psr = self.read_u32le_iface(frame_ptr + 0x1c)?;
        // Undo the 8-byte alignment adjustment recorded in xPSR bit 9.
        let sp_mask = ((psr.bit(9) && force_align) as u32) << 2;
        self.registers.psr.set(psr);
        match exc_return & 0xf {
            0b0001 | 0b1001 | 0b1101 => {
                let new_sp = (self.sp() + frame_size) | sp_mask;
                self.set_sp(new_sp);
            }
            _ => {}
        }
        // NOTE(review): psr is written a second time with bits 16-19
        // cleared (mask 0xfff0ffff), overriding the earlier set — confirm
        // this is the intended final xPSR value.
        self.registers.psr.set(psr & 0xfff0ffff);
        Ok(())
    }
    /// Switches execution to the handler for `number`: loads PC from the
    /// VTOR-relative vector table, enters Handler mode and marks the
    /// exception active.
    fn exception_taken(&mut self, number: Irq) -> Result<(), RunError> {
        let vtor = self.system_control.borrow().vtor.offset();
        let vector_address = number.number() as u32 * 4 + vtor;
        let jump_address = self.read_u32le_iface(vector_address)?;
        // Bit 0 of the vector entry selects the T (Thumb) bit; the PC
        // itself is halfword-aligned.
        self.set_pc(jump_address & 0xfffffffe);
        self.registers.mode = Mode::Handler;
        self.registers
            .psr
            .set_exception_number(number.number())
            .set_t(jump_address & 1 != 0)
            .set_ici_it(0);
        // Handlers always run on the main stack.
        self.registers.control.set_spsel(false);
        self.exception_active[number.number() as usize] = true;
        self.registers.event = true;
        Ok(())
    }
pub fn is_privileged(&self) -> bool {
if self.registers.mode == Mode::Handler {
true
} else {
!self.registers.control.privileged_bit()
}
}
    /// ALU writes to the PC behave like BLX (interworking branch).
    pub fn alu_write_pc(&mut self, address: u32) {
        self.blx_write_pc(address)
    }
    /// Interworking branch: bit 0 selects the T (Thumb) bit, the PC gets
    /// the halfword-aligned address.
    pub fn blx_write_pc(&mut self, address: u32) {
        self.registers.psr.set_t(address & 1 == 1);
        self.set_pc(address & 0xfffffffe)
    }
    /// Loads to the PC behave like BX (may trigger an exception return).
    pub fn load_write_pc(&mut self, address: u32) -> Result<(), RunError> {
        self.bx_write_pc(address)
    }
    /// BX semantics: in Handler mode a 0xFxxxxxxx target is an EXC_RETURN
    /// and performs an exception return; otherwise an interworking branch.
    pub fn bx_write_pc(&mut self, address: u32) -> Result<(), RunError> {
        if self.registers.mode == Mode::Handler && (address >> 28 == 0xf) {
            self.exception_return(address & 0x0fffffff)
        } else {
            self.blx_write_pc(address);
            Ok(())
        }
    }
pub fn condition_passed(&self) -> bool {
if let Some(condition) = self.registers.psr.it_state().current_condition() {
self.registers.psr.test(condition)
} else {
true
}
}
pub fn coproc_accepted(&mut self, cp: u8, ins: u32) -> Option<Rc<RefCell<dyn Coprocessor>>> {
debug_assert!(cp < 16);
let coprocessor = self.coprocessors[cp as usize].clone()?;
if !coprocessor.borrow().accepted(ins) {
return None;
}
Some(coprocessor)
}
    /// Raises the UsageFault taken when no coprocessor accepts an
    /// instruction, setting CFSR.NOCP.
    pub fn generate_coprocessor_exception(&mut self) {
        self.request_interrupt(Irq::UsageFault);
        self.system_control.borrow_mut().cfsr.set_nocp(true);
    }
    /// Records an exclusive reservation (load-exclusive side) for the
    /// granule containing `address`.
    pub fn set_exclusive_monitors(&mut self, address: u32, size: u32) {
        debug_assert!((size >= 1) && (size <= 4));
        let granule = self.local_monitor.granule;
        // The monitor tags the whole reservation granule, not the exact
        // access address.
        self.local_monitor.state = MonitorState::ExclusiveAccess {
            address: address.align(granule as usize),
        };
    }
pub fn exclusive_monitors_pass(&mut self, address: u32, size: u32) -> Result<bool, RunError> {
self.usage_fault_if_unaligned(address, size as usize)?;
if self.local_monitor.state == (MonitorState::ExclusiveAccess { address: address }) {
self.clear_exclusive_local();
return Ok(true);
}
return Ok(false);
}
    /// Drops any exclusive reservation held by the local monitor.
    pub fn clear_exclusive_local(&mut self) {
        self.local_monitor.state = MonitorState::OpenAccess;
    }
}
/// Options controlling how long `Emulator::run` executes.
#[derive(Default)]
pub struct RunOptions {
    // Maximum number of instructions to execute; `None` means unbounded.
    gas: Option<usize>,
}
impl RunOptions {
    /// Creates options with no instruction limit.
    pub fn new() -> Self {
        Self::default()
    }
    /// Builder-style setter: limit execution to `gas` instructions.
    pub fn gas(mut self, gas: usize) -> Self {
        self.gas = Some(gas);
        self
    }
}
/// Driver interface over a processor: pull events one at a time or run
/// until a stopping event.
pub trait Emulator {
    /// Steps the emulation until at least one event is available and
    /// returns it.
    fn next_event(&mut self) -> Result<Event, RunError>;
    /// Runs until a non-instruction event occurs, or until `options.gas`
    /// instructions have executed (returning `Ok(None)` in that case).
    fn run(&mut self, options: RunOptions) -> Result<Option<Event>, RunError> {
        let mut ins_count = 0;
        loop {
            // Gas limit check happens before fetching the next event.
            if let Some(count) = options.gas {
                if ins_count == count {
                    return Ok(None);
                }
            }
            let event = self.next_event()?;
            match event {
                // Any non-instruction event stops the run and is returned.
                Event::Hook { address: _ }
                | Event::Reset
                | Event::Break(_)
                | Event::DebugHint(_) => return Ok(Some(event.clone())),
                // Instructions are counted against the gas budget.
                Event::Instruction { ins: _ } => ins_count += 1,
            }
        }
    }
}
impl Emulator for Processor {
    /// Steps until the internal event queue is non-empty, then returns one
    /// event.
    ///
    /// NOTE(review): `Vec::pop` takes from the back, so multiple events
    /// produced by a single step (e.g. Instruction then Break) come out in
    /// reverse push order — confirm this LIFO draining is intended.
    fn next_event(&mut self) -> Result<Event, RunError> {
        while self.events.is_empty() {
            self.step()?;
        }
        Ok(self.events.pop().unwrap())
    }
}
/// Read-only register access by index: `processor[register_index]`.
impl Index<RegisterIndex> for Processor {
    type Output = u32;
    fn index(&self, index: RegisterIndex) -> &Self::Output {
        &self.registers[index]
    }
}
/// Core execution state driven by the WFE/WFI effects.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum State {
    Running,
    /// Parked by WaitForEvent until the event register is set.
    WaitingForEvent,
    /// Parked by WaitForInterrupt until an interrupt request arrives.
    WaitingForInterrupt,
}
/// Side effect reported by an executed instruction.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Effect {
    /// Normal fall-through; the usual PC adjustment applies.
    None,
    /// The instruction wrote the PC itself; skip the PC adjustment.
    Branch,
    /// Break with its 8-bit payload.
    Break(u8),
    /// Debug hint with its 8-bit payload.
    DebugHint(u8),
    /// Enter the waiting-for-event state (WFE).
    WaitForEvent,
    /// Enter the waiting-for-interrupt state (WFI).
    WaitForInterrupt,
}
/// Error returned when a requested mapping overlaps an existing one or
/// overflows the 32-bit address space.
#[derive(Debug)]
pub struct MapConflict;