midenc_codegen_masm/emulator/breakpoints.rs

use std::collections::BTreeSet;

use midenc_hir::FunctionIdent;
use rustc_hash::{FxHashMap, FxHashSet};

use super::{Addr, BreakpointEvent, EmulatorEvent, Instruction, InstructionPointer};
use crate::BlockId;

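/// A breakpoint which can be set in the emulator, describing a condition under
/// which execution should be suspended.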
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum Breakpoint {
    /// Break at every cycle
    All,
    /// Break when the given cycle count is reached
    Cycle(usize),
    /// Break when the given instruction pointer is reached
    At(InstructionPointer),
    /// Break whenever a loop is entered
    Loops,
    /// Break whenever the given function is called
    Called(FunctionIdent),
    /// Break when the memory watched by the given watchpoint is written
    Watch(WatchpointId),
}

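/// A handle returned by [BreakpointManager::watch], used to update or remove
/// the corresponding [Watchpoint] later.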
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct WatchpointId(usize);
impl WatchpointId {
    #[inline]
    const fn index(self) -> usize {
        self.0
    }
}

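/// A watched region of memory, described by a start address, a size in bytes,
/// and the [WatchMode] that determines how writes to the region are handled.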
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct Watchpoint {
    pub addr: u32,
    pub size: u32,
    mode: WatchMode,
}
impl Watchpoint {
    pub const fn new(addr: u32, size: u32, mode: WatchMode) -> Self {
        Self { addr, size, mode }
    }

    pub fn mode(&self) -> WatchMode {
        self.mode
    }
}

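/// Determines how the emulator responds when a watched region of memory is written.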
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum WatchMode {
    /// Suspend execution when the watched memory is written
    Break,
    /// Report writes to the watched memory as warnings
    Warn,
    /// Report writes to the watched memory as events
    Event,
    /// The watchpoint is inactive
    Disabled,
}

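/// Tracks the breakpoints and watchpoints set in the emulator, and translates
/// raw [EmulatorEvent]s into [BreakpointEvent]s when one of them is hit.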
#[derive(Debug, Default)]
pub struct BreakpointManager {
    /// Break at every cycle
    break_every_cycle: bool,
    /// Break when control returns from the current function
    pub break_on_return: bool,
    /// Break whenever a loop is entered
    break_loops: bool,
    /// Break when any of these cycle counts is reached
    break_at_cycles: BTreeSet<usize>,
    /// Break when any of these functions is called
    break_on_calls: FxHashSet<FunctionIdent>,
    /// Watchpoints covering regions of memory
    break_on_writes: Vec<Watchpoint>,
    /// Break when any of these instruction indices in the given block is reached
    break_on_reached: FxHashMap<BlockId, FxHashSet<usize>>,
}
impl BreakpointManager {
    /// Get an iterator over the registered watchpoints
    pub fn watchpoints(&self) -> impl Iterator<Item = Watchpoint> + '_ {
        self.break_on_writes.iter().copied()
    }

    /// Returns true if any watchpoints are registered
    #[allow(unused)]
    pub fn has_watchpoints(&self) -> bool {
        !self.break_on_writes.is_empty()
    }

    /// Returns true if any instruction-pointer breakpoints are set
    pub fn has_break_on_reached(&self) -> bool {
        !self.break_on_reached.is_empty()
    }

    /// Get an iterator over the breakpoints that are currently set
    pub fn breakpoints(&self) -> impl Iterator<Item = Breakpoint> {
        BreakpointIter::new(self)
    }

    /// Watch `size` bytes of memory starting at `addr`, using the given mode,
    /// returning an id that can be used to update or remove the watchpoint later
    pub fn watch(&mut self, addr: u32, size: u32, mode: WatchMode) -> WatchpointId {
        let id = WatchpointId(self.break_on_writes.len());
        self.break_on_writes.push(Watchpoint { addr, size, mode });
        id
    }

    /// Change the mode of the watchpoint identified by `id`
    pub fn watch_mode(&mut self, id: WatchpointId, mode: WatchMode) {
        self.break_on_writes[id.index()].mode = mode;
    }

    /// Disable the watchpoint identified by `id`
    pub fn unwatch(&mut self, id: WatchpointId) {
        self.break_on_writes[id.index()].mode = WatchMode::Disabled;
    }

    /// Remove all watchpoints, invalidating any previously returned [WatchpointId]s
    pub fn unwatch_all(&mut self) {
        self.break_on_writes.clear();
    }

    /// Set the given breakpoint
    pub fn set(&mut self, bp: Breakpoint) {
        use std::collections::hash_map::Entry;

        match bp {
            Breakpoint::All => {
                self.break_every_cycle = true;
            }
            Breakpoint::Cycle(cycle) => {
                self.break_at_cycles.insert(cycle);
            }
            Breakpoint::At(ip) => match self.break_on_reached.entry(ip.block) {
                Entry::Vacant(entry) => {
                    entry.insert(FxHashSet::from_iter([ip.index]));
                }
                Entry::Occupied(mut entry) => {
                    entry.get_mut().insert(ip.index);
                }
            },
            Breakpoint::Loops => {
                self.break_loops = true;
            }
            Breakpoint::Called(id) => {
                self.break_on_calls.insert(id);
            }
            Breakpoint::Watch(id) => {
                self.break_on_writes[id.index()].mode = WatchMode::Break;
            }
        }
    }

    /// Remove the given breakpoint
    pub fn unset(&mut self, bp: Breakpoint) {
        match bp {
            Breakpoint::All => {
                self.break_every_cycle = false;
            }
            Breakpoint::Cycle(cycle) => {
                self.break_at_cycles.remove(&cycle);
            }
            Breakpoint::At(ip) => {
                if let Some(indices) = self.break_on_reached.get_mut(&ip.block) {
                    indices.remove(&ip.index);
                }
            }
            Breakpoint::Loops => {
                self.break_loops = false;
            }
            Breakpoint::Called(id) => {
                self.break_on_calls.remove(&id);
            }
            Breakpoint::Watch(id) => {
                self.unwatch(id);
            }
        }
    }

    /// Remove all breakpoints; watchpoints are left untouched
    pub fn unset_all(&mut self) {
        self.break_every_cycle = false;
        self.break_at_cycles.clear();
        self.break_loops = false;
        self.break_on_calls.clear();
        self.break_on_reached.clear();
    }

    /// Remove all breakpoints and watchpoints
    pub fn clear(&mut self) {
        self.unset_all();
        self.unwatch_all();
    }

    /// Set whether or not to break when control returns from the current function
    pub fn break_on_return(&mut self, value: bool) {
        self.break_on_return = value;
    }

    /// Respond to the given emulator event, returning a [BreakpointEvent] if
    /// the event triggers one of the breakpoints that are currently set.
    ///
    /// `ip`, if provided, is the instruction the emulator is positioned at after the event.
    pub fn handle_event(
        &mut self,
        event: EmulatorEvent,
        ip: Option<Instruction>,
    ) -> Option<BreakpointEvent> {
        use core::cmp::Ordering;

        match event {
            EmulatorEvent::EnterFunction(id) => {
                if self.break_on_calls.contains(&id) {
                    Some(BreakpointEvent::Called(id))
                } else {
                    None
                }
            }
            EmulatorEvent::EnterLoop(block) if self.break_loops => {
                Some(BreakpointEvent::Loop(block))
            }
            EmulatorEvent::EnterLoop(block) => {
                // Break if a breakpoint was set at the first instruction of the loop body
                if self.should_break_at(block, 0) {
                    Some(BreakpointEvent::Reached(InstructionPointer::new(block)))
                } else {
                    None
                }
            }
            EmulatorEvent::CycleStart(cycle) => {
                // Cycle breakpoints fire once: drop the matching entry, as well as
                // any entries for cycles that have already been passed
                let mut cycle_hit = false;
                self.break_at_cycles.retain(|break_at_cycle| match cycle.cmp(break_at_cycle) {
                    Ordering::Equal => {
                        cycle_hit = true;
                        false
                    }
                    Ordering::Greater => false,
                    Ordering::Less => true,
                });
                if cycle_hit {
                    Some(BreakpointEvent::ReachedCycle(cycle))
                } else if self.break_every_cycle {
                    Some(BreakpointEvent::Step)
                } else {
                    None
                }
            }
            EmulatorEvent::ExitFunction(_) if self.break_on_return => {
                Some(BreakpointEvent::StepOut)
            }
            EmulatorEvent::ExitFunction(_)
            | EmulatorEvent::ExitLoop(_)
            | EmulatorEvent::Jump(_) => match ip {
                Some(Instruction { ip, .. }) => {
                    let break_at_current_ip = self.should_break_at(ip.block, ip.index);
                    if break_at_current_ip {
                        Some(BreakpointEvent::Reached(ip))
                    } else if self.break_every_cycle {
                        Some(BreakpointEvent::Step)
                    } else {
                        None
                    }
                }
                None => {
                    if self.break_every_cycle {
                        Some(BreakpointEvent::Step)
                    } else {
                        None
                    }
                }
            },
            EmulatorEvent::MemoryWrite { addr, size } => {
                self.matches_watchpoint(addr, size).copied().map(BreakpointEvent::Watch)
            }
            EmulatorEvent::Stopped | EmulatorEvent::Suspended => None,
            EmulatorEvent::Breakpoint(bp) => Some(bp),
        }
    }

    /// Returns true if a breakpoint is set at the given block and instruction index
    pub fn should_break_at(&self, block: BlockId, index: usize) -> bool {
        self.break_on_reached
            .get(&block)
            .map(|indices| indices.contains(&index))
            .unwrap_or(false)
    }

    /// Returns true if a write of `size` bytes at `addr` hits a watchpoint in [WatchMode::Break]
    #[inline]
    #[allow(unused)]
    pub fn should_break_on_write(&self, addr: Addr, size: u32) -> bool {
        self.matches_watchpoint(addr, size).is_some()
    }

    /// Find the first watchpoint in [WatchMode::Break] whose watched range
    /// intersects the written range.
    ///
    /// NOTE: the bounds checks are inclusive, so a write immediately adjacent
    /// to a watched range also matches.
    fn matches_watchpoint(&self, addr: Addr, size: u32) -> Option<&Watchpoint> {
        let end_addr = addr + size;
        self.break_on_writes.iter().find(|wp| {
            let wp_end = wp.addr + wp.size;
            if let WatchMode::Break = wp.mode {
                addr <= wp_end && end_addr >= wp.addr
            } else {
                false
            }
        })
    }
}

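/// An iterator over the breakpoints currently set in a [BreakpointManager],
/// materialized eagerly so it does not borrow the manager.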
struct BreakpointIter {
    bps: Vec<Breakpoint>,
}
impl BreakpointIter {
    fn new(bpm: &BreakpointManager) -> Self {
        let mut iter = BreakpointIter {
            bps: Vec::with_capacity(4),
        };
        iter.bps.extend(bpm.break_on_writes.iter().enumerate().filter_map(|(i, wp)| {
            if wp.mode == WatchMode::Break {
                Some(Breakpoint::Watch(WatchpointId(i)))
            } else {
                None
            }
        }));
        iter.bps.extend(bpm.break_at_cycles.iter().copied().map(Breakpoint::Cycle));
        for (block, indices) in bpm.break_on_reached.iter() {
            if indices.is_empty() {
                continue;
            }
            let block = *block;
            for index in indices.iter().copied() {
                iter.bps.push(Breakpoint::At(InstructionPointer { block, index }))
            }
        }
        if bpm.break_loops {
            iter.bps.push(Breakpoint::Loops);
        }
        if bpm.break_every_cycle {
            iter.bps.push(Breakpoint::All);
        }
        iter
    }
}
impl Iterator for BreakpointIter {
    type Item = Breakpoint;

    fn next(&mut self) -> Option<Self::Item> {
        self.bps.pop()
    }
}
impl core::iter::FusedIterator for BreakpointIter {}
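
// The following test is not part of the original module: it is a minimal usage
// sketch for `BreakpointManager`, assuming `Addr` is `u32` and that the
// `EmulatorEvent`/`BreakpointEvent` variants are constructible exactly as they
// are matched in `handle_event` above.
#[cfg(test)]
mod usage_sketch {
    use super::*;

    #[test]
    fn cycle_and_watch_breakpoints() {
        let mut bpm = BreakpointManager::default();

        // Break when cycle 10 starts; the breakpoint is consumed once it fires.
        bpm.set(Breakpoint::Cycle(10));
        assert!(matches!(
            bpm.handle_event(EmulatorEvent::CycleStart(10), None),
            Some(BreakpointEvent::ReachedCycle(10))
        ));
        assert!(bpm.handle_event(EmulatorEvent::CycleStart(10), None).is_none());

        // Watch 16 bytes at 0x100 and break on writes that touch that range.
        let wp = bpm.watch(0x100, 16, WatchMode::Break);
        assert!(bpm.should_break_on_write(0x104, 4));

        // Disabling the watchpoint stops it from matching.
        bpm.unwatch(wp);
        assert!(!bpm.should_break_on_write(0x104, 4));
    }
}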