// sleigh_rs/semantic/varnode.rs
use std::collections::HashMap;
2
3use crate::meaning::Meaning;
4use crate::semantic::{
5 AttachLiteralId, AttachVarnodeId, SpaceId, ValueFmt, VarnodeId,
6};
7use crate::{
8 ContextId, FieldBits, NumberNonZeroUnsigned, NumberUnsigned, Span,
9};
10
11use super::inner::Sleigh;
12
/// A slice of bits taken out of a varnode.
#[derive(Clone, Debug)]
pub struct Bitrange {
    /// Source location where this bitrange was declared.
    pub location: Span,
    /// Which bits of the varnode are selected.
    pub bits: FieldBits,
    /// The varnode the bits are taken from.
    pub varnode: VarnodeId,
}
19
/// A context variable: a named bit-range inside a varnode, with an
/// optional attachment describing how its raw value is interpreted.
#[derive(Clone, Debug)]
pub struct Context {
    pub(crate) name: Box<str>,
    /// The bits of the underlying varnode this context occupies.
    pub bitrange: Bitrange,
    /// NOTE(review): presumably the sleigh `noflow` attribute (value does
    /// not flow into following instructions) — confirm against the parser.
    pub noflow: bool,
    /// How the raw value is interpreted (format, varnode list, or literal list).
    pub attach: ContextAttach,
}
28
29impl Context {
30 pub fn name(&self) -> &str {
31 &self.name
32 }
33
34 pub fn is_signed(&self) -> bool {
35 match self.attach {
36 ContextAttach::NoAttach(fmt) => fmt.signed,
37 ContextAttach::Varnode(_) | ContextAttach::Literal(_) => false,
38 }
39 }
40
41 pub fn meaning(&self) -> Meaning {
42 match self.attach {
43 ContextAttach::NoAttach(fmt) => Meaning::NoAttach(fmt),
44 ContextAttach::Varnode(id) => Meaning::Varnode(id),
45 ContextAttach::Literal(id) => Meaning::Literal(id),
46 }
47 }
48}
49
/// How a context variable's raw value is interpreted.
#[derive(Clone, Copy, Debug)]
pub enum ContextAttach {
    /// No attachment: the value is used directly with the given format.
    NoAttach(ValueFmt),
    /// The value selects an entry in an attached varnode list.
    Varnode(AttachVarnodeId),
    /// The value selects an entry in an attached literal (name) list.
    Literal(AttachLiteralId),
}
61
/// A varnode: a named, fixed-size region at an address inside a space.
#[derive(Clone, Debug)]
pub struct Varnode {
    pub(crate) name: Box<str>,
    /// Source location where this varnode was declared.
    pub location: Span,
    /// Offset of the varnode inside its space.
    pub address: NumberUnsigned,
    /// Size of the varnode in bytes (always non-zero).
    pub len_bytes: NumberNonZeroUnsigned,
    /// The space this varnode lives in.
    pub space: SpaceId,
}
73
74impl Varnode {
75 pub fn name(&self) -> &str {
76 &self.name
77 }
78}
79
/// Maps every context variable to a bit position inside a single packed
/// "context memory" blob.
#[derive(Debug, Clone)]
pub struct ContextMemoryMapping {
    /// Bits each context occupies inside the packed context memory.
    pub(crate) mapping: HashMap<ContextId, FieldBits>,
    /// Total number of bits used by the packed context memory.
    pub memory_bits: NumberUnsigned,
}
87
impl ContextMemoryMapping {
    /// Build the packed context-memory layout for every context in `sleigh`.
    ///
    /// Contexts are grouped by the varnode they live in, each group is
    /// sorted by (start bit, length), and the groups are then laid out
    /// sequentially into one bit vector. Two contexts of the same varnode
    /// whose source bitranges overlap are given overlapping memory bits
    /// (same relative offset); otherwise a context is appended directly
    /// after the previously placed one.
    ///
    /// NOTE(review): `context_mapping` is a `HashMap`, and `into_iter()`
    /// visits its entries in an unspecified order — the resulting layout
    /// may differ between runs/builds. Confirm whether downstream code
    /// requires a deterministic layout; if so, iterate varnodes in a
    /// stable order.
    pub(crate) fn map_all(sleigh: &Sleigh) -> Self {
        // Group context ids by the varnode that backs them.
        let mut context_mapping: HashMap<VarnodeId, Vec<ContextId>> =
            HashMap::new();
        for (i, context) in sleigh.contexts.iter().enumerate() {
            context_mapping
                .entry(context.bitrange.varnode)
                .or_insert(vec![])
                .push(ContextId(i));
        }

        // Within each varnode, order contexts by start bit, then length,
        // so overlap detection below only has to look at the previous one.
        for contexts in context_mapping.values_mut() {
            contexts.sort_unstable_by(|x, y| {
                let x = &sleigh.context(*x).bitrange.bits;
                let y = &sleigh.context(*y).bitrange.bits;
                match x.start().cmp(&y.start()) {
                    std::cmp::Ordering::Equal => {
                        x.len().get().cmp(&y.len().get())
                    }
                    x => x,
                }
            });
        }

        // Assign each context its bits inside the packed memory.
        let mut mem_mapping: Vec<(ContextId, FieldBits)> =
            Vec::with_capacity(sleigh.contexts.len());
        for (_varnode_id, contexts) in context_mapping.into_iter() {
            for current_context_id in contexts.into_iter() {
                let current_context =
                    &sleigh.context(current_context_id).bitrange.bits;
                // First context overall starts at bit 0.
                let Some((last_context_id, last_bits)) = mem_mapping.last() else {
                    mem_mapping.push((
                        current_context_id,
                        FieldBits::new(0, current_context.len().get())
                    ));
                    continue
                };
                let last_context =
                    &sleigh.context(*last_context_id).bitrange.bits;
                let current_context_start_bit = current_context.start();
                // Overlapping source bitranges share memory bits at the
                // same relative offset; disjoint ones are appended.
                //
                // NOTE(review): `last_context` can belong to a different
                // varnode (the last context of the previously visited
                // group) — this comparison, and the subtraction below
                // (which would underflow if the new group starts at a
                // lower bit), then mix bit positions of unrelated
                // varnodes. Confirm the intended invariant; consider
                // resetting the "last" tracking per varnode group.
                let new_start =
                    if last_context.end().get() > current_context_start_bit {
                        let offset =
                            current_context_start_bit - last_context.start();
                        last_bits.start() + offset
                    } else {
                        last_bits.end().get()
                    };
                mem_mapping.push((
                    current_context_id,
                    FieldBits::new(
                        new_start,
                        new_start + current_context.len().get(),
                    ),
                ));
            }
        }
        // Total size is the end bit of the last placed context (0 if none).
        let memory_bits = mem_mapping
            .last()
            .map(|(_, last)| last.end().get())
            .unwrap_or(0);
        Self {
            mapping: mem_mapping.into_iter().collect(),
            memory_bits,
        }
    }
    /// Bits occupied by context `id` inside the packed context memory.
    ///
    /// Panics if `id` was not part of the mapping (all contexts known to
    /// `map_all` are present, so a miss indicates an invalid id).
    pub fn context(&self, id: ContextId) -> FieldBits {
        self.mapping.get(&id).unwrap().to_owned()
    }
}