1use smallvec::SmallVec;
2
3use crate::{
4 core::{
5 arch_traits::Arch,
6 buffer::{CodeBufferFinalized, CodeOffset, LabelUse},
7 jit_allocator::{JitAllocator, Span},
8 },
9 AsmError,
10};
11
/// Index-based handle identifying one entry in a catalog's block list.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct PatchBlockId(u32);

impl PatchBlockId {
    /// Wraps a catalog slot index as an id.
    pub(crate) const fn from_index(index: usize) -> Self {
        PatchBlockId(index as u32)
    }

    /// Returns the catalog slot index this id designates.
    pub const fn index(self) -> usize {
        self.0 as usize
    }
}
24
/// Index-based handle identifying one entry in a catalog's site list.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct PatchSiteId(u32);

impl PatchSiteId {
    /// Wraps a catalog slot index as an id.
    pub(crate) const fn from_index(index: usize) -> Self {
        PatchSiteId(index as u32)
    }

    /// Returns the catalog slot index this id designates.
    pub const fn index(self) -> usize {
        self.0 as usize
    }
}
37
/// A replaceable region of emitted code, as recorded in a [`PatchCatalog`].
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct PatchBlock {
    // Byte offset of the block within the code buffer.
    pub offset: CodeOffset,
    // Size of the block in bytes; rewrites must not exceed this.
    pub size: CodeOffset,
    // Alignment recorded for the block.
    // NOTE(review): presumably the required alignment of `offset` — confirm
    // against the emitter that populates the catalog.
    pub align: CodeOffset,
}
44
/// A single patchable relocation recorded in the emitted code.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct PatchSite {
    // Byte offset of the patched field within the code buffer.
    pub offset: CodeOffset,
    // Relocation kind; supplies the patch size and reachability check
    // used when retargeting.
    pub kind: LabelUse,
    // Offset the site currently points at; updated after a successful
    // `retarget_site`.
    pub current_target: CodeOffset,
    // Constant addend passed through to `patch_with_addend`.
    pub addend: i64,
}
52
53#[derive(Clone, Debug, PartialEq, Eq)]
54pub struct PatchCatalog {
55 arch: Arch,
56 blocks: SmallVec<[PatchBlock; 4]>,
57 sites: SmallVec<[PatchSite; 8]>,
58}
59
60impl PatchCatalog {
61 pub(crate) fn with_parts(
62 arch: Arch,
63 blocks: SmallVec<[PatchBlock; 4]>,
64 sites: SmallVec<[PatchSite; 8]>,
65 ) -> Self {
66 Self {
67 arch,
68 blocks,
69 sites,
70 }
71 }
72
73 pub fn arch(&self) -> Arch {
74 self.arch
75 }
76
77 pub fn is_empty(&self) -> bool {
78 self.blocks.is_empty() && self.sites.is_empty()
79 }
80
81 pub fn blocks(&self) -> &[PatchBlock] {
82 &self.blocks
83 }
84
85 pub fn sites(&self) -> &[PatchSite] {
86 &self.sites
87 }
88
89 pub fn block(&self, id: PatchBlockId) -> Option<&PatchBlock> {
90 self.blocks.get(id.index())
91 }
92
93 pub fn site(&self, id: PatchSiteId) -> Option<&PatchSite> {
94 self.sites.get(id.index())
95 }
96
97 fn site_mut(&mut self, id: PatchSiteId) -> Option<&mut PatchSite> {
98 self.sites.get_mut(id.index())
99 }
100}
101
102pub fn minimum_patch_alignment(arch: Arch) -> CodeOffset {
103 match arch {
104 Arch::AArch64 | Arch::AArch64BE | Arch::RISCV32 | Arch::RISCV64 => 4,
105 _ => 1,
106 }
107}
108
109pub fn fill_with_nops(arch: Arch, buffer: &mut [u8]) -> Result<(), AsmError> {
110 let pattern: &[u8] = match arch {
111 Arch::X86 | Arch::X64 => &[0x90],
112 Arch::AArch64 => &[0x1f, 0x20, 0x03, 0xd5],
113 Arch::AArch64BE => &[0xd5, 0x03, 0x20, 0x1f],
114 Arch::RISCV32 | Arch::RISCV64 => &[0x13, 0x00, 0x00, 0x00],
115 _ => return Err(AsmError::InvalidArgument),
116 };
117
118 if pattern.len() > 1 && buffer.len() % pattern.len() != 0 {
119 return Err(AsmError::InvalidArgument);
120 }
121
122 for chunk in buffer.chunks_mut(pattern.len()) {
123 chunk.copy_from_slice(pattern);
124 }
125
126 Ok(())
127}
128
/// Code that has been placed in a JIT allocation, carrying the catalog
/// needed to patch it in place afterwards.
pub struct LoadedPatchableCode {
    // Patch metadata; `current_target` entries are updated as sites are
    // retargeted.
    catalog: PatchCatalog,
    // The owning JIT allocation (rx/rw mapping of the loaded code).
    span: Span,
}
133
134impl LoadedPatchableCode {
135 pub(crate) fn new(span: Span, catalog: PatchCatalog) -> Self {
136 Self { catalog, span }
137 }
138
139 pub fn patch_catalog(&self) -> &PatchCatalog {
140 &self.catalog
141 }
142
143 pub const fn rx(&self) -> *const u8 {
144 self.span.rx()
145 }
146
147 pub const fn rw(&self) -> *mut u8 {
148 self.span.rw()
149 }
150
151 pub const fn span(&self) -> &Span {
152 &self.span
153 }
154
155 pub fn retarget_site(
156 &mut self,
157 jit_allocator: &mut JitAllocator,
158 id: PatchSiteId,
159 target_offset: CodeOffset,
160 ) -> Result<(), AsmError> {
161 let site = *self.catalog.site(id).ok_or(AsmError::InvalidArgument)?;
162 if !site.kind.can_reach(site.offset, target_offset) {
163 return Err(AsmError::TooLarge);
164 }
165
166 unsafe {
167 jit_allocator.write(&mut self.span, |span| {
168 let patch_size = site.kind.patch_size();
169 let patch_ptr = span.rw().wrapping_add(site.offset as usize);
170 let patch_slice = core::slice::from_raw_parts_mut(patch_ptr, patch_size);
171 site.kind
172 .patch_with_addend(patch_slice, site.offset, target_offset, site.addend);
173 })?;
174 }
175
176 self.catalog.site_mut(id).unwrap().current_target = target_offset;
177 Ok(())
178 }
179
180 pub fn rewrite_block(
181 &mut self,
182 jit_allocator: &mut JitAllocator,
183 id: PatchBlockId,
184 bytes: &[u8],
185 ) -> Result<(), AsmError> {
186 let block = *self.catalog.block(id).ok_or(AsmError::InvalidArgument)?;
187 if bytes.len() > block.size as usize {
188 return Err(AsmError::TooLarge);
189 }
190
191 unsafe {
192 jit_allocator.write(&mut self.span, |span| {
193 let block_ptr = span.rw().wrapping_add(block.offset as usize);
194 block_ptr.copy_from_nonoverlapping(bytes.as_ptr(), bytes.len());
195 let tail = core::slice::from_raw_parts_mut(
196 block_ptr.wrapping_add(bytes.len()),
197 block.size as usize - bytes.len(),
198 );
199 fill_with_nops(self.catalog.arch(), tail).expect("validated patch block size");
200 })?;
201 }
202
203 Ok(())
204 }
205}
206
207impl CodeBufferFinalized {
208 pub fn patch_catalog(&self) -> &PatchCatalog {
209 &self.patch_catalog
210 }
211
212 pub fn allocate_patched(
213 &self,
214 jit_allocator: &mut JitAllocator,
215 ) -> Result<LoadedPatchableCode, AsmError> {
216 let span = self.allocate(jit_allocator)?;
217 Ok(LoadedPatchableCode::new(span, self.patch_catalog.clone()))
218 }
219}