Skip to main content

asmkit/core/
patch.rs

1use smallvec::SmallVec;
2
3use crate::{
4    AsmError,
5    core::{
6        arch_traits::Arch,
7        buffer::{CodeBufferFinalized, CodeOffset, LabelUse},
8    },
9};
10
11#[cfg(feature = "jit")]
12use crate::core::jit_allocator::{JitAllocator, Span};
/// Stable handle identifying a [`PatchBlock`] inside a `PatchCatalog`.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct PatchBlockId(u32);

impl PatchBlockId {
    /// Builds an id from a dense catalog index.
    /// The `as u32` cast silently truncates indices above `u32::MAX`.
    pub(crate) const fn from_index(index: usize) -> Self {
        PatchBlockId(index as u32)
    }

    /// Returns the id as a `usize` suitable for slice indexing.
    pub const fn index(self) -> usize {
        self.0 as usize
    }
}
25
/// Stable handle identifying a [`PatchSite`] inside a `PatchCatalog`.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct PatchSiteId(u32);

impl PatchSiteId {
    /// Builds an id from a dense catalog index.
    /// The `as u32` cast silently truncates indices above `u32::MAX`.
    pub(crate) const fn from_index(index: usize) -> Self {
        PatchSiteId(index as u32)
    }

    /// Returns the id as a `usize` suitable for slice indexing.
    pub const fn index(self) -> usize {
        self.0 as usize
    }
}
38
/// A contiguous region of emitted code reserved for later in-place rewriting
/// (see `LoadedPatchableCode::rewrite_block`).
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct PatchBlock {
    // Byte offset of the block from the start of the code buffer.
    pub offset: CodeOffset,
    // Reserved size of the block in bytes; rewrites may be shorter and are
    // NOP-padded up to this size.
    pub size: CodeOffset,
    // Alignment recorded for the block, in bytes — presumably the alignment
    // of `offset`; not consulted by the patching code visible in this file.
    pub align: CodeOffset,
}
45
/// A single retargetable fixup: one label-use-style relocation whose target
/// can be re-pointed after the code has been loaded into JIT memory.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct PatchSite {
    // Byte offset of the fixup within the code buffer.
    pub offset: CodeOffset,
    // Fixup encoding; drives reachability checks (`can_reach`), patch size
    // (`patch_size`) and the actual byte patching (`patch_with_addend`).
    pub kind: LabelUse,
    // Offset the site currently points at; kept in sync by
    // `LoadedPatchableCode::retarget_site`.
    pub current_target: CodeOffset,
    // Constant passed through to `patch_with_addend` when encoding the fixup.
    pub addend: i64,
}
53
/// Catalog of all patchable blocks and retargetable sites recorded for one
/// finalized code buffer, tagged with the architecture it was emitted for.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct PatchCatalog {
    // Target architecture; selects NOP encodings and minimum patch alignment.
    arch: Arch,
    // Rewriteable code regions, indexed by `PatchBlockId`.
    blocks: SmallVec<[PatchBlock; 4]>,
    // Retargetable fixups, indexed by `PatchSiteId`.
    sites: SmallVec<[PatchSite; 8]>,
}
60
61impl PatchCatalog {
62    pub(crate) fn with_parts(
63        arch: Arch,
64        blocks: SmallVec<[PatchBlock; 4]>,
65        sites: SmallVec<[PatchSite; 8]>,
66    ) -> Self {
67        Self {
68            arch,
69            blocks,
70            sites,
71        }
72    }
73
74    pub fn arch(&self) -> Arch {
75        self.arch
76    }
77
78    pub fn is_empty(&self) -> bool {
79        self.blocks.is_empty() && self.sites.is_empty()
80    }
81
82    pub fn blocks(&self) -> &[PatchBlock] {
83        &self.blocks
84    }
85
86    pub fn sites(&self) -> &[PatchSite] {
87        &self.sites
88    }
89
90    pub fn block(&self, id: PatchBlockId) -> Option<&PatchBlock> {
91        self.blocks.get(id.index())
92    }
93
94    pub fn site(&self, id: PatchSiteId) -> Option<&PatchSite> {
95        self.sites.get(id.index())
96    }
97
98    pub fn site_mut(&mut self, id: PatchSiteId) -> Option<&mut PatchSite> {
99        self.sites.get_mut(id.index())
100    }
101}
102
103pub fn minimum_patch_alignment(arch: Arch) -> CodeOffset {
104    match arch {
105        Arch::AArch64 | Arch::AArch64BE | Arch::RISCV32 | Arch::RISCV64 => 4,
106        _ => 1,
107    }
108}
109
110pub fn fill_with_nops(arch: Arch, buffer: &mut [u8]) -> Result<(), AsmError> {
111    let pattern: &[u8] = match arch {
112        Arch::X86 | Arch::X64 => &[0x90],
113        Arch::AArch64 => &[0x1f, 0x20, 0x03, 0xd5],
114        Arch::AArch64BE => &[0xd5, 0x03, 0x20, 0x1f],
115        Arch::RISCV32 | Arch::RISCV64 => &[0x13, 0x00, 0x00, 0x00],
116        _ => return Err(AsmError::InvalidArgument),
117    };
118
119    if pattern.len() > 1 && !buffer.len().is_multiple_of(pattern.len()) {
120        return Err(AsmError::InvalidArgument);
121    }
122
123    for chunk in buffer.chunks_mut(pattern.len()) {
124        chunk.copy_from_slice(pattern);
125    }
126
127    Ok(())
128}
129
/// Patchable machine code that has been copied into executable JIT memory,
/// paired with the catalog describing its patchable sites and blocks.
#[cfg(feature = "jit")]
pub struct LoadedPatchableCode {
    // Patch metadata for the loaded code; site `current_target` fields are
    // updated as sites are retargeted.
    catalog: PatchCatalog,
    // The JIT allocation (rx/rw mappings) holding the code.
    span: Span,
}
135
#[cfg(feature = "jit")]
impl LoadedPatchableCode {
    /// Wraps an allocated JIT `span` together with its patch `catalog`.
    pub(crate) fn new(span: Span, catalog: PatchCatalog) -> Self {
        Self { catalog, span }
    }

    /// Borrows the catalog describing the patchable sites and blocks.
    pub fn patch_catalog(&self) -> &PatchCatalog {
        &self.catalog
    }

    /// Pointer to the read-execute mapping of the loaded code.
    pub const fn rx(&self) -> *const u8 {
        self.span.rx()
    }

    /// Pointer to the read-write mapping of the loaded code.
    pub const fn rw(&self) -> *mut u8 {
        self.span.rw()
    }

    /// Borrows the underlying JIT memory span.
    pub const fn span(&self) -> &Span {
        &self.span
    }

    /// Re-points patch site `id` at `target_offset` (an offset within this
    /// code region), rewriting the encoded fixup in place and recording the
    /// new target in the catalog.
    ///
    /// # Errors
    /// - `AsmError::InvalidArgument` if `id` is not in the catalog.
    /// - `AsmError::TooLarge` if the site's encoding cannot reach the target.
    pub fn retarget_site(
        &mut self,
        jit_allocator: &mut JitAllocator,
        id: PatchSiteId,
        target_offset: CodeOffset,
    ) -> Result<(), AsmError> {
        let site = *self.catalog.site(id).ok_or(AsmError::InvalidArgument)?;
        if !site.kind.can_reach(site.offset, target_offset) {
            return Err(AsmError::TooLarge);
        }

        // SAFETY: `site.offset` and `patch_size` come from the catalog built
        // for this exact span, so the patched byte range is assumed to lie
        // within the span's rw mapping for the duration of the write callback.
        unsafe {
            jit_allocator.write(&mut self.span, |span| {
                let patch_size = site.kind.patch_size();
                let patch_ptr = span.rw().wrapping_add(site.offset as usize);
                let patch_slice = core::slice::from_raw_parts_mut(patch_ptr, patch_size);
                site.kind
                    .patch_with_addend(patch_slice, site.offset, target_offset, site.addend);
            })?;
        }

        self.catalog.site_mut(id).unwrap().current_target = target_offset;
        Ok(())
    }

    /// Replaces the contents of patch block `id` with `bytes`, padding any
    /// leftover tail of the block with the architecture's NOP pattern.
    ///
    /// # Errors
    /// - `AsmError::TooLarge` if `bytes` exceeds the block's reserved size.
    /// - `AsmError::InvalidArgument` if `id` is not in the catalog, or if the
    ///   leftover tail is not a whole number of NOP instructions on a
    ///   fixed-width ISA (previously this case panicked inside the write
    ///   callback instead of returning an error).
    pub fn rewrite_block(
        &mut self,
        jit_allocator: &mut JitAllocator,
        id: PatchBlockId,
        bytes: &[u8],
    ) -> Result<(), AsmError> {
        let block = *self.catalog.block(id).ok_or(AsmError::InvalidArgument)?;
        if bytes.len() > block.size as usize {
            return Err(AsmError::TooLarge);
        }

        // Validate the NOP tail *before* entering the write callback: on
        // fixed-width ISAs the tail must be a multiple of the instruction
        // size or `fill_with_nops` fails, and the old code turned that into
        // a panic mid-write. `minimum_patch_alignment` matches the NOP
        // pattern sizes used by `fill_with_nops` (1 for x86, 4 for
        // AArch64/RISC-V).
        let tail_len = block.size as usize - bytes.len();
        let nop_unit = minimum_patch_alignment(self.catalog.arch()) as usize;
        if nop_unit > 1 && tail_len % nop_unit != 0 {
            return Err(AsmError::InvalidArgument);
        }

        // SAFETY: `block.offset`/`block.size` come from the catalog built for
        // this span, so both the copied range and the NOP tail are assumed to
        // stay within the span's rw mapping; `bytes.len() <= block.size` was
        // checked above, so the two writes are in-bounds and non-overlapping.
        unsafe {
            jit_allocator.write(&mut self.span, |span| {
                let block_ptr = span.rw().wrapping_add(block.offset as usize);
                block_ptr.copy_from_nonoverlapping(bytes.as_ptr(), bytes.len());
                let tail = core::slice::from_raw_parts_mut(
                    block_ptr.wrapping_add(bytes.len()),
                    tail_len,
                );
                fill_with_nops(self.catalog.arch(), tail)
                    .expect("tail length validated against the arch NOP size");
            })?;
        }

        Ok(())
    }
}
209
210impl CodeBufferFinalized {
211    pub fn patch_catalog(&self) -> &PatchCatalog {
212        &self.patch_catalog
213    }
214
215    #[cfg(feature = "jit")]
216    pub fn allocate_patched(
217        &self,
218        jit_allocator: &mut JitAllocator,
219    ) -> Result<LoadedPatchableCode, AsmError> {
220        let span = self.allocate(jit_allocator)?;
221        Ok(LoadedPatchableCode::new(span, self.patch_catalog.clone()))
222    }
223}