pub struct CodeBuffer { /* private fields */ }
A buffer of output to be produced, fixed up, and then emitted to a CodeSink in bulk.
This struct uses SmallVecs to support small-ish function bodies without
any heap allocation. As such, it will be several kilobytes large. This is
likely fine as long as it is stack-allocated for function emission then
thrown away; but beware if many buffer objects are retained persistently.
Implementations

impl CodeBuffer
pub fn new() -> Self
pub fn with_env(env: Environment) -> Self
pub fn clear(&mut self)
pub fn env(&self) -> &Environment
pub fn env_mut(&mut self) -> &mut Environment
pub fn data(&self) -> &[u8]
pub fn data_mut(&mut self) -> &mut [u8]
pub fn relocs(&self) -> &[AsmReloc]
pub fn put1(&mut self, value: u8)
pub fn put2(&mut self, value: u16)
pub fn put4(&mut self, value: u32)
pub fn put8(&mut self, value: u64)
pub fn write_u8(&mut self, value: u8)
pub fn write_u16(&mut self, value: u16)
pub fn write_u32(&mut self, value: u32)
pub fn write_u64(&mut self, value: u64)
pub fn add_symbol( &mut self, name: impl Into<ExternalName>, distance: RelocDistance, ) -> Sym
pub fn symbol_distance(&self, sym: Sym) -> RelocDistance
pub fn symbol_name(&self, sym: Sym) -> &ExternalName
pub fn get_label(&mut self) -> Label
pub fn get_label_for_constant(&mut self, constant: Constant) -> Label
pub fn add_constant(&mut self, constant: impl Into<ConstantData>) -> Constant
pub fn use_label_at_offset( &mut self, offset: CodeOffset, label: Label, kind: LabelUse, )
pub fn align_to(&mut self, align_to: CodeOffset)
Align up to the given alignment.
pub fn cur_offset(&self) -> CodeOffset
pub fn bind_label(&mut self, label: Label)
pub fn label_offset(&self, label: Label) -> u32
pub fn add_reloc(&mut self, kind: Reloc, target: RelocTarget, addend: i64)
pub fn add_reloc_at_offset( &mut self, offset: CodeOffset, kind: Reloc, target: RelocTarget, addend: i64, )
pub fn reserve_patch_block( &mut self, size: CodeOffset, align: CodeOffset, ) -> Result<PatchBlockId, AsmError>
pub fn record_patch_block( &mut self, offset: CodeOffset, size: CodeOffset, align: CodeOffset, ) -> PatchBlockId
pub fn record_patch_site( &mut self, offset: CodeOffset, kind: LabelUse, target_offset: CodeOffset, ) -> PatchSiteId
pub fn record_label_patch_site( &mut self, offset: CodeOffset, label: Label, kind: LabelUse, ) -> PatchSiteId
pub fn emit_veneer(&mut self, label: Label, offset: CodeOffset, kind: LabelUse)
Emits a "veneer": patches the `kind`-use code at `offset` to jump to `label`.
This will generate extra machine code, using kind, to get a
larger-jump-kind than kind allows. The code at offset is then
patched to jump to our new code, and then the new code is enqueued for
a fixup to get processed at some later time.
pub fn get_appended_space(&mut self, len: usize) -> &mut [u8]
Reserve appended space and return a mutable slice referring to it.
pub fn island_needed(&mut self, distance: CodeOffset) -> bool
Is an island needed within the next N bytes?
pub fn emit_island(&mut self, distance: CodeOffset)
Emit all pending constants and required pending veneers.