// hopper_runtime/context.rs
1//! Execution context for Hopper programs.
2//!
3//! `Context` is the canonical execution object that Hopper handlers receive.
4//! It provides structured access to the program_id, accounts, and instruction
5//! data, with indexed access and validation helpers.
6//!
7//! Keep it boring: `Context` is the container for accounts, instruction data,
8//! and the instruction-scoped segment borrow registry. `AccountView` owns the
9//! actual access operations.
10
11use crate::account::AccountView;
12use crate::address::Address;
13use crate::audit::AccountAudit;
14use crate::error::ProgramError;
15use crate::layout::LayoutContract;
16use crate::segment_borrow::SegmentBorrowRegistry;
17use crate::ProgramResult;
18
/// Execution context for a Hopper instruction handler.
///
/// Wraps the program_id, account slice, and instruction data into a single
/// object with structured access patterns.
///
/// # Authored flow
///
/// ```ignore
/// pub fn deposit(ctx: &Context, amount: u64) -> ProgramResult {
///     let authority = ctx.account(0)?;
///     let vault = ctx.account(1)?;
///
///     authority.require_signer()?;
///     vault.require_writable()?;
///     vault.check_disc(1)?;
///
///     let mut state = vault.load_mut::<VaultState>()?;
///     state.balance = state.balance.checked_add(amount).ok_or(ProgramError::ArithmeticOverflow)?;
///     Ok(())
/// }
/// ```
pub struct Context<'a> {
    /// The program's own address.
    pub program_id: &'a Address,
    /// All accounts passed to this instruction.
    ///
    /// Kept private so all indexed access flows through `account()` /
    /// `accounts()`, mapping out-of-range indices to
    /// `NotEnoughAccountKeys` instead of panicking.
    accounts: &'a [AccountView],
    /// Raw instruction data (past the discriminator byte, if applicable).
    pub instruction_data: &'a [u8],
    /// Segment-level borrow tracking for fine-grained access control.
    ///
    /// Enables safe concurrent mutable access to non-overlapping regions
    /// of the same account. This is what makes Hopper strictly safer than
    /// raw Pinocchio without adding meaningful CU overhead.
    /// Prefer the `borrows()` / `borrows_mut()` accessors in new code.
    pub(crate) segment_borrows: SegmentBorrowRegistry,
}
55
56impl<'a> Context<'a> {
57 /// Create a new context from the entrypoint parameters.
58 #[inline(always)]
59 pub fn new(
60 program_id: &'a Address,
61 accounts: &'a [AccountView],
62 instruction_data: &'a [u8],
63 ) -> Self {
64 Self {
65 program_id,
66 accounts,
67 instruction_data,
68 segment_borrows: SegmentBorrowRegistry::new(),
69 }
70 }
71
    /// Program ID.
    ///
    /// Returns the program's own address as supplied to the entrypoint.
    #[inline(always)]
    pub fn program_id(&self) -> &Address {
        self.program_id
    }
77
    /// Raw instruction data.
    ///
    /// Returns the full byte slice held by this context; the `'a`
    /// lifetime lets callers keep the slice past the `&self` borrow.
    #[inline(always)]
    pub fn instruction_data(&self) -> &'a [u8] {
        self.instruction_data
    }
83
84 /// Get an account by index.
85 #[inline(always)]
86 pub fn account(&self, index: usize) -> Result<&'a AccountView, ProgramError> {
87 self.accounts
88 .get(index)
89 .ok_or(ProgramError::NotEnoughAccountKeys)
90 }
91
92 /// Get an account by index (mutation-intent variant).
93 ///
94 /// Functionally identical to `account()` since `AccountView` uses
95 /// interior mutability for data access (`overlay_mut`, `load_mut`,
96 /// `try_borrow_mut`). The distinct name signals that the caller
97 /// intends to write through the returned reference.
98 #[inline(always)]
99 pub fn account_mut(&self, index: usize) -> Result<&'a AccountView, ProgramError> {
100 self.accounts
101 .get(index)
102 .ok_or(ProgramError::NotEnoughAccountKeys)
103 }
104
    /// Get the total number of accounts.
    #[inline(always)]
    pub fn num_accounts(&self) -> usize {
        self.accounts.len()
    }
110
    /// Get all accounts as a slice.
    ///
    /// The slice borrows from the entrypoint data (`'a`), not from
    /// `&self`, so it can outlive this method call.
    #[inline(always)]
    pub fn accounts(&self) -> &'a [AccountView] {
        self.accounts
    }
116
    /// Access the instruction-scoped segment borrow registry.
    #[inline(always)]
    pub fn borrows(&self) -> &SegmentBorrowRegistry {
        &self.segment_borrows
    }
122
    /// Mutably access the instruction-scoped segment borrow registry.
    #[inline(always)]
    pub fn borrows_mut(&mut self) -> &mut SegmentBorrowRegistry {
        &mut self.segment_borrows
    }
128
    /// Inspect the instruction account slice for duplicate aliases.
    ///
    /// Builds an [`AccountAudit`] over the full account slice; the
    /// `require_unique_*` helpers below are thin wrappers over this.
    #[inline(always)]
    pub fn audit_accounts(&self) -> AccountAudit<'a> {
        AccountAudit::new(self.accounts)
    }
134
135 /// Get the remaining accounts starting at `from`.
136 #[inline(always)]
137 pub fn remaining_accounts(&self, from: usize) -> &'a [AccountView] {
138 if from >= self.accounts.len() {
139 &[]
140 } else {
141 &self.accounts[from..]
142 }
143 }
144
145 /// Get remaining accounts in strict duplicate-rejecting mode.
146 #[inline(always)]
147 pub fn remaining_accounts_strict(
148 &self,
149 from: usize,
150 ) -> crate::remaining::RemainingAccounts<'a> {
151 let declared_end = from.min(self.accounts.len());
152 crate::remaining::RemainingAccounts::strict(
153 &self.accounts[..declared_end],
154 self.remaining_accounts(from),
155 )
156 }
157
158 /// Get remaining accounts in duplicate-preserving passthrough mode.
159 #[inline(always)]
160 pub fn remaining_accounts_passthrough(
161 &self,
162 from: usize,
163 ) -> crate::remaining::RemainingAccounts<'a> {
164 let declared_end = from.min(self.accounts.len());
165 crate::remaining::RemainingAccounts::passthrough(
166 &self.accounts[..declared_end],
167 self.remaining_accounts(from),
168 )
169 }
170
171 /// Require at least `n` accounts are present.
172 #[inline(always)]
173 pub fn require_accounts(&self, n: usize) -> ProgramResult {
174 if self.accounts.len() >= n {
175 Ok(())
176 } else {
177 Err(ProgramError::NotEnoughAccountKeys)
178 }
179 }
180
    /// Require all account addresses to be unique.
    ///
    /// Thin wrapper over `audit_accounts().require_all_unique()`.
    #[inline(always)]
    pub fn require_unique_accounts(&self) -> ProgramResult {
        self.audit_accounts().require_all_unique()
    }
186
    /// Require that no duplicated account is writable in this instruction.
    ///
    /// Thin wrapper over `audit_accounts().require_unique_writable()`.
    #[inline(always)]
    pub fn require_unique_writable_accounts(&self) -> ProgramResult {
        self.audit_accounts().require_unique_writable()
    }
192
    /// Require that no duplicated account is used as a signer role.
    ///
    /// Thin wrapper over `audit_accounts().require_unique_signers()`.
    #[inline(always)]
    pub fn require_unique_signer_accounts(&self) -> ProgramResult {
        self.audit_accounts().require_unique_signers()
    }
198
199 /// Require at least `n` bytes of instruction data.
200 #[inline(always)]
201 pub fn require_data_len(&self, n: usize) -> ProgramResult {
202 if self.instruction_data.len() >= n {
203 Ok(())
204 } else {
205 Err(ProgramError::InvalidInstructionData)
206 }
207 }
208
209 // --- Whole-Layout Typed Access ----------------------------------
210
    /// Validate-and-load the full typed layout for an account.
    ///
    /// This is the indexed shortcut for `ctx.account(idx)?.load::<T>()`.
    /// It's the canonical "Tier A" access path: the runtime checks the
    /// Hopper header, validates the data length, and projects the typed
    /// view in one inlined call, at no extra cost over the spelled-out form.
    #[inline(always)]
    pub fn load<T: LayoutContract>(&self, index: usize) -> Result<crate::Ref<'_, T>, ProgramError> {
        self.account(index)?.load::<T>()
    }
221
    /// Validate-and-load a mutable typed layout for an account.
    ///
    /// Indexed shortcut for `ctx.account(idx)?.load_mut::<T>()`. The
    /// returned guard holds the account-level exclusive borrow until
    /// it drops.
    #[inline(always)]
    pub fn load_mut<T: LayoutContract>(
        &self,
        index: usize,
    ) -> Result<crate::RefMut<'_, T>, ProgramError> {
        self.account(index)?.load_mut::<T>()
    }
234
    /// Cross-program load: validate ABI fingerprint without ownership check.
    ///
    /// Use this when reading an account whose owner is another program but
    /// whose layout is published as a Hopper layout contract.
    #[inline(always)]
    pub fn load_cross_program<T: LayoutContract>(
        &self,
        index: usize,
    ) -> Result<crate::Ref<'_, T>, ProgramError> {
        self.account(index)?.load_cross_program::<T>()
    }
246
247 // --- Segment-Level Access (fine-grained borrow tracking) --------
248
249 /// Register a read borrow for a segment of an account and return a
250 /// [`SegRef<T>`](crate::SegRef) that releases both the account-level
251 /// byte guard **and** the segment registry lease on drop.
252 ///
253 /// `index` is the account index. `abs_offset` is the absolute byte
254 /// offset within the account data (including header bytes).
255 ///
256 /// # Type Safety
257 ///
258 /// `T` must implement `Pod` (substrate-level "safe to overlay on
259 /// raw bytes" contract: every bit pattern valid, align-1, no
260 /// padding, no interior pointers). Segment borrow tracking
261 /// prevents conflicting write access to the same byte range for
262 /// the guard's lifetime.
263 ///
264 /// # Canonical path (audit ST1 / winning-architecture spec)
265 ///
266 /// Three variants exist for different offset sources:
267 ///
268 /// | Variant | Use when |
269 /// |---|---|
270 /// | [`segment_ref_typed`](Self::segment_ref_typed) (canonical) | Offset is a compile-time constant (the common case). The `const OFFSET: u32` generic becomes an immediate in the pointer arithmetic. |
271 /// | [`segment_ref_const`](Self::segment_ref_const) | Offset comes from a runtime [`Segment`] value (dispatching dynamically between named fields). |
272 /// | `segment_ref` (this method) | Offset is fully dynamic (iterating segments in a loop, for example). |
273 ///
274 /// `#[hopper::context]`-generated accessors default to the canonical
275 /// typed path; reach for the others only when the use case
276 /// genuinely needs a runtime offset.
277 #[inline(always)]
278 pub fn segment_ref<'b, T: crate::Pod>(
279 &'b mut self,
280 index: usize,
281 abs_offset: u32,
282 ) -> Result<crate::SegRef<'b, T>, ProgramError> {
283 let view = self
284 .accounts
285 .get(index)
286 .ok_or(ProgramError::NotEnoughAccountKeys)?;
287 view.segment_ref::<T>(
288 &mut self.segment_borrows,
289 abs_offset,
290 core::mem::size_of::<T>() as u32,
291 )
292 }
293
294 /// Register a write borrow for a segment of an account.
295 ///
296 /// Validates bounds, checks writable, and registers a leased
297 /// exclusive borrow, then returns a [`SegRefMut<T>`](crate::SegRefMut)
298 /// that releases on drop.
299 ///
300 /// This is the primitive that enables safe concurrent mutation of
301 /// non-overlapping account regions. Hopper's core innovation .
302 /// and the lease model (added post-audit) makes sequential
303 /// same-region borrows inside one instruction work correctly.
304 #[inline(always)]
305 pub fn segment_mut<'b, T: crate::Pod>(
306 &'b mut self,
307 index: usize,
308 abs_offset: u32,
309 ) -> Result<crate::SegRefMut<'b, T>, ProgramError> {
310 let view = self
311 .accounts
312 .get(index)
313 .ok_or(ProgramError::NotEnoughAccountKeys)?;
314 view.segment_mut::<T>(
315 &mut self.segment_borrows,
316 abs_offset,
317 core::mem::size_of::<T>() as u32,
318 )
319 }
320
321 /// Const-driven segment read: pass a compile-time [`Segment`] and the
322 /// account index. Lowers to the same pointer-plus-const-offset shape
323 /// as `segment_ref` but without the caller hand-rolling the offset +
324 /// size arguments.
325 #[inline(always)]
326 pub fn segment_ref_const<'b, T: crate::Pod>(
327 &'b mut self,
328 index: usize,
329 segment: crate::Segment,
330 ) -> Result<crate::SegRef<'b, T>, ProgramError> {
331 let view = self
332 .accounts
333 .get(index)
334 .ok_or(ProgramError::NotEnoughAccountKeys)?;
335 view.segment_ref_const::<T>(&mut self.segment_borrows, segment)
336 }
337
338 /// Const-driven exclusive segment access. Pair with
339 /// `#[hopper::state]` constants for zero-overhead field writes.
340 #[inline(always)]
341 pub fn segment_mut_const<'b, T: crate::Pod>(
342 &'b mut self,
343 index: usize,
344 segment: crate::Segment,
345 ) -> Result<crate::SegRefMut<'b, T>, ProgramError> {
346 let view = self
347 .accounts
348 .get(index)
349 .ok_or(ProgramError::NotEnoughAccountKeys)?;
350 view.segment_mut_const::<T>(&mut self.segment_borrows, segment)
351 }
352
353 /// Typed-segment read: the type and offset are both compile-time
354 /// constants, baked into a [`TypedSegment`] zero-sized marker.
355 #[inline(always)]
356 pub fn segment_ref_typed<'b, T: crate::Pod, const OFFSET: u32>(
357 &'b mut self,
358 index: usize,
359 segment: crate::TypedSegment<T, OFFSET>,
360 ) -> Result<crate::SegRef<'b, T>, ProgramError> {
361 let view = self
362 .accounts
363 .get(index)
364 .ok_or(ProgramError::NotEnoughAccountKeys)?;
365 view.segment_ref_typed::<T, OFFSET>(&mut self.segment_borrows, segment)
366 }
367
368 /// Typed-segment write. Mirrors [`segment_ref_typed`] for the
369 /// exclusive path.
370 #[inline(always)]
371 pub fn segment_mut_typed<'b, T: crate::Pod, const OFFSET: u32>(
372 &'b mut self,
373 index: usize,
374 segment: crate::TypedSegment<T, OFFSET>,
375 ) -> Result<crate::SegRefMut<'b, T>, ProgramError> {
376 let view = self
377 .accounts
378 .get(index)
379 .ok_or(ProgramError::NotEnoughAccountKeys)?;
380 view.segment_mut_typed::<T, OFFSET>(&mut self.segment_borrows, segment)
381 }
382
383 /// Explicit unsafe whole-account typed read.
384 #[inline(always)]
385 ///
386 /// # Safety
387 ///
388 /// Caller must uphold the invariants documented for this unsafe API before invoking it.
389 pub unsafe fn raw_ref<T: crate::Pod>(
390 &self,
391 index: usize,
392 ) -> Result<crate::Ref<'_, T>, ProgramError> {
393 let view = self
394 .accounts
395 .get(index)
396 .ok_or(ProgramError::NotEnoughAccountKeys)?;
397 // SAFETY: This block is part of Hopper's audited zero-copy/backend boundary; surrounding checks and caller contracts uphold the required raw-pointer, layout, and aliasing invariants.
398 unsafe { view.raw_ref::<T>() }
399 }
400
401 /// Explicit unsafe whole-account typed write.
402 #[inline(always)]
403 ///
404 /// # Safety
405 ///
406 /// Caller must uphold the invariants documented for this unsafe API before invoking it.
407 pub unsafe fn raw_mut<T: crate::Pod>(
408 &self,
409 index: usize,
410 ) -> Result<crate::RefMut<'_, T>, ProgramError> {
411 let view = self
412 .accounts
413 .get(index)
414 .ok_or(ProgramError::NotEnoughAccountKeys)?;
415 // SAFETY: This block is part of Hopper's audited zero-copy/backend boundary; surrounding checks and caller contracts uphold the required raw-pointer, layout, and aliasing invariants.
416 unsafe { view.raw_mut::<T>() }
417 }
418
419 /// Explicit unsafe escape hatch for whole-account typed projection.
420 ///
421 /// This bypasses segment borrow tracking. The caller is responsible for
422 /// alias safety and for using a type that matches the account bytes.
423 #[inline(always)]
424 ///
425 /// # Safety
426 ///
427 /// Caller must uphold the invariants documented for this unsafe API before invoking it.
428 pub unsafe fn raw_unchecked<T: crate::Pod>(
429 &self,
430 index: usize,
431 ) -> Result<crate::RefMut<'_, T>, ProgramError> {
432 // SAFETY: This block is part of Hopper's audited zero-copy/backend boundary; surrounding checks and caller contracts uphold the required raw-pointer, layout, and aliasing invariants.
433 unsafe { self.raw_mut::<T>(index) }
434 }
435
436 /// Canonical raw-pointer escape hatch to an account's data buffer.
437 ///
438 /// Returns a pointer to the first byte of `accounts[index]`'s data
439 /// region (after the runtime account header, before any Hopper
440 /// 16-byte layout header). The pointer is valid for reads and
441 /// writes for the lifetime of the account view and carries no
442 /// borrow-tracking obligations. Dereferencing it is `unsafe`
443 /// because the caller takes over alias-safety responsibility
444 /// that the segment registry normally upholds.
445 ///
446 /// This is the explicit power-user primitive the audit asks for:
447 /// safe code reaches for `segment_ref_typed` / `segment_mut_typed`
448 /// / the generated `ctx.<field>_segment_mut(...)` accessors; raw
449 /// code drops to `unsafe { ctx.as_mut_ptr(0)?.add(offset) as *mut T }`.
450 ///
451 /// # Safety
452 ///
453 /// The caller must guarantee no aliasing mutable borrow is held
454 /// on the same account for the duration of any write through the
455 /// returned pointer. The returned pointer must be dereferenced
456 /// within the `'info` lifetime of the account view; reading past
457 /// `AccountView::data_len()` is undefined behaviour.
458 #[cfg(feature = "hopper-native-backend")]
459 #[inline(always)]
460 pub unsafe fn as_mut_ptr(&self, index: usize) -> Result<*mut u8, ProgramError> {
461 let view = self
462 .accounts
463 .get(index)
464 .ok_or(ProgramError::NotEnoughAccountKeys)?;
465 view.require_writable()?;
466 // SAFETY: the account view is live for `'info` and
467 // `data_ptr` yields a pointer inside the loader-provided
468 // per-account buffer. Returning the untyped pointer transfers
469 // alias-safety to the caller as documented above.
470 Ok(view.data_ptr_unchecked())
471 }
472
473 /// Immutable sibling of [`as_mut_ptr`]. Returns a `*const u8`.
474 ///
475 /// Shared-borrow checking still runs, so calling this while an
476 /// exclusive borrow is live on the same account fails with
477 /// `AccountBorrowFailed`. The return value is safe to obtain; the
478 /// caller only needs `unsafe` to dereference it.
479 ///
480 /// [`as_mut_ptr`]: Self::as_mut_ptr
481 #[cfg(feature = "hopper-native-backend")]
482 #[inline(always)]
483 pub fn as_ptr(&self, index: usize) -> Result<*const u8, ProgramError> {
484 let view = self
485 .accounts
486 .get(index)
487 .ok_or(ProgramError::NotEnoughAccountKeys)?;
488 view.check_borrow()?;
489 Ok(view.data_ptr_unchecked() as *const u8)
490 }
491
    /// Read instruction data as a typed value (unaligned, little-endian safe).
    ///
    /// Reads `size_of::<T>()` bytes starting at `offset` via `read_unaligned`.
    /// Caller must ensure `T` is a plain-old-data type where all bit patterns
    /// are valid (enforced by the `Pod` bound).
    ///
    /// Errors: `ArithmeticOverflow` if `offset + size_of::<T>()` overflows,
    /// `InvalidInstructionData` if the range runs past the data end.
    #[inline(always)]
    pub fn read_data<T: crate::Pod>(&self, offset: usize) -> Result<T, ProgramError> {
        let end = offset
            .checked_add(core::mem::size_of::<T>())
            .ok_or(ProgramError::ArithmeticOverflow)?;
        if self.instruction_data.len() < end {
            return Err(ProgramError::InvalidInstructionData);
        }
        // SAFETY: bounds checked; `T: Pod` guarantees every bit
        // pattern is valid and the type has no drop glue, so
        // `read_unaligned` into instruction data is sound.
        Ok(unsafe {
            core::ptr::read_unaligned(self.instruction_data.as_ptr().add(offset) as *const T)
        })
    }
512
513 /// Get a byte slice from instruction data.
514 #[inline(always)]
515 pub fn data_slice(&self, offset: usize, len: usize) -> Result<&[u8], ProgramError> {
516 let end = offset
517 .checked_add(len)
518 .ok_or(ProgramError::ArithmeticOverflow)?;
519 if self.instruction_data.len() < end {
520 return Err(ProgramError::InvalidInstructionData);
521 }
522 Ok(&self.instruction_data[offset..end])
523 }
524
525 /// Read the first byte of instruction data as an instruction tag.
526 ///
527 /// Common pattern for byte-tag dispatch.
528 #[inline(always)]
529 pub fn instruction_tag(&self) -> Result<u8, ProgramError> {
530 self.instruction_data
531 .first()
532 .copied()
533 .ok_or(ProgramError::InvalidInstructionData)
534 }
535}