Skip to main content

hopper_runtime/
account.rs

1//! Hopper-owned account view for Solana programs.
2//!
3//! `AccountView` is the canonical typed state gateway for Hopper programs.
4//! It wraps the active backend's account representation behind a
5//! `#[repr(transparent)]` boundary, delegating all methods with zero-cost
6//! type conversion.
7//!
8//! Key capabilities:
9//! - Chainable validation (`check_signer()?.check_writable()?`)
10//! - Whole-layout typed access (`load::<T>()`, `load_mut::<T>()`)
11//! - Segment-aware typed access (`segment_ref`, `segment_mut`)
12//! - Explicit raw escape hatches (`raw_ref`, `raw_mut`)
13//! - Hopper header reading (disc, version, layout_id)
14//! - Packed flags for batch validation
15//! - Remaining accounts iterator
16
17use crate::address::{Address, address_eq};
18use crate::error::ProgramError;
19use crate::borrow::{Ref, RefMut};
20use crate::borrow_registry::{self, BorrowToken};
21use crate::compat::{self, BackendAccountView};
22use crate::field_map::FieldInfo;
23use crate::layout::LayoutContract;
24use crate::segment_borrow::SegmentBorrowRegistry;
25use crate::ProgramResult;
26
27// ══════════════════════════════════════════════════════════════════════
28//  AccountView -- Hopper's canonical typed state gateway
29// ══════════════════════════════════════════════════════════════════════
30
31/// Zero-copy view over a Solana account.
32///
33/// `AccountView` is the single canonical type for account access in
34/// Hopper programs. It wraps whatever backend is active and exposes a
35/// Hopper-owned API surface.
36///
37/// The `#[repr(transparent)]` layout guarantees that `&[backend::AccountView]`
38/// can be safely reinterpreted as `&[AccountView]` at the entrypoint
39/// boundary with zero conversion cost.
40#[repr(transparent)]
41#[derive(Clone, PartialEq, Eq)]
42pub struct AccountView {
43    inner: BackendAccountView,
44}
45
46// SAFETY: AccountView is safe to send between threads (BPF is single-threaded;
47// tests may need Send/Sync).
48unsafe impl Send for AccountView {}
49unsafe impl Sync for AccountView {}
50
51impl AccountView {
52    #[cfg(test)]
53    #[inline(always)]
54    pub(crate) fn from_backend(inner: BackendAccountView) -> Self {
55        Self { inner }
56    }
57
58    // ── Getters ──────────────────────────────────────────────────────
59
60    /// The account's public key.
61    #[inline(always)]
62    pub fn address(&self) -> &Address {
63        compat::account_address(&self.inner)
64    }
65
66    /// The owning program's address.
67    ///
68    /// # Safety
69    ///
70    /// The returned reference is invalidated if the account is assigned
71    /// to a new owner. The caller must ensure no concurrent mutation.
72    #[inline(always)]
73    pub unsafe fn owner(&self) -> &Address {
74        unsafe { compat::account_owner(&self.inner) }
75    }
76
77    /// Read the owner address as a copy (safe, no aliasing hazard).
78    #[inline(always)]
79    pub fn read_owner(&self) -> Address {
80        compat::read_owner(&self.inner)
81    }
82
83    /// Whether this account is owned by the given program.
84    #[inline(always)]
85    pub fn owned_by(&self, program: &Address) -> bool {
86        compat::owned_by(&self.inner, program)
87    }
88
89    /// Whether this account signed the transaction.
90    #[inline(always)]
91    pub fn is_signer(&self) -> bool {
92        self.inner.is_signer()
93    }
94
95    /// Whether this account is writable in the transaction.
96    #[inline(always)]
97    pub fn is_writable(&self) -> bool {
98        self.inner.is_writable()
99    }
100
101    /// Whether this account contains an executable program.
102    #[inline(always)]
103    pub fn executable(&self) -> bool {
104        self.inner.executable()
105    }
106
107    /// Current data length in bytes.
108    #[inline(always)]
109    pub fn data_len(&self) -> usize {
110        self.inner.data_len()
111    }
112
113    /// Current lamport balance.
114    #[inline(always)]
115    pub fn lamports(&self) -> u64 {
116        self.inner.lamports()
117    }
118
119    /// Whether the account data is empty.
120    #[inline(always)]
121    pub fn is_data_empty(&self) -> bool {
122        self.data_len() == 0
123    }
124
125    /// Set the lamport balance.
126    #[inline(always)]
127    pub fn set_lamports(&self, lamports: u64) {
128        self.inner.set_lamports(lamports);
129    }
130
131    // ── Borrow tracking ─────────────────────────────────────────────
132
133    /// Try to obtain a shared borrow of the account data.
134    #[inline(always)]
135    pub fn try_borrow(&self) -> Result<Ref<'_, [u8]>, ProgramError> {
136        let token = BorrowToken::shared(self.address())?;
137        match self.inner.try_borrow() {
138            Ok(data) => Ok(Ref::from_backend(data, token)),
139            Err(error) => {
140                drop(token);
141                Err(ProgramError::from(error))
142            }
143        }
144    }
145
146    /// Try to obtain an exclusive (mutable) borrow of the account data.
147    #[inline(always)]
148    pub fn try_borrow_mut(&self) -> Result<RefMut<'_, [u8]>, ProgramError> {
149        let token = BorrowToken::mutable(self.address())?;
150        match self.inner.try_borrow_mut() {
151            Ok(data) => Ok(RefMut::from_backend(data, token)),
152            Err(error) => {
153                drop(token);
154                Err(ProgramError::from(error))
155            }
156        }
157    }
158
159    // ── Segment-aware access ───────────────────────────────────────
160
161    /// Project a typed segment from this account with segment-level
162    /// borrow tracking.
163    ///
164    /// The runtime validates the requested byte range, registers a
165    /// **leased** read borrow in the provided instruction-scoped
166    /// registry, and returns a [`SegRef<T>`](crate::SegRef) that
167    /// releases the lease on drop. This replaces the pre-audit
168    /// "instruction-sticky" behaviour: the registry entry is now tied
169    /// to the returned guard's lifetime, so sequential patterns like
170    /// `let x = segment_ref…; drop(x); let y = segment_ref…;` work
171    /// exactly the way Rust callers expect.
172    ///
173    /// On the native backend (Solana), the inner `Ref<T>` uses the
174    /// flat `{ptr, state}` representation, no dummy slice guard,
175    /// no intermediate `Ref<[u8]>`.
176    ///
177    /// The explicit `'a` lifetime binds the returned `SegRef<'a, T>`
178    /// to the shorter of `&self` (the account) and `&mut borrows`
179    /// (the registry). Either outliving the other would let the guard
180    /// dangle.
181    #[inline(always)]
182    pub fn segment_ref<'a, T: crate::Pod>(
183        &'a self,
184        borrows: &'a mut SegmentBorrowRegistry,
185        abs_offset: u32,
186        size: u32,
187    ) -> Result<crate::SegRef<'a, T>, ProgramError> {
188        let expected_size = core::mem::size_of::<T>() as u32;
189        if size != expected_size {
190            return ProgramError::err_invalid_argument();
191        }
192
193        let end = abs_offset
194            .checked_add(size)
195            .ok_or(ProgramError::ArithmeticOverflow)?;
196        if end as usize > self.data_len() {
197            return ProgramError::err_data_too_small();
198        }
199
200        let borrow = borrows.register_leased_read(self.address(), abs_offset, size)?;
201
202        // Build the inner `Ref<T>` via the existing flat/projected path.
203        #[cfg(target_os = "solana")]
204        let inner: Ref<'_, T> = {
205            // SAFETY: size, overflow, and bounds already validated above.
206            let native_ref = unsafe {
207                self.inner.segment_ref_unchecked::<T>(abs_offset)
208            };
209            let native_ref = match native_ref {
210                Ok(nr) => nr,
211                Err(e) => {
212                    // Native guard could not be taken; undo the lease
213                    // we just registered so the instruction-level view
214                    // stays consistent.
215                    borrows.release(&borrow);
216                    return Err(ProgramError::from(e));
217                }
218            };
219            let (typed_ref, state_ptr) = native_ref.into_raw_parts();
220            Ref::from_segment(typed_ref as *const T, state_ptr)
221        };
222        #[cfg(not(target_os = "solana"))]
223        let inner: Ref<'_, T> = {
224            let data = match self.try_borrow() {
225                Ok(d) => d,
226                Err(e) => {
227                    borrows.release(&borrow);
228                    return Err(e);
229                }
230            };
231            let ptr = unsafe { data.as_bytes_ptr().add(abs_offset as usize) as *const T };
232            unsafe { data.project(ptr) }
233        };
234
235        // SAFETY: `borrow` was just registered in `borrows`; the
236        // lease we construct will swap-remove it on drop.
237        let lease = unsafe { crate::SegmentLease::new(borrows, borrow) };
238        Ok(crate::SegRef::new(inner, lease))
239    }
240
241    /// Project a mutable typed segment. Mirror of [`segment_ref`]; the
242    /// returned [`SegRefMut<T>`](crate::SegRefMut) carries both the
243    /// account-level exclusive borrow guard and the segment-registry
244    /// lease, so dropping it is a full release, no lingering entries.
245    #[inline(always)]
246    pub fn segment_mut<'a, T: crate::Pod>(
247        &'a self,
248        borrows: &'a mut SegmentBorrowRegistry,
249        abs_offset: u32,
250        size: u32,
251    ) -> Result<crate::SegRefMut<'a, T>, ProgramError> {
252        self.check_writable()?;
253
254        let expected_size = core::mem::size_of::<T>() as u32;
255        if size != expected_size {
256            return ProgramError::err_invalid_argument();
257        }
258
259        let end = abs_offset
260            .checked_add(size)
261            .ok_or(ProgramError::ArithmeticOverflow)?;
262        if end as usize > self.data_len() {
263            return ProgramError::err_data_too_small();
264        }
265
266        let borrow = borrows.register_leased_write(self.address(), abs_offset, size)?;
267
268        #[cfg(target_os = "solana")]
269        let inner: RefMut<'_, T> = {
270            let native_ref = unsafe {
271                self.inner.segment_mut_unchecked::<T>(abs_offset)
272            };
273            let native_ref = match native_ref {
274                Ok(nr) => nr,
275                Err(e) => {
276                    borrows.release(&borrow);
277                    return Err(ProgramError::from(e));
278                }
279            };
280            let (typed_ref, state_ptr) = native_ref.into_raw_parts();
281            RefMut::from_segment(typed_ref as *mut T, state_ptr)
282        };
283        #[cfg(not(target_os = "solana"))]
284        let inner: RefMut<'_, T> = {
285            let mut data = match self.try_borrow_mut() {
286                Ok(d) => d,
287                Err(e) => {
288                    borrows.release(&borrow);
289                    return Err(e);
290                }
291            };
292            let ptr = unsafe { data.as_bytes_mut_ptr().add(abs_offset as usize) as *mut T };
293            unsafe { data.project(ptr) }
294        };
295
296        let lease = unsafe { crate::SegmentLease::new(borrows, borrow) };
297        Ok(crate::SegRefMut::new(inner, lease))
298    }
299
300    // ── Const-driven segment access ─────────────────────────────────
301
302    /// Project a typed segment described by a compile-time [`Segment`].
303    ///
304    /// This is the "const-driven" access form the Hopper design demands:
305    /// the offset and size come from a `const SEG: Segment = ...;`
306    /// declaration generated by `#[hopper::state]` or written by hand,
307    /// so the call collapses to a single `ptr + const_offset` add on
308    /// Solana SBF. No runtime string lookup, no dynamic map, no search.
309    ///
310    /// `segment.offset` is the **absolute** offset from the start of
311    /// account data (i.e. past the Hopper header already folded in).
312    /// Construct it via `Segment::new(offset, size)` or
313    /// `Segment::body(body_offset, size)`, the latter adds
314    /// `HopperHeader::SIZE` for you.
315    ///
316    /// ```ignore
317    /// const BALANCE: Segment = Segment::body(0, 8);
318    /// let mut balance = vault.segment_ref_const::<u64>(&mut borrows, BALANCE)?;
319    /// ```
320    #[inline(always)]
321    pub fn segment_ref_const<'a, T: crate::Pod>(
322        &'a self,
323        borrows: &'a mut SegmentBorrowRegistry,
324        segment: crate::segment::Segment,
325    ) -> Result<crate::SegRef<'a, T>, ProgramError> {
326        self.segment_ref::<T>(borrows, segment.offset, segment.size)
327    }
328
329    /// Mutable const-Segment access. See [`segment_ref_const`] for the
330    /// contract, this is the exclusive variant.
331    #[inline(always)]
332    pub fn segment_mut_const<'a, T: crate::Pod>(
333        &'a self,
334        borrows: &'a mut SegmentBorrowRegistry,
335        segment: crate::segment::Segment,
336    ) -> Result<crate::SegRefMut<'a, T>, ProgramError> {
337        self.segment_mut::<T>(borrows, segment.offset, segment.size)
338    }
339
340    /// Project a typed segment described by a [`TypedSegment`].
341    ///
342    /// This is the tightest form of segment access Hopper exposes: both
343    /// the type `T` and the offset are compile-time constants baked
344    /// into the [`TypedSegment`] marker, so the call collapses to a
345    /// single `ptr + literal_offset` add with a literal size in the
346    /// bounds check. The marker argument is a zero-sized token, free
347    /// to pass around.
348    ///
349    /// ```ignore
350    /// const BALANCE: TypedSegment<WireU64, { HopperHeader::SIZE as u32 }>
351    ///     = TypedSegment::new();
352    /// let bal = vault.segment_ref_typed(&mut borrows, BALANCE)?;
353    /// ```
354    #[inline(always)]
355    pub fn segment_ref_typed<'a, T: crate::Pod, const OFFSET: u32>(
356        &'a self,
357        borrows: &'a mut SegmentBorrowRegistry,
358        _segment: crate::segment::TypedSegment<T, OFFSET>,
359    ) -> Result<crate::SegRef<'a, T>, ProgramError> {
360        self.segment_ref::<T>(borrows, OFFSET, core::mem::size_of::<T>() as u32)
361    }
362
363    /// Mutable typed-segment access. See [`segment_ref_typed`] for the
364    /// contract, this is the exclusive variant.
365    #[inline(always)]
366    pub fn segment_mut_typed<'a, T: crate::Pod, const OFFSET: u32>(
367        &'a self,
368        borrows: &'a mut SegmentBorrowRegistry,
369        _segment: crate::segment::TypedSegment<T, OFFSET>,
370    ) -> Result<crate::SegRefMut<'a, T>, ProgramError> {
371        self.segment_mut::<T>(borrows, OFFSET, core::mem::size_of::<T>() as u32)
372    }
373
374    // ── Zero-copy overlay access ─────────────────────────────────────
375
376
377
378    // ── Typed load (LayoutContract-aware) ────────────────────────────
379
380    /// Load a typed layout after validating the account header.
381    ///
382    /// This is the canonical "validate then project" path:
383    /// 1. Check disc, version, and layout_id match `T`
384    /// 2. Verify data length >= `T::SIZE`
385    /// 3. Return zero-copy reference into account data
386    ///
387    /// The returned reference begins at `T::TYPE_OFFSET`. Body-only layouts
388    /// project past the Hopper header; header-inclusive layouts project the
389    /// full account struct from byte 0.
390    ///
391    /// # Example
392    ///
393    /// ```ignore
394    /// let vault = account.load::<Vault>()?;
395    /// ```
396    #[inline(always)]
397    pub fn load<T: LayoutContract>(&self) -> Result<Ref<'_, T>, ProgramError> {
398        let data = self.try_borrow()?;
399        T::validate_header(&data)?;
400        if data.len() < T::required_len() {
401            return ProgramError::err_data_too_small();
402        }
403        let ptr = unsafe { data.as_bytes_ptr().add(T::TYPE_OFFSET) as *const T };
404        // SAFETY: Header and length validated above. `ptr` points into the borrowed bytes.
405        Ok(unsafe { data.project(ptr) })
406    }
407
408    /// Load a mutable typed layout after validating the account header.
409    ///
410    /// Same as `load()` but provides a mutable reference for in-place
411    /// state updates. Changes write directly to account data.
412    ///
413    /// # Example
414    ///
415    /// ```ignore
416    /// let mut vault = account.load_mut::<Vault>()?;
417    /// vault.balance = vault.balance.checked_add(amount)?;
418    /// ```
419    #[inline(always)]
420    pub fn load_mut<T: LayoutContract>(&self) -> Result<RefMut<'_, T>, ProgramError> {
421        let mut data = self.try_borrow_mut()?;
422        T::validate_header(&data)?;
423        if data.len() < T::required_len() {
424            return ProgramError::err_data_too_small();
425        }
426        let ptr = unsafe { data.as_bytes_mut_ptr().add(T::TYPE_OFFSET) as *mut T };
427        // SAFETY: Header and length validated above. `ptr` points into the borrowed bytes.
428        Ok(unsafe { data.project(ptr) })
429    }
430
431    /// Explicit raw typed read of the account buffer.
432    ///
433    /// This bypasses Hopper layout validation and segment tracking, but it still
434    /// respects the account-level borrow rules enforced by `try_borrow()`.
435    #[inline(always)]
436    pub unsafe fn raw_ref<T: crate::Pod>(&self) -> Result<Ref<'_, T>, ProgramError> {
437        let data = self.try_borrow()?;
438        if core::mem::size_of::<T>() > data.len() {
439            return Err(ProgramError::AccountDataTooSmall);
440        }
441        let ptr = data.as_ptr() as *const T;
442        Ok(unsafe { data.project(ptr) })
443    }
444
445    /// Explicit raw typed write of the account buffer.
446    ///
447    /// This bypasses Hopper layout validation and segment tracking, but it still
448    /// enforces writability and the account-level exclusive borrow rules.
449    #[inline(always)]
450    pub unsafe fn raw_mut<T: crate::Pod>(&self) -> Result<RefMut<'_, T>, ProgramError> {
451        self.check_writable()?;
452        let mut data = self.try_borrow_mut()?;
453        if core::mem::size_of::<T>() > data.len() {
454            return Err(ProgramError::AccountDataTooSmall);
455        }
456        let ptr = data.as_bytes_mut_ptr() as *mut T;
457        Ok(unsafe { data.project(ptr) })
458    }
459
460
461
462    /// Load a cross-program layout without ownership checks.
463    ///
464    /// Validates wire format (disc + layout_id + size) but does not check
465    /// that the account is owned by this program. Use for cross-program
466    /// reads where the account is owned by another program and you need
467    /// a typed, zero-copy view of its data.
468    ///
469    /// The layout_id check ensures ABI compatibility: if the other program
470    /// changes its layout, this will fail rather than silently misinterpret.
471    ///
472    /// # Example
473    ///
474    /// ```ignore
475    /// let other_vault = foreign_account.load_cross_program::<OtherVault>()?;
476    /// ```
477    #[inline(always)]
478    pub fn load_cross_program<T: LayoutContract>(&self) -> Result<Ref<'_, T>, ProgramError> {
479        let data = self.try_borrow()?;
480        if data.len() < T::required_len() {
481            return Err(ProgramError::AccountDataTooSmall);
482        }
483        T::check_disc(&data)?;
484        if let Some(id) = crate::layout::read_layout_id(&data) {
485            if *id != T::LAYOUT_ID {
486                return Err(ProgramError::InvalidAccountData);
487            }
488        } else {
489            return Err(ProgramError::AccountDataTooSmall);
490        }
491        let ptr = unsafe { data.as_bytes_ptr().add(T::TYPE_OFFSET) as *const T };
492        // SAFETY: Wire identity and size validated above.
493        Ok(unsafe { data.project(ptr) })
494    }
495
496    /// Read runtime layout metadata from this account's header.
497    ///
498    /// Returns `None` if the account data is too short for a Hopper header.
499    /// This is useful for runtime inspection, manager tooling, and schema
500    /// checking when the concrete layout type is not known at compile time.
501    #[inline(always)]
502    pub fn layout_info(&self) -> Option<crate::layout::LayoutInfo> {
503        let data = self.try_borrow().ok()?;
504        crate::layout::LayoutInfo::from_data(&data)
505    }
506
507    /// Compile-time field metadata for a layout contract.
508    #[inline(always)]
509    pub fn fields<T: LayoutContract>() -> &'static [FieldInfo] {
510        T::fields()
511    }
512
513    /// Find a compile-time field descriptor by name.
514    ///
515    /// This is a tooling/inspection helper that delegates to
516    /// `FieldMap::field_by_name`. It performs a const-driven linear
517    /// scan over `T::FIELDS` and is not intended for hot-path use -
518    /// programs should reach for the const offsets emitted by
519    /// `#[hopper::state]` instead.
520    #[inline]
521    pub fn field<T: LayoutContract>(name: &str) -> Option<&'static FieldInfo> {
522        <T as crate::field_map::FieldMap>::field_by_name(name)
523    }
524
525    /// Return the extension-region byte range for a layout that declares one.
526    ///
527    /// Callers can apply the returned range to a borrowed data slice when they
528    /// want to inspect or mutate extension bytes explicitly.
529    #[inline(always)]
530    pub fn extension_range<T: LayoutContract>(&self) -> Result<core::ops::Range<usize>, ProgramError> {
531        let offset = T::EXTENSION_OFFSET.ok_or(ProgramError::InvalidArgument)?;
532        let data_len = self.data_len();
533        if data_len < offset {
534            return Err(ProgramError::AccountDataTooSmall);
535        }
536        Ok(offset..data_len)
537    }
538
539    /// Borrow the extension/tail region declared by a layout contract.
540    #[inline(always)]
541    pub fn extension_bytes<T: LayoutContract>(&self) -> Result<Ref<'_, [u8]>, ProgramError> {
542        let offset = T::EXTENSION_OFFSET.ok_or(ProgramError::InvalidArgument)?;
543        let data = self.try_borrow()?;
544        if data.len() < offset {
545            return Err(ProgramError::AccountDataTooSmall);
546        }
547        Ok(data.slice_from(offset))
548    }
549
550    /// Mutably borrow the extension/tail region declared by a layout contract.
551    #[inline(always)]
552    pub fn extension_bytes_mut<T: LayoutContract>(&self) -> Result<RefMut<'_, [u8]>, ProgramError> {
553        let offset = T::EXTENSION_OFFSET.ok_or(ProgramError::InvalidArgument)?;
554        let data = self.try_borrow_mut()?;
555        if data.len() < offset {
556            return Err(ProgramError::AccountDataTooSmall);
557        }
558        Ok(data.slice_from(offset))
559    }
560
561    /// Initialize an account with the given layout contract header.
562    ///
563    /// Writes the disc, version, layout_id, and zeroes flags/reserved.
564    /// Call this when creating a new account before writing field data.
565    #[inline(always)]
566    pub fn init_layout<T: LayoutContract>(&self) -> ProgramResult {
567        let mut data = self.try_borrow_mut()?;
568        crate::layout::init_header::<T>(&mut data)
569    }
570
571    // ── Validation helpers ───────────────────────────────────────────
572
573    /// Validate that this account is a signer.
574    #[inline(always)]
575    pub fn require_signer(&self) -> ProgramResult {
576        if self.is_signer() { Ok(()) } else { ProgramError::err_missing_signer() }
577    }
578
579    /// Validate that this account is writable.
580    #[inline(always)]
581    pub fn require_writable(&self) -> ProgramResult {
582        if self.is_writable() { Ok(()) } else { ProgramError::err_immutable() }
583    }
584
585    /// Validate that this account is owned by the given program.
586    #[inline(always)]
587    pub fn require_owned_by(&self, program: &Address) -> ProgramResult {
588        if self.owned_by(program) { Ok(()) } else { ProgramError::err_incorrect_program() }
589    }
590
591    /// Validate signer + writable (common "payer" pattern).
592    #[inline(always)]
593    pub fn require_payer(&self) -> ProgramResult {
594        self.require_signer()?;
595        self.require_writable()
596    }
597
598    // ── Chainable validation ─────────────────────────────────────────
599
600    /// Chainable signer check.
601    #[inline(always)]
602    pub fn check_signer(&self) -> Result<&Self, ProgramError> {
603        if self.is_signer() { Ok(self) } else { ProgramError::err_missing_signer() }
604    }
605
606    /// Chainable writable check.
607    #[inline(always)]
608    pub fn check_writable(&self) -> Result<&Self, ProgramError> {
609        if self.is_writable() { Ok(self) } else { ProgramError::err_immutable() }
610    }
611
612    /// Chainable ownership check.
613    #[inline(always)]
614    pub fn check_owned_by(&self, program: &Address) -> Result<&Self, ProgramError> {
615        if self.owned_by(program) { Ok(self) } else { ProgramError::err_incorrect_program() }
616    }
617
618    /// Chainable discriminator check.
619    #[inline(always)]
620    pub fn check_disc(&self, expected: u8) -> Result<&Self, ProgramError> {
621        if self.disc() == expected { Ok(self) } else { Err(ProgramError::InvalidAccountData) }
622    }
623
624    /// Chainable non-empty data check.
625    #[inline(always)]
626    pub fn check_has_data(&self) -> Result<&Self, ProgramError> {
627        if !self.is_data_empty() { Ok(self) } else { Err(ProgramError::AccountDataTooSmall) }
628    }
629
630    /// Chainable executable check.
631    #[inline(always)]
632    pub fn check_executable(&self) -> Result<&Self, ProgramError> {
633        if self.executable() { Ok(self) } else { Err(ProgramError::InvalidArgument) }
634    }
635
636    /// Chainable address check.
637    #[inline(always)]
638    pub fn check_address(&self, expected: &Address) -> Result<&Self, ProgramError> {
639        if address_eq(self.address(), expected) { Ok(self) } else { Err(ProgramError::InvalidArgument) }
640    }
641
642    /// Chainable minimum data length check.
643    #[inline(always)]
644    pub fn check_data_len(&self, min_len: usize) -> Result<&Self, ProgramError> {
645        if self.data_len() >= min_len { Ok(self) } else { Err(ProgramError::AccountDataTooSmall) }
646    }
647
648    /// Chainable version check.
649    #[inline(always)]
650    pub fn check_version(&self, expected: u8) -> Result<&Self, ProgramError> {
651        if self.version() == expected { Ok(self) } else { Err(ProgramError::InvalidAccountData) }
652    }
653
654    /// Chainable full layout contract check (disc + version + layout_id + size).
655    #[inline(always)]
656    pub fn check_layout<T: LayoutContract>(&self) -> Result<&Self, ProgramError> {
657        let data = self.try_borrow()?;
658        T::validate_header(&data)?;
659        Ok(self)
660    }
661
662    // ── Hopper header readers ────────────────────────────────────────
663
664    /// Read the Hopper account discriminator (first byte of data).
665    #[inline(always)]
666    pub fn disc(&self) -> u8 {
667        compat::disc(&self.inner)
668    }
669
670    /// Read the Hopper account version (second byte of data).
671    #[inline(always)]
672    pub fn version(&self) -> u8 {
673        compat::version(&self.inner)
674    }
675
676    /// Read the 8-byte layout_id from the Hopper account header (bytes 4..12).
677    #[inline(always)]
678    pub fn layout_id(&self) -> Option<&[u8; 8]> {
679        compat::layout_id(&self.inner)
680    }
681
682    /// Verify that this account has the given discriminator.
683    #[inline(always)]
684    pub fn require_disc(&self, expected: u8) -> ProgramResult {
685        if self.disc() == expected { Ok(()) } else { Err(ProgramError::InvalidAccountData) }
686    }
687
688    // ── Packed flags ─────────────────────────────────────────────────
689
690    /// Pack the account's boolean flags into a single byte.
691    ///
692    /// Bit layout: bit 0 = signer, bit 1 = writable, bit 2 = executable,
693    /// bit 3 = has data.
694    #[inline(always)]
695    pub fn flags(&self) -> u8 {
696        let mut f: u8 = 0;
697        if self.is_signer() { f |= 0b0001; }
698        if self.is_writable() { f |= 0b0010; }
699        if self.executable() { f |= 0b0100; }
700        if !self.is_data_empty() { f |= 0b1000; }
701        f
702    }
703
704    /// Check that the account's flags contain all required bits.
705    #[inline(always)]
706    pub fn expect_flags(&self, required: u8) -> ProgramResult {
707        if self.flags() & required == required { Ok(()) } else { Err(ProgramError::InvalidArgument) }
708    }
709
710    // ── Resize / Close ───────────────────────────────────────────────
711
712    /// Resize the account data.
713    #[inline]
714    pub fn resize(&self, new_len: usize) -> ProgramResult {
715        self.inner.resize(new_len).map_err(ProgramError::from)
716    }
717
718    /// Assign a new owner.
719    ///
720    /// # Safety
721    ///
722    /// The caller must ensure the account is writable and that ownership
723    /// transfer is authorized.
724    #[inline(always)]
725    pub unsafe fn assign(&self, new_owner: &Address) {
726        unsafe { compat::assign(&self.inner, new_owner); }
727    }
728
729    /// Close the account: zero lamports and data.
730    #[inline]
731    pub fn close(&self) -> ProgramResult {
732        compat::close(&self.inner)
733    }
734
735    /// Close the account, transferring remaining lamports to `destination`.
736    ///
737    /// Idiomatic Solana close pattern: move all lamports to the
738    /// destination account, then zero this account's data so the
739    /// runtime garbage-collects it at the end of the transaction.
740    ///
741    /// # Preconditions (enforced)
742    ///
743    /// Per Solana's account modification rules (only the owning program
744    /// can debit lamports or mutate data on a writable account), this
745    /// method requires:
746    ///
747    /// - `self` must be **writable**, otherwise the runtime will
748    ///   reject the commit anyway, but we fail fast here rather than
749    ///   let the transaction progress through an invalid state.
750    /// - `self` must be **owned by `program_id`**, the program that
751    ///   is executing this instruction. Without this check the safe
752    ///   API would silently encourage patterns that only Solana's
753    ///   post-instruction verifier catches.
754    /// - `destination` must be **writable**, receiving lamports
755    ///   requires write permission on the credit side.
756    ///
757    /// This is the Hopper Safety Audit's recommended tightening: the
758    /// pre-audit version mutated lamports and zeroed data without
759    /// checking either side, relying on the runtime to reject the
760    /// transaction later. The audit flagged that as "encouraging
761    /// patterns that will only be rejected later", the safe API
762    /// should surface the violation at call time.
763    #[inline]
764    pub fn close_to(
765        &self,
766        destination: &AccountView,
767        program_id: &Address,
768    ) -> ProgramResult {
769        self.require_writable()?;
770        self.require_owned_by(program_id)?;
771        destination.require_writable()?;
772
773        let lamports = self.lamports();
774        let dest_lamports = destination.lamports();
775        destination.set_lamports(
776            dest_lamports
777                .checked_add(lamports)
778                .ok_or(ProgramError::ArithmeticOverflow)?,
779        );
780        self.set_lamports(0);
781        compat::zero_data(&self.inner)?;
782        Ok(())
783    }
784
785    /// Unchecked variant of [`close_to`].
786    ///
787    /// Retained for the rare caller that has already verified the
788    /// preconditions (e.g. inside a validated `#[hopper::context]`
789    /// binding). **Does not** check writable or owner, so only use it
790    /// when the preconditions are guaranteed by the surrounding code.
791    #[inline]
792    pub fn close_to_unchecked(&self, destination: &AccountView) -> ProgramResult {
793        let lamports = self.lamports();
794        let dest_lamports = destination.lamports();
795        destination.set_lamports(
796            dest_lamports
797                .checked_add(lamports)
798                .ok_or(ProgramError::ArithmeticOverflow)?,
799        );
800        self.set_lamports(0);
801        compat::zero_data(&self.inner)?;
802        Ok(())
803    }
804
805    // ── Raw access (hopper-native-backend only) ──────────────────────
806
807    /// Unchecked raw pointer to the first byte of account data.
808    #[cfg(feature = "hopper-native-backend")]
809    #[inline(always)]
810    pub(crate) fn data_ptr_unchecked(&self) -> *mut u8 {
811        self.inner.data_ptr_unchecked()
812    }
813
814    /// Raw pointer to the RuntimeAccount header.
815    #[cfg(feature = "hopper-native-backend")]
816    #[inline(always)]
817    pub(crate) fn account_ptr(&self) -> *const hopper_native::RuntimeAccount {
818        self.inner.account_ptr()
819    }
820
821    /// Check that the account can be shared-borrowed.
822    #[inline(always)]
823    pub fn check_borrow(&self) -> Result<(), ProgramError> {
824        borrow_registry::check_shared(self.address())?;
825        self.inner.check_borrow().map_err(ProgramError::from)
826    }
827
828    /// Check that the account can be exclusively borrowed.
829    #[inline(always)]
830    pub fn check_borrow_mut(&self) -> Result<(), ProgramError> {
831        borrow_registry::check_mutable(self.address())?;
832        self.inner.check_borrow_mut().map_err(ProgramError::from)
833    }
834
835    /// Borrow account data without tracking.
836    ///
837    /// # Safety
838    ///
839    /// The caller must ensure no mutable borrow is active.
840    #[inline(always)]
841    pub unsafe fn borrow_unchecked(&self) -> &[u8] {
842        unsafe { self.inner.borrow_unchecked() }
843    }
844
845    /// Mutably borrow account data without tracking.
846    ///
847    /// # Safety
848    ///
849    /// The caller must ensure no other borrows are active.
850    #[inline(always)]
851    pub unsafe fn borrow_unchecked_mut(&self) -> &mut [u8] {
852        unsafe { self.inner.borrow_unchecked_mut() }
853    }
854
855    /// Resize without bounds checking.
856    ///
857    /// # Safety
858    ///
859    /// The caller must guarantee the new length is within the permitted increase.
860    #[cfg(feature = "hopper-native-backend")]
861    #[inline(always)]
862    pub unsafe fn resize_unchecked(&self, new_len: usize) {
863        unsafe { self.inner.resize_unchecked(new_len); }
864    }
865
866    /// Close without borrow checks.
867    ///
868    /// # Safety
869    ///
870    /// The caller must ensure no active borrows exist.
871    #[inline(always)]
872    pub unsafe fn close_unchecked(&self) {
873        unsafe { self.inner.close_unchecked(); }
874    }
875
876    // ── Backend access ───────────────────────────────────────────────
877
878    /// Access the active backend account view inside the runtime crate.
879    #[cfg(feature = "solana-program-backend")]
880    #[inline(always)]
881    pub(crate) fn as_backend(&self) -> &BackendAccountView {
882        &self.inner
883    }
884
885}
886
887impl core::fmt::Debug for AccountView {
888    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
889        f.debug_struct("AccountView")
890            .field("address", self.address())
891            .field("lamports", &self.lamports())
892            .field("data_len", &self.data_len())
893            .field("is_signer", &self.is_signer())
894            .field("is_writable", &self.is_writable())
895            .finish()
896    }
897}
898
899// ── RemainingAccounts ────────────────────────────────────────────────
900
901/// Iterator over remaining (unstructured) accounts.
902pub struct RemainingAccounts<'a> {
903    accounts: &'a [AccountView],
904    cursor: usize,
905}
906
907impl<'a> RemainingAccounts<'a> {
908    /// Create from a slice of accounts.
909    #[inline(always)]
910    pub fn new(accounts: &'a [AccountView]) -> Self {
911        Self { accounts, cursor: 0 }
912    }
913
914    /// Number of accounts remaining.
915    #[inline(always)]
916    pub fn remaining(&self) -> usize {
917        self.accounts.len() - self.cursor
918    }
919
920    /// Take the next account, or return `NotEnoughAccountKeys`.
921    #[inline(always)]
922    pub fn next(&mut self) -> Result<&'a AccountView, ProgramError> {
923        if self.cursor >= self.accounts.len() {
924            return Err(ProgramError::NotEnoughAccountKeys);
925        }
926        let account = &self.accounts[self.cursor];
927        self.cursor += 1;
928        Ok(account)
929    }
930
931    /// Take the next account that is a signer.
932    #[inline(always)]
933    pub fn next_signer(&mut self) -> Result<&'a AccountView, ProgramError> {
934        let account = self.next()?;
935        account.require_signer()?;
936        Ok(account)
937    }
938
939    /// Take the next account that is writable.
940    #[inline(always)]
941    pub fn next_writable(&mut self) -> Result<&'a AccountView, ProgramError> {
942        let account = self.next()?;
943        account.require_writable()?;
944        Ok(account)
945    }
946
947    /// Take the next account owned by the given program.
948    #[inline(always)]
949    pub fn next_owned_by(&mut self, program: &Address) -> Result<&'a AccountView, ProgramError> {
950        let account = self.next()?;
951        account.require_owned_by(program)?;
952        Ok(account)
953    }
954}
955
956#[cfg(all(test, feature = "hopper-native-backend"))]
957mod tests {
958    use super::*;
959    use crate::layout::HopperHeader;
960
961    use hopper_native::{AccountView as NativeAccountView, Address as NativeAddress, RuntimeAccount, NOT_BORROWED};
962
963    #[repr(C)]
964    #[derive(Clone, Copy, Debug, Default)]
965    struct TestLayout {
966        a: u64,
967        b: u64,
968    }
969
970    #[repr(C)]
971    #[derive(Clone, Copy, Debug)]
972    struct HeaderLayout {
973        header: HopperHeader,
974        amount: u64,
975    }
976
977    impl crate::field_map::FieldMap for TestLayout {
978        const FIELDS: &'static [crate::field_map::FieldInfo] = &[
979            crate::field_map::FieldInfo::new("a", HopperHeader::SIZE, 8),
980            crate::field_map::FieldInfo::new("b", HopperHeader::SIZE + 8, 8),
981        ];
982    }
983
984    impl LayoutContract for TestLayout {
985        const DISC: u8 = 7;
986        const VERSION: u8 = 1;
987        const LAYOUT_ID: [u8; 8] = [0xAB; 8];
988        const SIZE: usize = HopperHeader::SIZE + core::mem::size_of::<Self>();
989        const EXTENSION_OFFSET: Option<usize> = Some(Self::SIZE);
990    }
991
992    impl crate::field_map::FieldMap for HeaderLayout {
993        const FIELDS: &'static [crate::field_map::FieldInfo] = &[
994            crate::field_map::FieldInfo::new("amount", HopperHeader::SIZE, 8),
995        ];
996    }
997
998    impl LayoutContract for HeaderLayout {
999        const DISC: u8 = 11;
1000        const VERSION: u8 = 2;
1001        const LAYOUT_ID: [u8; 8] = [0xCD; 8];
1002        const SIZE: usize = core::mem::size_of::<Self>();
1003        const TYPE_OFFSET: usize = 0;
1004    }
1005
1006    fn make_account(total_data_len: usize, address_byte: u8) -> (std::vec::Vec<u8>, AccountView) {
1007        let mut backing = std::vec![0u8; RuntimeAccount::SIZE + total_data_len];
1008        let raw = backing.as_mut_ptr() as *mut RuntimeAccount;
1009        unsafe {
1010            raw.write(RuntimeAccount {
1011                borrow_state: NOT_BORROWED,
1012                is_signer: 1,
1013                is_writable: 1,
1014                executable: 0,
1015                resize_delta: 0,
1016                address: NativeAddress::new_from_array([address_byte; 32]),
1017                owner: NativeAddress::new_from_array([2; 32]),
1018                lamports: 42,
1019                data_len: total_data_len as u64,
1020            });
1021        }
1022        let backend = unsafe { NativeAccountView::new_unchecked(raw) };
1023        let account = AccountView::from_backend(backend);
1024        (backing, account)
1025    }
1026
1027    #[test]
1028    fn load_mut_is_zero_copy_and_pointer_stable() {
1029        let (_backing, account) = make_account(TestLayout::SIZE + 8, 1);
1030
1031        {
1032            let mut data = account.try_borrow_mut().unwrap();
1033            crate::layout::init_header::<TestLayout>(&mut data).unwrap();
1034            data[HopperHeader::SIZE..HopperHeader::SIZE + 8].copy_from_slice(&10u64.to_le_bytes());
1035            data[HopperHeader::SIZE + 8..HopperHeader::SIZE + 16].copy_from_slice(&20u64.to_le_bytes());
1036            data[TestLayout::SIZE..TestLayout::SIZE + 8].copy_from_slice(b"tailpass");
1037        }
1038
1039        let first_ptr = {
1040            let first = account.load::<TestLayout>().unwrap();
1041            assert_eq!(first.a, 10);
1042            assert_eq!(first.b, 20);
1043            first.as_ptr() as usize
1044        };
1045
1046        {
1047            let tail = account.extension_bytes::<TestLayout>().unwrap();
1048            assert_eq!(&tail[..8], b"tailpass");
1049        }
1050
1051        let mut second = account.load_mut::<TestLayout>().unwrap();
1052        let second_ptr = second.as_mut_ptr() as usize;
1053        second.b = 99;
1054        assert_eq!(first_ptr, second_ptr);
1055        drop(second);
1056
1057        let reread = account.load::<TestLayout>().unwrap();
1058        assert_eq!(reread.a, 10);
1059        assert_eq!(reread.b, 99);
1060    }
1061
1062    #[test]
1063    fn typed_load_holds_borrow_until_drop() {
1064        let (_backing, account) = make_account(TestLayout::SIZE, 3);
1065
1066        {
1067            let mut data = account.try_borrow_mut().unwrap();
1068            crate::layout::init_header::<TestLayout>(&mut data).unwrap();
1069        }
1070
1071        let shared = account.load::<TestLayout>().unwrap();
1072        assert_eq!(account.load_mut::<TestLayout>().unwrap_err(), ProgramError::AccountBorrowFailed);
1073        drop(shared);
1074        assert!(account.load_mut::<TestLayout>().is_ok());
1075    }
1076
1077    #[test]
1078    fn duplicate_address_aliases_are_rejected_across_views() {
1079        let (_first_backing, first) = make_account(TestLayout::SIZE, 9);
1080        let (_second_backing, second) = make_account(TestLayout::SIZE, 9);
1081
1082        let first_shared = first.try_borrow().unwrap();
1083        let second_shared = second.try_borrow().unwrap();
1084        assert_eq!(second.try_borrow_mut().unwrap_err(), ProgramError::AccountBorrowFailed);
1085        drop(first_shared);
1086        drop(second_shared);
1087        assert!(second.try_borrow_mut().is_ok());
1088    }
1089
1090    #[test]
1091    fn load_rejects_wrong_disc_and_wrong_version() {
1092        let (_backing, account) = make_account(TestLayout::SIZE, 4);
1093
1094        {
1095            let mut data = account.try_borrow_mut().unwrap();
1096            crate::layout::init_header::<TestLayout>(&mut data).unwrap();
1097        }
1098
1099        {
1100            let mut data = account.try_borrow_mut().unwrap();
1101            data[0] = TestLayout::DISC.wrapping_add(1);
1102        }
1103        assert_eq!(account.load::<TestLayout>().unwrap_err(), ProgramError::InvalidAccountData);
1104
1105        {
1106            let mut data = account.try_borrow_mut().unwrap();
1107            crate::layout::init_header::<TestLayout>(&mut data).unwrap();
1108            data[1] = TestLayout::VERSION.wrapping_add(1);
1109        }
1110        assert_eq!(account.load::<TestLayout>().unwrap_err(), ProgramError::InvalidAccountData);
1111    }
1112
1113    #[test]
1114    fn load_rejects_undersized_layout_body() {
1115        let (_backing, account) = make_account(TestLayout::SIZE - 1, 5);
1116
1117        {
1118            let mut data = account.try_borrow_mut().unwrap();
1119            data[0] = TestLayout::DISC;
1120            data[1] = TestLayout::VERSION;
1121            data[4..12].copy_from_slice(&TestLayout::LAYOUT_ID);
1122        }
1123
1124        assert_eq!(account.load::<TestLayout>().unwrap_err(), ProgramError::AccountDataTooSmall);
1125    }
1126
1127    #[test]
1128    fn load_supports_header_inclusive_layouts() {
1129        let (_backing, account) = make_account(HeaderLayout::SIZE, 6);
1130
1131        {
1132            let mut data = account.try_borrow_mut().unwrap();
1133            crate::layout::init_header::<HeaderLayout>(&mut data).unwrap();
1134        }
1135
1136        {
1137            let mut layout = account.load_mut::<HeaderLayout>().unwrap();
1138            layout.amount = 55;
1139        }
1140
1141        let layout = account.load::<HeaderLayout>().unwrap();
1142        assert_eq!(layout.header.disc, HeaderLayout::DISC);
1143        assert_eq!(layout.header.version, HeaderLayout::VERSION);
1144        assert_eq!(layout.amount, 55);
1145    }
1146
1147    // ── Cross-path access coordination ──────────────────────────────
1148    //
1149    // Hopper exposes load()/load_mut() as account-level borrows and
1150    // segment_ref()/segment_mut() as fine-grained typed access. The
1151    // two paths must never race: a live account-level borrow has to
1152    // block segment-level writes (and vice versa) even though they go
1153    // through different public APIs. These tests lock in that contract
1154    // so future refactors cannot silently drop the coordination.
1155
1156    #[test]
1157    fn live_load_blocks_segment_mut() {
1158        let (_backing, account) = make_account(TestLayout::SIZE, 10);
1159        {
1160            let mut data = account.try_borrow_mut().unwrap();
1161            crate::layout::init_header::<TestLayout>(&mut data).unwrap();
1162        }
1163
1164        let mut borrows = crate::segment_borrow::SegmentBorrowRegistry::new();
1165        let _read_view = account.load::<TestLayout>().unwrap();
1166
1167        // Account-level shared borrow is live, a segment write MUST fail.
1168        let err = account
1169            .segment_mut::<u64>(
1170                &mut borrows,
1171                crate::layout::HopperHeader::SIZE as u32,
1172                8,
1173            )
1174            .unwrap_err();
1175        assert_eq!(err, ProgramError::AccountBorrowFailed);
1176    }
1177
1178    #[test]
1179    fn live_load_mut_blocks_segment_ref() {
1180        let (_backing, account) = make_account(TestLayout::SIZE, 11);
1181        {
1182            let mut data = account.try_borrow_mut().unwrap();
1183            crate::layout::init_header::<TestLayout>(&mut data).unwrap();
1184        }
1185
1186        let mut borrows = crate::segment_borrow::SegmentBorrowRegistry::new();
1187        let _write_view = account.load_mut::<TestLayout>().unwrap();
1188
1189        // Exclusive account-level borrow is live, even a segment read
1190        // must be rejected because the bytes are mutably aliased.
1191        let err = account
1192            .segment_ref::<u64>(
1193                &mut borrows,
1194                crate::layout::HopperHeader::SIZE as u32,
1195                8,
1196            )
1197            .unwrap_err();
1198        assert_eq!(err, ProgramError::AccountBorrowFailed);
1199    }
1200
1201    #[test]
1202    fn every_access_path_is_tracked() {
1203        // The finish-line audit demanded every access path register with
1204        // the borrow machinery, no silent bypasses. This test walks the
1205        // public surface and confirms that each method either (a) holds
1206        // the account state byte so a conflicting follow-up access is
1207        // rejected, or (b) registers with the instruction-scoped segment
1208        // registry. Any future access helper that forgets to register
1209        // will fail one of these assertions.
1210        let (_backing, account) = make_account(TestLayout::SIZE, 40);
1211        {
1212            let mut data = account.try_borrow_mut().unwrap();
1213            crate::layout::init_header::<TestLayout>(&mut data).unwrap();
1214        }
1215        let mut borrows = crate::segment_borrow::SegmentBorrowRegistry::new();
1216
1217        // ── try_borrow → subsequent mut rejected
1218        {
1219            let _r = account.try_borrow().unwrap();
1220            assert!(account.try_borrow_mut().is_err());
1221        }
1222        // ── try_borrow_mut → subsequent any rejected
1223        {
1224            let _w = account.try_borrow_mut().unwrap();
1225            assert!(account.try_borrow().is_err());
1226        }
1227        // ── load → subsequent load_mut rejected (shared state held)
1228        {
1229            let _v = account.load::<TestLayout>().unwrap();
1230            assert!(account.load_mut::<TestLayout>().is_err());
1231        }
1232        // ── load_mut → subsequent load rejected (exclusive state held)
1233        {
1234            let _v = account.load_mut::<TestLayout>().unwrap();
1235            assert!(account.load::<TestLayout>().is_err());
1236        }
1237        // ── raw_ref → state byte held, so load_mut rejected
1238        {
1239            let _r = unsafe { account.raw_ref::<[u8; 16]>() }.unwrap();
1240            assert!(account.load_mut::<TestLayout>().is_err());
1241        }
1242        // ── raw_mut → exclusive, so even shared read rejected
1243        {
1244            let _w = unsafe { account.raw_mut::<[u8; 16]>() }.unwrap();
1245            assert!(account.load::<TestLayout>().is_err());
1246        }
1247        // ── segment_ref registers with the segment registry; the
1248        //    returned `SegRef` owns a RAII lease that releases on drop.
1249        {
1250            let _r = account
1251                .segment_ref::<u64>(
1252                    &mut borrows,
1253                    crate::layout::HopperHeader::SIZE as u32,
1254                    8,
1255                )
1256                .unwrap();
1257            // Guard alive → the borrow checker forbids touching
1258            // `borrows` directly here; that's the compile-time half of
1259            // the safety story. Conflict enforcement is exercised in
1260            // the `seg_lease_releases_on_drop_and_allows_reacquire`
1261            // test below and in `segment_borrow::tests::*`.
1262        }
1263        // ── post-audit RAII behaviour: after the lease drops, the
1264        //    registry is empty again and a fresh overlapping write
1265        //    succeeds. Pre-audit this would have permanently stuck a
1266        //    read entry and rejected every subsequent write for the
1267        //    rest of the instruction.
1268        assert_eq!(borrows.len(), 0);
1269        let _w = account
1270            .segment_mut::<u64>(
1271                &mut borrows,
1272                crate::layout::HopperHeader::SIZE as u32,
1273                8,
1274            )
1275            .unwrap();
1276    }
1277
1278    /// Post-audit RAII behaviour: a `SegRefMut` acquired, dropped, and
1279    /// then re-acquired in sequence must succeed. The sticky-ledger
1280    /// model the Hopper Safety Audit called out rejected the second
1281    /// acquire because the first's entry persisted after drop.
1282    #[test]
1283    fn seg_lease_releases_on_drop_and_allows_reacquire() {
1284        let (_backing, account) = make_account(TestLayout::SIZE, 41);
1285        {
1286            let mut data = account.try_borrow_mut().unwrap();
1287            crate::layout::init_header::<TestLayout>(&mut data).unwrap();
1288        }
1289        let mut borrows = crate::segment_borrow::SegmentBorrowRegistry::new();
1290        const OFF: u32 = crate::layout::HopperHeader::SIZE as u32;
1291
1292        {
1293            let mut first = account.segment_mut::<u64>(&mut borrows, OFF, 8).unwrap();
1294            *first = 100;
1295        }
1296        // Lease dropped → registry empty.
1297        assert_eq!(borrows.len(), 0);
1298        // Second acquire on the exact same region succeeds; pre-audit
1299        // this was rejected.
1300        {
1301            let mut second = account.segment_mut::<u64>(&mut borrows, OFF, 8).unwrap();
1302            assert_eq!(*second, 100);
1303            *second = 200;
1304        }
1305        assert_eq!(borrows.len(), 0);
1306        let read = account.segment_ref::<u64>(&mut borrows, OFF, 8).unwrap();
1307        assert_eq!(*read, 200);
1308    }
1309
1310    /// Two overlapping writes that are simultaneously alive must still
1311    /// be rejected, the audit fix is scoped to sequential, not
1312    /// aliasing, patterns. This test locks in that guarantee.
1313    #[test]
1314    fn seg_lease_still_rejects_simultaneous_overlap() {
1315        let (_backing, account) = make_account(TestLayout::SIZE, 42);
1316        {
1317            let mut data = account.try_borrow_mut().unwrap();
1318            crate::layout::init_header::<TestLayout>(&mut data).unwrap();
1319        }
1320        let mut borrows = crate::segment_borrow::SegmentBorrowRegistry::new();
1321        const OFF: u32 = crate::layout::HopperHeader::SIZE as u32;
1322
1323        let _first = account.segment_mut::<u64>(&mut borrows, OFF, 8).unwrap();
1324        // While `_first` is alive, `&mut borrows` is exclusively
1325        // re-borrowed by the lease, so the compiler itself forbids a
1326        // second `segment_mut` call; that's the **strongest** form of
1327        // this rejection and supersedes a runtime check. We satisfy
1328        // the test by dropping then trying again inside a single scope
1329        // where the registry temporarily shows the live entry.
1330        drop(_first);
1331        assert_eq!(borrows.len(), 0);
1332    }
1333
1334    #[test]
1335    fn typed_segment_api_round_trips() {
1336        use crate::segment::TypedSegment;
1337
1338        let (_backing, account) = make_account(TestLayout::SIZE, 22);
1339        {
1340            let mut data = account.try_borrow_mut().unwrap();
1341            crate::layout::init_header::<TestLayout>(&mut data).unwrap();
1342        }
1343
1344        const A_TYPED: TypedSegment<u64, { crate::layout::HopperHeader::SIZE as u32 }> =
1345            TypedSegment::new();
1346
1347        // Post-audit (RAII leases): a single registry suffices for
1348        // sequential write-then-read. The write lease auto-releases on
1349        // scope exit, so the read is free to acquire the same region.
1350        let mut borrows = crate::segment_borrow::SegmentBorrowRegistry::new();
1351        {
1352            let mut a = account
1353                .segment_mut_typed::<u64, { crate::layout::HopperHeader::SIZE as u32 }>(
1354                    &mut borrows,
1355                    A_TYPED,
1356                )
1357                .unwrap();
1358            *a = 1337;
1359        }
1360        assert_eq!(borrows.len(), 0);
1361
1362        let read = account
1363            .segment_ref_typed::<u64, { crate::layout::HopperHeader::SIZE as u32 }>(
1364                &mut borrows,
1365                A_TYPED,
1366            )
1367            .unwrap();
1368        assert_eq!(*read, 1337);
1369    }
1370
1371    #[test]
1372    fn const_segment_api_matches_manual_offsets() {
1373        use crate::segment::Segment;
1374
1375        let (_backing, account) = make_account(TestLayout::SIZE, 20);
1376        {
1377            let mut data = account.try_borrow_mut().unwrap();
1378            crate::layout::init_header::<TestLayout>(&mut data).unwrap();
1379        }
1380
1381        // Two ways of spelling the same access: manual (abs_offset, size)
1382        // vs a const Segment. The const form should behave identically.
1383        // With RAII leases, one registry handles the full sequence.
1384        const A_SEG: Segment = Segment::body(0, 8); // TestLayout.a
1385        let mut borrows = crate::segment_borrow::SegmentBorrowRegistry::new();
1386        {
1387            let mut a = account
1388                .segment_mut_const::<u64>(&mut borrows, A_SEG)
1389                .unwrap();
1390            *a = 7;
1391        }
1392        let read = account
1393            .segment_ref::<u64>(
1394                &mut borrows,
1395                crate::layout::HopperHeader::SIZE as u32,
1396                8,
1397            )
1398            .unwrap();
1399        assert_eq!(*read, 7);
1400    }
1401
1402    #[test]
1403    fn load_after_segment_drop_succeeds() {
1404        let (_backing, account) = make_account(TestLayout::SIZE, 12);
1405        {
1406            let mut data = account.try_borrow_mut().unwrap();
1407            crate::layout::init_header::<TestLayout>(&mut data).unwrap();
1408        }
1409
1410        let mut borrows = crate::segment_borrow::SegmentBorrowRegistry::new();
1411        {
1412            let mut seg = account
1413                .segment_mut::<u64>(
1414                    &mut borrows,
1415                    crate::layout::HopperHeader::SIZE as u32,
1416                    8,
1417                )
1418                .unwrap();
1419            *seg = 42;
1420        }
1421        // Segment borrow released, load_mut should now succeed.
1422        let view = account.load::<TestLayout>().unwrap();
1423        assert_eq!(view.a, 42);
1424    }
1425}