use core::{hash::Hash, marker::PhantomData};
use crate::{Emit, Flat, Near, NearList, Patch, Region, buf::Buf, emitter::Pos, list::Segment};
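/// Invariant lifetime brand in the GhostCell style: `fn(&'id ()) -> &'id ()`
/// makes `'id` invariant, so values branded by one [`Session`] cannot be
/// mixed with values branded by another.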
#[derive(Copy, Clone)]
pub(crate) struct Brand<'id>(PhantomData<fn(&'id ()) -> &'id ()>);
impl Brand<'_> {
pub(crate) const fn new() -> Self {
Self(PhantomData)
}
}
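/// A branded, copyable position in a region's buffer, typed as `T`.
///
/// A `Ref` can only be dereferenced through the [`Session`] whose `'id`
/// brand it carries.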
pub struct Ref<'id, T: Flat> {
pos: Pos,
#[expect(
dead_code,
reason = "phantom field — carries invariant 'id brand for compile-time safety"
)]
brand: Brand<'id>,
_type: PhantomData<T>,
}
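// Manual `Copy`/`Clone`/`PartialEq`/`Eq`/`Hash` impls: deriving them would
// add an unwanted `T: Copy` (etc.) bound, but a `Ref` is just a position.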
impl<T: Flat> Copy for Ref<'_, T> {}
impl<T: Flat> Clone for Ref<'_, T> {
fn clone(&self) -> Self {
*self
}
}
impl<T: Flat> PartialEq for Ref<'_, T> {
fn eq(&self, other: &Self) -> bool {
self.pos == other.pos
}
}
impl<T: Flat> Eq for Ref<'_, T> {}
impl<T: Flat> Hash for Ref<'_, T> {
fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
self.pos.0.hash(state);
}
}
impl<'id, T: Flat> Ref<'id, T> {
const fn new(pos: Pos, brand: Brand<'id>) -> Self {
Self { pos, brand, _type: PhantomData }
}
}
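// Emitting a `Ref` re-emits the value it already points at: `emit` is a
// no-op returning the existing position, and `write_at` copies the
// pointed-at bytes into place.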
unsafe impl<T: Flat> Emit<T> for Ref<'_, T> {
fn emit(self, _p: &mut impl Patch) -> Pos {
self.pos
}
unsafe fn write_at(self, p: &mut impl Patch, at: Pos) {
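// Reserve the current length before taking a raw pointer into the buffer.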
p.reserve(p.byte_len() as u32);
unsafe {
let addr = p.raw_ptr().add(self.pos.0 as usize).addr();
let val = &*core::ptr::with_exposed_provenance::<T>(addr);
Emit::<T>::write_at(val, p, at);
}
}
}
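/// Opaque bookkeeping returned by [`Session::push_back`]; feeding it back
/// into the next `push_back` skips the walk to the end of the segment chain.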
pub struct ListTail<'id, U: Flat> {
seg_pos: Pos,
len: u32,
head_abs: Pos,
#[expect(
dead_code,
reason = "phantom field — carries invariant 'id brand for compile-time safety"
)]
brand: Brand<'id>,
_type: PhantomData<U>,
}
impl<U: Flat> Copy for ListTail<'_, U> {}
impl<U: Flat> Clone for ListTail<'_, U> {
fn clone(&self) -> Self {
*self
}
}
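/// A branded editing session over a [`Region`].
///
/// Every [`Ref`] handed out carries this session's invariant `'id` brand,
/// so references cannot outlive the session or cross into another one.
///
/// A minimal usage sketch. The `edit` entry point and the `items` field
/// are hypothetical names for illustration only; use the crate's real
/// constructor:
///
/// ```rust,ignore
/// // `edit` and `items` are hypothetical names, not this crate's API.
/// region.edit(|mut s| {
///     let root = s.root();                    // Ref<'id, Root>
///     let items = s.nav(root, |r| &r.items);  // Ref<'id, NearList<u32>>
///     s.push_front(items, 1u32);
/// });
/// ```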
#[cfg(feature = "alloc")]
pub struct Session<'id, 'a, Root: Flat, B: Buf = crate::buf::AlignedBuf<Root>> {
region: &'a mut Region<Root, B>,
brand: Brand<'id>,
}
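/// A branded editing session over a [`Region`].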
#[cfg(not(feature = "alloc"))]
pub struct Session<'id, 'a, Root: Flat, B: Buf> {
region: &'a mut Region<Root, B>,
brand: Brand<'id>,
}
impl<'id, 'a, Root: Flat, B: Buf> Session<'id, 'a, Root, B> {
pub(crate) const fn new(region: &'a mut Region<Root, B>, brand: Brand<'id>) -> Self {
Self { region, brand }
}
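/// Returns a branded [`Ref`] to the root value at position zero.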
#[must_use]
pub const fn root(&self) -> Ref<'id, Root> {
Ref::new(Pos::ZERO, self.brand)
}
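/// Borrows the value behind `r`. Panics if the reference does not fit
/// within the region's initialized bytes.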
#[must_use]
pub fn at<T: Flat>(&self, r: Ref<'id, T>) -> &T {
let base = self.region.deref_raw();
let start = r.pos.0 as usize;
assert!(start + size_of::<T>() <= self.region.byte_len(), "at: reference out of bounds");
unsafe { &*base.add(start).cast::<T>() }
}
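/// Projects `r` through `f` to a field (or other interior value) and
/// returns a branded [`Ref`] to it. Panics if `f` returns a reference
/// that is not inside this region's buffer.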
#[must_use]
pub fn nav<T: Flat, U: Flat>(&self, r: Ref<'id, T>, f: impl FnOnce(&T) -> &U) -> Ref<'id, U> {
let base = self.region.deref_raw() as usize;
let val = self.at(r);
let field_ptr = core::ptr::from_ref::<U>(f(val)) as usize;
let offset =
field_ptr.checked_sub(base).expect("navigated field is not within this region's buffer");
assert!(offset + size_of::<U>() <= self.region.byte_len(), "navigated field out of bounds");
Ref::new(Pos(offset as u32), self.brand)
}
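/// Resolves the [`Near`] pointer at `r` to a direct [`Ref`] to its
/// target. Panics if the target address precedes the region's base.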
#[must_use]
pub fn follow<U: Flat>(&self, r: Ref<'id, Near<U>>) -> Ref<'id, U> {
let base = self.region.deref_raw() as usize;
let near = self.at(r);
let target_ptr = core::ptr::from_ref::<U>(near.get()) as usize;
let offset = target_ptr.checked_sub(base).expect("Near target outside region");
Ref::new(Pos(offset as u32), self.brand)
}
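/// Converts a borrowed value that lives inside this region back into a
/// branded [`Ref`]. Panics if `val` is not within the region's buffer.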
#[must_use]
pub fn ref_of<U: Flat>(&self, val: &U) -> Ref<'id, U> {
let base = self.region.deref_raw() as usize;
let ptr = core::ptr::from_ref(val) as usize;
let offset = ptr.checked_sub(base).expect("ref_of: value not within region");
assert!(offset + size_of::<U>() <= self.region.byte_len(), "ref_of: value out of bounds");
Ref::new(Pos(offset as u32), self.brand)
}
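/// Overwrites the value at `r` with `val`.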
pub fn set<T: Flat + Copy>(&mut self, r: Ref<'id, T>, val: T) {
unsafe { self.region.write_flat_internal(r.pos, val) };
}
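/// Writes the output of `builder` directly over the bytes at `r`.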
pub fn write<T: Flat>(&mut self, r: Ref<'id, T>, builder: impl Emit<T>) {
unsafe { builder.write_at(self.region, r.pos) };
}
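/// Emits `builder` into fresh space and repoints the [`Near`] at `r` to
/// the newly written value.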
pub fn splice<U: Flat>(&mut self, r: Ref<'id, Near<U>>, builder: impl Emit<U>) {
let target = builder.emit(self.region);
unsafe { self.region.patch_near_internal(r.pos, target) };
}
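/// Replaces the list at `r` with a single fresh segment containing
/// `items`, emitted in iteration order. An empty iterator resets the
/// list header to empty.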
pub fn splice_list<U: Flat, E: Emit<U>, I>(&mut self, r: Ref<'id, NearList<U>>, items: I)
where
I: IntoIterator<Item = E>,
I::IntoIter: ExactSizeIterator,
{
let list_pos = r.pos;
let iter = items.into_iter();
let count = iter.len();
let len = count as u32;
if len == 0 {
unsafe { self.region.patch_list_header_internal(list_pos, Pos::ZERO, 0) };
return;
}
let seg_pos = self.region.alloc_segment_internal::<U>(len);
let values_offset = size_of::<Segment<U>>();
for (i, item) in iter.enumerate() {
let val_pos = seg_pos.offset(values_offset + i * size_of::<U>());
unsafe { item.write_at(self.region, val_pos) };
}
unsafe { self.region.patch_list_header_internal(list_pos, seg_pos, len) };
}
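/// Rebuilds the list at `r` from elements that already live in this
/// region, in the order given by `refs`. The buffer is reserved up front
/// so the element bytes are not invalidated mid-copy; the old segments
/// remain in the buffer as unused bytes.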
pub fn re_splice_list<U: Flat>(&mut self, r: Ref<'id, NearList<U>>, refs: &[Ref<'id, U>]) {
let list_pos = r.pos;
let count = refs.len();
let len = count as u32;
if len == 0 {
unsafe { self.region.patch_list_header_internal(list_pos, Pos::ZERO, 0) };
return;
}
let seg_overhead = size_of::<Segment<U>>() + count * size_of::<U>() + align_of::<Segment<U>>();
self.region.reserve_internal((self.region.byte_len() + seg_overhead) as u32);
let seg_pos = self.region.alloc_segment_internal::<U>(len);
let values_offset = size_of::<Segment<U>>();
let base_addr = self.region.deref_raw().addr();
for (i, &item_ref) in refs.iter().enumerate() {
let val_pos = seg_pos.offset(values_offset + i * size_of::<U>());
unsafe {
let addr = base_addr.wrapping_add(item_ref.pos.0 as usize);
let val = &*core::ptr::with_exposed_provenance::<U>(addr);
Emit::<U>::write_at(val, self.region, val_pos);
}
}
unsafe { self.region.patch_list_header_internal(list_pos, seg_pos, len) };
}
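/// Applies `f` to every element of the list at `r` in place, walking the
/// segment chain; each element is read by value and the mapped result is
/// written back.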
#[expect(
clippy::cast_sign_loss,
clippy::cast_possible_wrap,
reason = "segment offset arithmetic: buffer positions are always non-negative and fit in i64"
)]
pub fn map_list<U: Flat + Copy>(&mut self, r: Ref<'id, NearList<U>>, mut f: impl FnMut(U) -> U) {
let (count, first_seg_off) = self.list_meta(r);
if count == 0 {
return;
}
let base = self.region.deref_raw();
let base_addr = base.addr();
let mut seg_off = first_seg_off;
let mut remaining = count;
while remaining > 0 {
unsafe {
let seg_addr = base_addr.wrapping_add(seg_off);
let seg = &*core::ptr::with_exposed_provenance::<Segment<U>>(seg_addr);
let seg_len = seg.len as usize;
let vals_base = seg_addr.wrapping_add(size_of::<Segment<U>>());
for j in 0..seg_len {
let val_addr = vals_base.wrapping_add(j * size_of::<U>());
let val = core::ptr::with_exposed_provenance::<U>(val_addr).read();
let mapped = f(val);
self.region.write_flat_internal(Pos((val_addr - base_addr) as u32), mapped);
}
remaining -= seg_len;
if remaining > 0 {
seg_off = (seg_off as i64 + i64::from(seg.next)) as usize;
}
}
}
}
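/// Prepends `item` by allocating a one-element segment and linking it in
/// front of the current head.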
pub fn push_front<U: Flat>(&mut self, r: Ref<'id, NearList<U>>, item: impl Emit<U>) {
let list: &NearList<U> = self.at(r);
let old_len = list.len() as u32;
let old_head_offset = list.head_offset();
let seg_pos = self.region.alloc_segment_internal::<U>(1);
let val_pos = seg_pos.offset(size_of::<Segment<U>>());
unsafe { item.write_at(self.region, val_pos) };
if old_len > 0 {
let head_field_abs = i64::from(r.pos.0);
let old_first_abs = head_field_abs + i64::from(old_head_offset);
#[expect(clippy::cast_sign_loss, reason = "absolute position is always non-negative")]
let old_first_pos = Pos(old_first_abs as u32);
unsafe { self.region.patch_segment_next_internal(seg_pos, old_first_pos) };
}
unsafe { self.region.patch_list_header_internal(r.pos, seg_pos, old_len + 1) };
}
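/// Appends `item` to the list at `r`.
///
/// Passing the [`ListTail`] returned by a previous `push_back` makes the
/// append O(1); with `None`, the segment chain is first walked from the
/// head to find the last segment.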
pub fn push_back<U: Flat>(
&mut self,
r: Ref<'id, NearList<U>>,
tail: Option<ListTail<'id, U>>,
item: impl Emit<U>,
) -> ListTail<'id, U> {
let seg_pos = self.region.alloc_segment_internal::<U>(1);
let val_pos = seg_pos.offset(size_of::<Segment<U>>());
unsafe { item.write_at(self.region, val_pos) };
if let Some(t) = tail {
unsafe { self.region.patch_segment_next_internal(t.seg_pos, seg_pos) };
let new_len = t.len + 1;
unsafe { self.region.patch_list_header_internal(r.pos, t.head_abs, new_len) };
ListTail {
seg_pos,
len: new_len,
head_abs: t.head_abs,
brand: self.brand,
_type: PhantomData,
}
} else {
let list: &NearList<U> = self.at(r);
let old_len = list.len() as u32;
if old_len == 0 {
unsafe { self.region.patch_list_header_internal(r.pos, seg_pos, 1) };
ListTail { seg_pos, len: 1, head_abs: seg_pos, brand: self.brand, _type: PhantomData }
} else {
let head_off = list.head_offset();
#[expect(clippy::cast_sign_loss, reason = "absolute positions are always non-negative")]
let (last_seg_pos, head_abs) = {
let base = self.region.deref_raw();
let head_abs = (i64::from(r.pos.0) + i64::from(head_off)) as usize;
let mut seg_abs = head_abs;
loop {
let next_rel = unsafe { core::ptr::read_unaligned(base.add(seg_abs).cast::<i32>()) };
if next_rel == 0 {
break;
}
#[expect(clippy::cast_possible_wrap, reason = "buffer offsets fit in i64")]
{
seg_abs = (seg_abs as i64 + i64::from(next_rel)) as usize;
}
}
(Pos(seg_abs as u32), Pos(head_abs as u32))
};
unsafe { self.region.patch_segment_next_internal(last_seg_pos, seg_pos) };
let new_len = old_len + 1;
unsafe { self.region.patch_list_header_internal(r.pos, head_abs, new_len) };
ListTail { seg_pos, len: new_len, head_abs, brand: self.brand, _type: PhantomData }
}
}
}
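/// Appends every element of `extra` in one new segment linked after the
/// current last segment (found by walking the chain).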
pub fn extend_list<U: Flat, E: Emit<U>, I>(&mut self, r: Ref<'id, NearList<U>>, extra: I)
where
I: IntoIterator<Item = E>,
I::IntoIter: ExactSizeIterator,
{
let list = self.at(r);
let old_len = list.len() as u32;
let head_off = list.head_offset();
#[expect(clippy::cast_sign_loss, reason = "absolute positions are always non-negative")]
let last_seg_pos = if old_len == 0 {
None
} else {
let base = self.region.deref_raw();
let mut seg_abs = (i64::from(r.pos.0) + i64::from(head_off)) as usize;
loop {
let next_rel = unsafe { core::ptr::read_unaligned(base.add(seg_abs).cast::<i32>()) };
if next_rel == 0 {
break;
}
#[expect(
clippy::cast_possible_wrap,
clippy::cast_sign_loss,
reason = "buffer offsets fit in i64"
)]
{
seg_abs = (seg_abs as i64 + i64::from(next_rel)) as usize;
}
}
Some(Pos(seg_abs as u32))
};
let iter = extra.into_iter();
let count = iter.len() as u32;
if count == 0 {
return;
}
let seg_pos = self.region.alloc_segment_internal::<U>(count);
let values_offset = size_of::<Segment<U>>();
for (i, item) in iter.enumerate() {
let val_pos = seg_pos.offset(values_offset + i * size_of::<U>());
unsafe { item.write_at(self.region, val_pos) };
}
if let Some(last) = last_seg_pos {
unsafe { self.region.patch_segment_next_internal(last, seg_pos) };
#[expect(clippy::cast_sign_loss, reason = "absolute position is always non-negative")]
let first_abs = Pos((i64::from(r.pos.0) + i64::from(head_off)) as u32);
unsafe { self.region.patch_list_header_internal(r.pos, first_abs, old_len + count) };
} else {
unsafe { self.region.patch_list_header_internal(r.pos, seg_pos, count) };
}
}
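/// Grafts the contents of another region into this one and returns a
/// branded [`Ref`] to the grafted root.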
#[must_use]
pub fn graft<U: Flat, B2: Buf>(&mut self, src: &Region<U, B2>) -> Ref<'id, U> {
let pos = self.region.graft_internal(src);
Ref::new(pos, self.brand)
}
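/// Collects a branded [`Ref`] to every element of the list, in order.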
#[cfg(feature = "alloc")]
#[must_use]
pub fn list_refs<T: Flat>(&self, list: Ref<'id, NearList<T>>) -> alloc::vec::Vec<Ref<'id, T>> {
let nl = self.at(list);
let len = nl.len();
if len == 0 {
return alloc::vec::Vec::new();
}
let base = self.region.deref_raw() as usize;
let mut refs = alloc::vec::Vec::with_capacity(len);
for elem in nl {
let offset = (core::ptr::from_ref(elem) as usize) - base;
refs.push(Ref::new(Pos(offset as u32), self.brand));
}
refs
}
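/// Returns `(element count, absolute offset of the first segment)` for
/// the list at `r`, or `(0, 0)` for an empty list.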
#[expect(clippy::cast_sign_loss, reason = "absolute positions are always non-negative")]
fn list_meta<U: Flat>(&self, r: Ref<'id, NearList<U>>) -> (usize, usize) {
let list: &NearList<U> = self.at(r);
let count = list.len();
if count == 0 {
return (0, 0);
}
let first_seg_off = (i64::from(r.pos.0) + i64::from(list.head_offset())) as usize;
(count, first_seg_off)
}
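/// Walks the segment chain and records the absolute byte offset of every
/// element, in list order.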
#[cfg(feature = "alloc")]
#[expect(
clippy::cast_sign_loss,
clippy::cast_possible_wrap,
reason = "segment offset arithmetic: buffer positions are always non-negative and fit in i64"
)]
fn collect_list_positions<U: Flat>(&self, r: Ref<'id, NearList<U>>) -> alloc::vec::Vec<u32> {
let (count, first_seg_off) = self.list_meta(r);
if count == 0 {
return alloc::vec::Vec::new();
}
let mut positions = alloc::vec::Vec::with_capacity(count);
let base_addr = self.region.deref_raw().addr();
let mut seg_off = first_seg_off;
let mut remaining = count;
while remaining > 0 {
unsafe {
let seg_addr = base_addr.wrapping_add(seg_off);
let seg = &*core::ptr::with_exposed_provenance::<Segment<U>>(seg_addr);
let seg_len = seg.len as usize;
let vals_base = seg_addr.wrapping_add(size_of::<Segment<U>>());
for j in 0..seg_len {
let val_off = vals_base.wrapping_add(j * size_of::<U>()) - base_addr;
positions.push(val_off as u32);
}
remaining -= seg_len;
if remaining > 0 {
seg_off = (seg_off as i64 + i64::from(seg.next)) as usize;
}
}
}
positions
}
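/// Retains only the elements for which `pred` returns `true`, rebuilding
/// the survivors into one fresh contiguous segment.
///
/// `pred` is called twice per element (a counting pass, then a copying
/// pass), so it should be deterministic. The old segments remain in the
/// buffer as unused bytes.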
#[expect(
clippy::cast_sign_loss,
clippy::cast_possible_wrap,
reason = "segment offset arithmetic: buffer positions are always non-negative and fit in i64"
)]
pub fn filter_list<U: Flat>(
&mut self,
r: Ref<'id, NearList<U>>,
mut pred: impl FnMut(&U) -> bool,
) {
let (count, first_seg_off) = self.list_meta(r);
if count == 0 {
return;
}
let kept_count = {
let base_addr = self.region.deref_raw().addr();
let mut seg_off = first_seg_off;
let mut remaining = count;
let mut kept = 0usize;
while remaining > 0 {
unsafe {
let seg_addr = base_addr.wrapping_add(seg_off);
let seg = &*core::ptr::with_exposed_provenance::<Segment<U>>(seg_addr);
let seg_len = seg.len as usize;
let vals_base = seg_addr.wrapping_add(size_of::<Segment<U>>());
for j in 0..seg_len {
let val_addr = vals_base.wrapping_add(j * size_of::<U>());
let val = &*core::ptr::with_exposed_provenance::<U>(val_addr);
if pred(val) {
kept += 1;
}
}
remaining -= seg_len;
if remaining > 0 {
seg_off = (seg_off as i64 + i64::from(seg.next)) as usize;
}
}
}
kept
};
if kept_count == count {
return;
}
let list_pos = r.pos;
let len = kept_count as u32;
if kept_count == 0 {
unsafe { self.region.patch_list_header_internal(list_pos, Pos::ZERO, 0) };
return;
}
let seg_overhead =
size_of::<Segment<U>>() + kept_count * size_of::<U>() + align_of::<Segment<U>>();
self.region.reserve_internal((self.region.byte_len() + seg_overhead) as u32);
let seg_pos = self.region.alloc_segment_internal::<U>(len);
let values_offset = size_of::<Segment<U>>();
let base_addr = self.region.deref_raw().addr();
let mut seg_off = first_seg_off;
let mut remaining = count;
let mut dest_i = 0;
while remaining > 0 {
unsafe {
let seg_addr = base_addr.wrapping_add(seg_off);
let seg = &*core::ptr::with_exposed_provenance::<Segment<U>>(seg_addr);
let seg_len = seg.len as usize;
let vals_base = seg_addr.wrapping_add(size_of::<Segment<U>>());
for j in 0..seg_len {
let val_addr = vals_base.wrapping_add(j * size_of::<U>());
let val = &*core::ptr::with_exposed_provenance::<U>(val_addr);
if pred(val) {
let val_pos = seg_pos.offset(values_offset + dest_i * size_of::<U>());
Emit::<U>::write_at(val, self.region, val_pos);
dest_i += 1;
}
}
remaining -= seg_len;
if remaining > 0 {
seg_off = (seg_off as i64 + i64::from(seg.next)) as usize;
}
}
}
unsafe { self.region.patch_list_header_internal(list_pos, seg_pos, len) };
}
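/// Reverses the list at `r` by copying its elements, in reverse order,
/// into one fresh contiguous segment.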
#[expect(
clippy::cast_sign_loss,
clippy::cast_possible_wrap,
reason = "segment offset arithmetic: buffer positions are always non-negative and fit in i64"
)]
pub fn reverse_list<U: Flat>(&mut self, r: Ref<'id, NearList<U>>) {
let list_pos = r.pos;
let list: &NearList<U> = self.at(r);
let count = list.len();
if count <= 1 {
return;
}
let head_offset = list.head_offset();
let len = count as u32;
let first_seg_off = (i64::from(list_pos.0) + i64::from(head_offset)) as usize;
let seg_overhead = size_of::<Segment<U>>() + count * size_of::<U>() + align_of::<Segment<U>>();
self.region.reserve_internal((self.region.byte_len() + seg_overhead) as u32);
let seg_pos = self.region.alloc_segment_internal::<U>(len);
let values_offset = size_of::<Segment<U>>();
let base_addr = self.region.deref_raw().addr();
let mut seg_off = first_seg_off;
let mut remaining = count;
let mut dest_i = count;
while remaining > 0 {
unsafe {
let seg_addr = base_addr.wrapping_add(seg_off);
let seg = &*core::ptr::with_exposed_provenance::<Segment<U>>(seg_addr);
let seg_len = seg.len as usize;
let vals_base = seg_addr.wrapping_add(size_of::<Segment<U>>());
for j in 0..seg_len {
dest_i -= 1;
let val_addr = vals_base.wrapping_add(j * size_of::<U>());
let val = &*core::ptr::with_exposed_provenance::<U>(val_addr);
let val_pos = seg_pos.offset(values_offset + dest_i * size_of::<U>());
Emit::<U>::write_at(val, self.region, val_pos);
}
remaining -= seg_len;
if remaining > 0 {
seg_off = (seg_off as i64 + i64::from(seg.next)) as usize;
}
}
}
unsafe { self.region.patch_list_header_internal(list_pos, seg_pos, len) };
}
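/// Sorts the list at `r` by `cmp`, copying the elements in sorted order
/// into one fresh contiguous segment.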
#[cfg(feature = "alloc")]
pub fn sort_list<U: Flat>(
&mut self,
r: Ref<'id, NearList<U>>,
mut cmp: impl FnMut(&U, &U) -> core::cmp::Ordering,
) {
let mut positions = self.collect_list_positions::<U>(r);
if positions.len() <= 1 {
return;
}
let base = self.region.deref_raw();
positions.sort_by(|&a_pos, &b_pos| {
unsafe {
let a = &*core::ptr::with_exposed_provenance::<U>(base.add(a_pos as usize).addr());
let b = &*core::ptr::with_exposed_provenance::<U>(base.add(b_pos as usize).addr());
cmp(a, b)
}
});
let list_pos = r.pos;
let count = positions.len();
let len = count as u32;
let seg_overhead = size_of::<Segment<U>>() + count * size_of::<U>() + align_of::<Segment<U>>();
self.region.reserve_internal((self.region.byte_len() + seg_overhead) as u32);
let seg_pos = self.region.alloc_segment_internal::<U>(len);
let values_offset = size_of::<Segment<U>>();
let base_addr = self.region.deref_raw().addr();
for (i, &elem_off) in positions.iter().enumerate() {
unsafe {
let val_pos = seg_pos.offset(values_offset + i * size_of::<U>());
let addr = base_addr.wrapping_add(elem_off as usize);
let val = &*core::ptr::with_exposed_provenance::<U>(addr);
Emit::<U>::write_at(val, self.region, val_pos);
}
}
unsafe { self.region.patch_list_header_internal(list_pos, seg_pos, len) };
}
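/// Removes consecutive duplicate elements, as judged by `eq`, in the
/// spirit of `Vec::dedup`.
///
/// Like `filter_list`, this runs two passes (count, then copy), so `eq`
/// is called twice per adjacent pair and should be deterministic.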
#[expect(
clippy::cast_sign_loss,
clippy::cast_possible_wrap,
reason = "segment offset arithmetic: buffer positions are always non-negative and fit in i64"
)]
pub fn dedup_list<U: Flat>(
&mut self,
r: Ref<'id, NearList<U>>,
mut eq: impl FnMut(&U, &U) -> bool,
) {
let (count, first_seg_off) = self.list_meta(r);
if count <= 1 {
return;
}
let kept_count = {
let base_addr = self.region.deref_raw().addr();
let mut seg_off = first_seg_off;
let mut remaining = count;
let mut kept = 0usize;
let mut prev_off: Option<usize> = None;
while remaining > 0 {
unsafe {
let seg_addr = base_addr.wrapping_add(seg_off);
let seg = &*core::ptr::with_exposed_provenance::<Segment<U>>(seg_addr);
let seg_len = seg.len as usize;
let vals_base = seg_addr.wrapping_add(size_of::<Segment<U>>());
for j in 0..seg_len {
let val_addr = vals_base.wrapping_add(j * size_of::<U>());
let val = &*core::ptr::with_exposed_provenance::<U>(val_addr);
let is_dup = prev_off.is_some_and(|p| {
let prev = &*core::ptr::with_exposed_provenance::<U>(base_addr.wrapping_add(p));
eq(prev, val)
});
if !is_dup {
kept += 1;
}
prev_off = Some(val_addr - base_addr);
}
remaining -= seg_len;
if remaining > 0 {
seg_off = (seg_off as i64 + i64::from(seg.next)) as usize;
}
}
}
kept
};
if kept_count == count {
return;
}
let list_pos = r.pos;
let len = kept_count as u32;
let seg_overhead =
size_of::<Segment<U>>() + kept_count * size_of::<U>() + align_of::<Segment<U>>();
self.region.reserve_internal((self.region.byte_len() + seg_overhead) as u32);
let seg_pos = self.region.alloc_segment_internal::<U>(len);
let values_offset = size_of::<Segment<U>>();
let base_addr = self.region.deref_raw().addr();
let mut seg_off = first_seg_off;
let mut remaining = count;
let mut dest_i = 0;
let mut prev_off: Option<usize> = None;
while remaining > 0 {
unsafe {
let seg_addr = base_addr.wrapping_add(seg_off);
let seg = &*core::ptr::with_exposed_provenance::<Segment<U>>(seg_addr);
let seg_len = seg.len as usize;
let vals_base = seg_addr.wrapping_add(size_of::<Segment<U>>());
for j in 0..seg_len {
let val_addr = vals_base.wrapping_add(j * size_of::<U>());
let val = &*core::ptr::with_exposed_provenance::<U>(val_addr);
let is_dup = prev_off.is_some_and(|p| {
let prev = &*core::ptr::with_exposed_provenance::<U>(base_addr.wrapping_add(p));
eq(prev, val)
});
if !is_dup {
let val_pos = seg_pos.offset(values_offset + dest_i * size_of::<U>());
Emit::<U>::write_at(val, self.region, val_pos);
dest_i += 1;
}
prev_off = Some(val_addr - base_addr);
}
remaining -= seg_len;
if remaining > 0 {
seg_off = (seg_off as i64 + i64::from(seg.next)) as usize;
}
}
}
unsafe { self.region.patch_list_header_internal(list_pos, seg_pos, len) };
}
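/// Returns a branded [`Ref`] to the list element at `index`. Panics if
/// `index` is out of bounds.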
#[must_use]
pub fn list_item<T: Flat>(&self, list: Ref<'id, NearList<T>>, index: usize) -> Ref<'id, T> {
let nl = self.at(list);
let val = &nl[index];
self.ref_of(val)
}
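/// Starts a chained navigation [`Cursor`] at the root.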
#[must_use]
pub const fn cursor<'s>(&'s mut self) -> Cursor<'id, 's, 'a, Root, Root, B> {
let r = self.root();
Cursor { session: self, r }
}
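/// Starts a chained navigation [`Cursor`] at an arbitrary [`Ref`].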
#[must_use]
pub const fn cursor_at<'s, T: Flat>(
&'s mut self,
r: Ref<'id, T>,
) -> Cursor<'id, 's, 'a, T, Root, B> {
Cursor { session: self, r }
}
}
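/// A chainable navigation handle pairing a mutable [`Session`] borrow
/// with a current [`Ref`].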
pub struct Cursor<'id, 's, 'a, T: Flat, Root: Flat, B: Buf> {
session: &'s mut Session<'id, 'a, Root, B>,
r: Ref<'id, T>,
}
impl<'id, 's, 'a, T: Flat, Root: Flat, B: Buf> Cursor<'id, 's, 'a, T, Root, B> {
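/// Navigates into an interior value via `f`, consuming the cursor.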
#[must_use]
pub fn at<U: Flat>(self, f: impl FnOnce(&T) -> &U) -> Cursor<'id, 's, 'a, U, Root, B> {
let r = self.session.nav(self.r, f);
Cursor { session: self.session, r }
}
#[must_use]
pub fn get(&self) -> &T {
self.session.at(self.r)
}
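/// Ends the chain, returning the current branded [`Ref`].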
#[must_use]
pub const fn pin(self) -> Ref<'id, T> {
self.r
}
}
impl<T: Flat + Copy, Root: Flat, B: Buf> Cursor<'_, '_, '_, T, Root, B> {
pub fn set(self, val: T) {
self.session.set(self.r, val);
}
}
impl<T: Flat, Root: Flat, B: Buf> Cursor<'_, '_, '_, T, Root, B> {
pub fn write_with(self, builder: impl Emit<T>) {
self.session.write(self.r, builder);
}
}
impl<'id, 's, 'a, U: Flat, Root: Flat, B: Buf> Cursor<'id, 's, 'a, Near<U>, Root, B> {
pub fn splice(self, builder: impl Emit<U>) {
self.session.splice(self.r, builder);
}
#[must_use]
pub fn follow(self) -> Cursor<'id, 's, 'a, U, Root, B> {
let r = self.session.follow(self.r);
Cursor { session: self.session, r }
}
}
impl<U: Flat, Root: Flat, B: Buf> Cursor<'_, '_, '_, NearList<U>, Root, B> {
pub fn splice_list<E: Emit<U>, I>(self, items: I)
where
I: IntoIterator<Item = E>,
I::IntoIter: ExactSizeIterator,
{
self.session.splice_list(self.r, items);
}
pub fn filter_list(self, pred: impl FnMut(&U) -> bool) {
self.session.filter_list(self.r, pred);
}
}