toml-spanner 1.0.2

High-performance TOML parser and deserializer that preserves span information, with fast compile times.
Documentation
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
// Unit tests live in a sibling file so this module stays focused on the
// implementation; the `#[path]` attribute wires them in under `cfg(test)`.
#[cfg(test)]
#[path = "./table_tests.rs"]
mod tests;

use crate::Span;
use crate::item::{
    FLAG_DOTTED, FLAG_FROZEN, FLAG_HEADER, FLAG_TABLE, Item, ItemMetadata, Key, MaybeItem, NONE,
    TAG_TABLE, TableStyle,
};
use crate::parser::KeyRef;
use std::mem::size_of;
use std::ptr::NonNull;

use crate::arena::Arena;

/// Hash index from key to entry position, built by the parser for tables
/// large enough to benefit from O(1) lookup.
pub(crate) type TableIndex<'de> = foldhash::HashMap<KeyRef<'de>, usize>;

/// A single table entry: a key together with its value.
type TableEntry<'de> = (Key<'de>, Item<'de>);

/// Capacity of the first allocation made by `InnerTable::grow`.
const MIN_CAP: u32 = 2;

/// A TOML table: a flat list of key-value pairs with linear lookup.
#[repr(C, align(8))]
pub(crate) struct InnerTable<'de> {
    /// Number of initialized entries.
    pub(super) len: u32,
    /// Number of entries the current allocation can hold.
    pub(super) cap: u32,
    /// Arena-backed entry storage; dangling while `cap == 0`.
    pub(super) ptr: NonNull<TableEntry<'de>>,
}

impl<'de> InnerTable<'de> {
    /// Creates an empty table.
    ///
    /// Nothing is allocated until the first insertion; `ptr` stays dangling
    /// while `cap == 0`.
    #[inline]
    pub fn new() -> Self {
        Self {
            len: 0,
            cap: 0,
            ptr: NonNull::dangling(),
        }
    }

    /// Creates an empty table whose storage can hold `cap` entries without
    /// reallocating. A `cap` of zero allocates nothing.
    pub(crate) fn with_capacity(cap: u32, arena: &'de Arena) -> Self {
        let mut table = Self::new();
        if cap > 0 {
            table.grow_to(cap, arena);
        }
        table
    }

    /// Inserts a key-value pair. Does **not** check for duplicates.
    ///
    /// Grows the backing storage (amortized doubling) when full and returns a
    /// mutable reference to the freshly written entry.
    pub fn insert_unique(
        &mut self,
        key: Key<'de>,
        item: Item<'de>,
        arena: &'de Arena,
    ) -> &mut TableEntry<'de> {
        let len = self.len;
        if self.len == self.cap {
            self.grow(arena);
        }
        // SAFETY: grow() ensures len < cap, so ptr.add(len) is within the
        // allocation. The write targets uninitialized memory past the current length.
        unsafe {
            let ptr = self.ptr.as_ptr().add(len as usize);
            ptr.write((key, item));
            self.len = len + 1;
            &mut (*ptr)
        }
    }

    /// Returns the number of entries.
    #[inline]
    pub fn len(&self) -> usize {
        self.len as usize
    }

    /// Returns `true` if the table has no entries.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }
    /// Looks up a key using the parser's hash index when the table is large
    /// enough, falling back to a linear scan for small tables.
    ///
    /// Both the table and the index must originate from the same `parse` call
    /// and neither must have been mutated since parsing.
    #[cfg(feature = "to-toml")]
    pub(crate) fn get_entry_with_index(
        &self,
        key: &str,
        index: &TableIndex<'_>,
    ) -> Option<&TableEntry<'de>> {
        if self.len() > crate::parser::INDEXED_TABLE_THRESHOLD {
            // SAFETY: len > INDEXED_TABLE_THRESHOLD (> 6), so the table is non-empty.
            let first_key_span = unsafe { self.first_key_span_start_unchecked() };
            let i = *index.get(&KeyRef::new(key, first_key_span))?;
            self.entries().get(i)
        } else {
            for entry in self.entries() {
                if entry.0.name == key {
                    return Some(entry);
                }
            }
            None
        }
    }

    /// Looks up a key, consulting the parser's hash index only when one is
    /// supplied *and* the table is large enough to benefit; otherwise falls
    /// back to a linear scan.
    ///
    /// When present, the index must come from the same `parse` call as this
    /// table, with neither mutated since parsing.
    pub(crate) fn get_entry_with_maybe_index(
        &self,
        key: &str,
        index: Option<&TableIndex<'_>>,
    ) -> Option<&TableEntry<'de>> {
        if self.len() > crate::parser::INDEXED_TABLE_THRESHOLD {
            if let Some(index) = index {
                // SAFETY: len > INDEXED_TABLE_THRESHOLD (> 6), so the table is non-empty.
                let first_key_span = unsafe { self.first_key_span_start_unchecked() };
                let i = *index.get(&KeyRef::new(key, first_key_span))?;
                return self.entries().get(i);
            }
        }
        for entry in self.entries() {
            if entry.0.name == key {
                return Some(entry);
            }
        }
        None
    }
    /// Linear scan for a key, returning both key and value references.
    pub fn get_entry(&self, name: &str) -> Option<(&Key<'de>, &Item<'de>)> {
        for (key, item) in self.entries() {
            if key.name == name {
                return Some((key, item));
            }
        }
        None
    }

    /// Returns a reference to the value for `name`. Linear scan.
    pub fn get(&self, name: &str) -> Option<&Item<'de>> {
        for (key, item) in self.entries() {
            if key.name == name {
                return Some(item);
            }
        }
        None
    }

    /// Returns a mutable reference to the value for `name`. Linear scan.
    pub fn get_mut(&mut self, name: &str) -> Option<&mut Item<'de>> {
        for (key, item) in self.entries_mut() {
            if key.name == name {
                return Some(item);
            }
        }
        None
    }

    /// Returns `true` if the table contains the key.
    #[inline]
    pub fn contains_key(&self, name: &str) -> bool {
        self.get(name).is_some()
    }

    /// Removes the first entry matching `name`, returning the key-value pair.
    /// Uses swap-remove, so the ordering of remaining entries may change.
    pub fn remove_entry(&mut self, name: &str) -> Option<(Key<'de>, Item<'de>)> {
        let idx = self.find_index(name)?;
        Some(self.remove_at(idx))
    }

    /// Returns the span start of the first key. Used as a table discriminator
    /// in the parser's hash index.
    ///
    /// # Safety
    ///
    /// The table must be non-empty (`self.len > 0`).
    #[inline]
    pub(crate) unsafe fn first_key_span_start_unchecked(&self) -> u32 {
        debug_assert!(self.len > 0);
        // SAFETY: caller guarantees len > 0, so the first entry is initialized.
        unsafe { (*self.ptr.as_ptr()).0.span.start }
    }

    /// Returns a slice of all entries.
    #[inline]
    pub fn entries(&self) -> &[TableEntry<'de>] {
        // SAFETY: ptr points to arena-allocated memory with at least len
        // initialized entries. When len == 0, ptr is NonNull::dangling() which
        // satisfies from_raw_parts' alignment requirement for zero-length slices.
        unsafe { std::slice::from_raw_parts(self.ptr.as_ptr(), self.len as usize) }
    }

    /// Returns a mutable slice of all entries.
    #[inline]
    pub fn entries_mut(&mut self) -> &mut [TableEntry<'de>] {
        // SAFETY: same as entries() — ptr is valid for len initialized entries.
        unsafe { std::slice::from_raw_parts_mut(self.ptr.as_ptr(), self.len as usize) }
    }

    /// Returns the position of the first entry whose key equals `name`.
    pub(crate) fn find_index(&self, name: &str) -> Option<usize> {
        for (i, (key, _)) in self.entries().iter().enumerate() {
            if key.name == name {
                return Some(i);
            }
        }
        None
    }

    /// Remove entry at `idx` by swapping it with the last entry.
    fn remove_at(&mut self, idx: usize) -> (Key<'de>, Item<'de>) {
        let last = self.len as usize - 1;
        // SAFETY: idx was returned by find_index, so idx < len and the
        // pointer is within initialized entries. read() moves the value out.
        let ptr = unsafe { self.ptr.as_ptr().add(idx) };
        let entry = unsafe { ptr.read() };
        if idx != last {
            // SAFETY: `last` is a valid, initialized index distinct from `idx`,
            // so moving it into the vacated slot duplicates nothing.
            unsafe {
                ptr.write(self.ptr.as_ptr().add(last).read());
            }
        }
        self.len -= 1;
        entry
    }

    /// Doubles the capacity (starting from `MIN_CAP` for an empty table).
    /// Panics if the doubled capacity overflows `u32`.
    #[cold]
    fn grow(&mut self, arena: &'de Arena) {
        let new_cap = if self.cap == 0 {
            MIN_CAP
        } else {
            self.cap.checked_mul(2).expect("capacity overflow")
        };
        self.grow_to(new_cap, arena);
    }

    /// Reallocates the backing storage to hold `new_cap` entries, moving any
    /// existing entries via the arena's realloc.
    fn grow_to(&mut self, new_cap: u32, arena: &'de Arena) {
        // On 64-bit, u32 * size_of::<TableEntry>() cannot overflow usize.
        #[cfg(target_pointer_width = "32")]
        let new_size = (new_cap as usize)
            .checked_mul(size_of::<TableEntry<'_>>())
            .expect("capacity overflow");
        #[cfg(not(target_pointer_width = "32"))]
        let new_size = new_cap as usize * size_of::<TableEntry<'_>>();
        if self.cap > 0 {
            let old_size = self.cap as usize * size_of::<TableEntry<'_>>();
            // SAFETY: ptr was returned by a prior arena alloc of old_size bytes.
            self.ptr = unsafe { arena.realloc(self.ptr.cast(), old_size, new_size).cast() };
        } else {
            self.ptr = arena.alloc(new_size).cast();
        }
        self.cap = new_cap;
    }

    /// Deep-clones this table into `arena`. Keys and strings are shared
    /// with the source.
    ///
    /// Scalar entries are bulk-copied in runs; aggregate values are
    /// deep-cloned individually.
    pub(crate) fn clone_in(&self, arena: &'de Arena) -> Self {
        let len = self.len as usize;
        if len == 0 {
            return Self::new();
        }
        let size = len * size_of::<TableEntry<'de>>();
        let dst: NonNull<TableEntry<'de>> = arena.alloc(size).cast();
        let src = self.ptr.as_ptr();
        let dst_ptr = dst.as_ptr();

        // Start of the current run of consecutive scalar entries.
        let mut run_start = 0;
        for i in 0..len {
            // SAFETY: i < len, so src.add(i) is within initialized entries.
            if unsafe { !(*src.add(i)).1.is_scalar() } {
                if run_start < i {
                    // SAFETY: entries[run_start..i] have scalar values — Key is
                    // Copy, scalar Items are bitwise-copyable. Source and
                    // destination are disjoint arena regions.
                    unsafe {
                        std::ptr::copy_nonoverlapping(
                            src.add(run_start),
                            dst_ptr.add(run_start),
                            i - run_start,
                        );
                    }
                }
                // SAFETY: the entry's value is an aggregate; deep-clone it.
                // Key is Copy.
                unsafe {
                    let (key, item) = &*src.add(i);
                    dst_ptr.add(i).write((*key, item.clone_in(arena)));
                }
                run_start = i + 1;
            }
        }
        // Flush the trailing run of scalar entries, if any.
        if run_start < len {
            unsafe {
                std::ptr::copy_nonoverlapping(
                    src.add(run_start),
                    dst_ptr.add(run_start),
                    len - run_start,
                );
            }
        }

        Self {
            len: self.len,
            // The clone is allocated exactly-sized, so cap == len.
            cap: self.len,
            ptr: dst,
        }
    }

    /// Copies this table into `target`, returning a copy with `'static` lifetime.
    ///
    /// # Safety
    ///
    /// `target` must have sufficient space as computed by
    /// [`compute_size`](crate::item::owned).
    pub(crate) unsafe fn emplace_in(
        &self,
        target: &mut crate::item::owned::ItemCopyTarget,
    ) -> InnerTable<'static> {
        let len = self.len as usize;
        if len == 0 {
            return InnerTable::new();
        }
        let byte_size = len * size_of::<TableEntry<'static>>();
        // SAFETY: Caller guarantees sufficient aligned space for len entries.
        let dst_ptr = unsafe { target.alloc_aligned(byte_size) }
            .as_ptr()
            .cast::<TableEntry<'static>>();
        for (i, (key, item)) in self.entries().iter().enumerate() {
            // Key names are copied into the target so the result owns them.
            let new_key = Key {
                name: unsafe { target.copy_str(key.name) },
                span: key.span,
            };
            let new_item = unsafe { item.emplace_in(target) };
            // SAFETY: i < len, within the allocated region.
            unsafe { dst_ptr.add(i).write((new_key, new_item)) };
        }
        InnerTable {
            len: self.len,
            cap: self.len,
            // SAFETY: dst_ptr is non-null (from alloc_aligned).
            ptr: unsafe { NonNull::new_unchecked(dst_ptr) },
        }
    }
}

impl std::fmt::Debug for InnerTable<'_> {
    /// Formats the table as a `{key: value, ...}` debug map.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_map()
            .entries(self.entries().iter().map(|(k, v)| (k, v)))
            .finish()
    }
}

/// Consuming iterator over a [`Table`], yielding `(`[`Key`]`, `[`Item`]`)` pairs.
pub struct IntoIter<'de> {
    /// The drained table; entries at positions `>= index` are still initialized.
    table: InnerTable<'de>,
    /// Position of the next entry to yield.
    index: u32,
}

impl<'de> Iterator for IntoIter<'de> {
    type Item = (Key<'de>, Item<'de>);

    /// Moves the next entry out of the table by value.
    fn next(&mut self) -> Option<Self::Item> {
        if self.index >= self.table.len {
            return None;
        }
        let slot = self.index as usize;
        self.index += 1;
        // SAFETY: slot < len was checked above, so the read stays within the
        // initialized entries; each slot is read at most once.
        Some(unsafe { self.table.ptr.as_ptr().add(slot).read() })
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // index never exceeds len, so the subtraction cannot underflow.
        let left = (self.table.len - self.index) as usize;
        (left, Some(left))
    }
}

impl ExactSizeIterator for IntoIter<'_> {}

/// A TOML table, a transient collection of key/value pairs inside an [`Item`].
///
/// `Table` is an intermediate structure for converting between Rust types and
/// TOML through [`FromToml`] and [`ToToml`]. It is the top level value
/// returned by [`parse`](crate::parse) and the value inside any `[section]`
/// or inline `{ ... }` table.
///
/// <div class="warning">
///
/// Entries are stored in insertion order, but the TOML specification defines
/// table keys as unordered. Avoid relying on iteration order for semantic
/// purposes.
///
/// </div>
///
/// # Accessing values
///
/// Use `table["key"]` for index access, which returns a [`MaybeItem`] that
/// never panics on missing keys and supports chained indexing. Use
/// [`get`](Self::get) or [`get_mut`](Self::get_mut) for `Option` based access.
///
/// For structured extraction, use [`Item::table_helper`](crate::Item::table_helper)
/// to create a [`TableHelper`](crate::de::TableHelper).
///
/// # Lookup performance
///
/// Direct lookups ([`get`](Self::get), `table["key"]`) perform a linear scan
/// over entries, O(n) in the number of keys. For small tables or a handful
/// of lookups, as is typical in TOML, this is fast enough.
///
/// For structured conversion of larger tables, use
/// [`TableHelper`](crate::de::TableHelper) via
/// [`Document::table_helper`](crate::Document::table_helper) or
/// [`Item::table_helper`](crate::Item::table_helper), which use the
/// parser's hash index for O(1) lookups.
///
/// # Constructing tables
///
/// To build a table programmatically, create one with [`Table::new`] and
/// insert entries with [`Table::insert`]. An [`Arena`](crate::Arena) is
/// required because entries are arena allocated.
///
/// # Iteration
///
/// `Table` implements [`IntoIterator`] (both by reference and by value),
/// yielding `(`[`Key`]`, `[`Item`]`)` pairs.
///
/// [`FromToml`]: crate::FromToml
/// [`ToToml`]: crate::ToToml
#[repr(C)]
pub struct Table<'de> {
    /// Entry storage; laid out to match `Item`'s payload union (see `as_item`).
    pub(crate) value: InnerTable<'de>,
    /// Tag, flags, and span; constructors always set `TAG_TABLE` here.
    pub(crate) meta: ItemMetadata,
}

impl<'de> Table<'de> {
    /// Creates an empty table in format-hints mode (no source span).
    ///
    /// The style starts out automatic: normalization chooses between inline
    /// and header form based on content. Use [`set_style`](Self::set_style)
    /// to pin a specific rendering.
    pub fn new() -> Table<'de> {
        let mut meta = ItemMetadata::hints(TAG_TABLE, FLAG_TABLE);
        meta.set_auto_style();
        Table {
            value: InnerTable::new(),
            meta,
        }
    }

    /// Creates an empty table with pre-allocated capacity.
    ///
    /// Returns `None` when `cap` does not fit in a `u32`.
    pub fn try_with_capacity(cap: usize, arena: &'de Arena) -> Option<Table<'de>> {
        let capacity: u32 = cap.try_into().ok()?;
        let mut meta = ItemMetadata::hints(TAG_TABLE, FLAG_TABLE);
        meta.set_auto_style();
        Some(Table {
            value: InnerTable::with_capacity(capacity, arena),
            meta,
        })
    }

    /// Creates an empty table in span mode (parser-produced).
    pub(crate) fn new_spanned(span: Span) -> Table<'de> {
        let meta = ItemMetadata::spanned(TAG_TABLE, FLAG_TABLE, span.start, span.end);
        Table {
            value: InnerTable::new(),
            meta,
        }
    }

    /// Returns the source span, or `0..0` if this table was constructed
    /// programmatically (format-hints mode).
    pub fn span(&self) -> Span {
        self.meta.span()
    }
}

impl<'de> Default for Table<'de> {
    fn default() -> Self {
        Self::new()
    }
}

impl PartialEq for Table<'_> {
    // Structural equality delegated to the module-level comparison; no index
    // is available here, so `None` forces linear lookups.
    fn eq(&self, other: &Self) -> bool {
        super::equal_tables(self, other, None)
    }
}

impl std::fmt::Debug for Table<'_> {
    /// Delegates to the inner table's map-style debug output.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Debug::fmt(&self.value, f)
    }
}

impl<'de> Table<'de> {
    /// Inserts a key-value pair, replacing any existing entry with the same key.
    ///
    /// When the key already exists, only the value is replaced; the stored key
    /// (and its span) is kept unchanged.
    ///
    /// Performs an O(n) linear scan for duplicates. Prefer [`Table::insert_unique`]
    /// when the key is known to be absent.
    pub fn insert(&mut self, key: Key<'de>, value: Item<'de>, arena: &'de Arena) {
        if let Some(existing) = self.get_mut(key.name) {
            *existing = value;
            return;
        }
        self.value.insert_unique(key, value, arena);
    }

    /// Inserts a key-value pair without checking for duplicates.
    ///
    /// Inserting a duplicate key is sound but the behavior is unspecified. It may
    /// panic, produce invalid TOML on emit, or cause deserialization errors.
    pub fn insert_unique(&mut self, key: Key<'de>, value: Item<'de>, arena: &'de Arena) {
        self.value.insert_unique(key, value, arena);
    }
    /// Returns the number of entries.
    #[inline]
    pub fn len(&self) -> usize {
        self.value.len()
    }

    /// Returns `true` if the table has no entries.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.value.is_empty()
    }

    /// Linear scan for a key, returning both key and value references.
    pub fn get_key_value(&self, name: &str) -> Option<(&Key<'de>, &Item<'de>)> {
        self.value.get_entry(name)
    }

    /// Returns a reference to the value for `name`. Linear scan, O(n).
    pub fn get(&self, name: &str) -> Option<&Item<'de>> {
        self.value.get(name)
    }

    /// Returns a mutable reference to the value for `name`. Linear scan, O(n).
    pub fn get_mut(&mut self, name: &str) -> Option<&mut Item<'de>> {
        self.value.get_mut(name)
    }

    /// Returns `true` if the table contains the key.
    #[inline]
    pub fn contains_key(&self, name: &str) -> bool {
        self.value.contains_key(name)
    }

    /// Removes the first entry matching `name`, returning the key-value pair.
    /// Uses swap-remove, so the ordering of remaining entries may change.
    pub fn remove_entry(&mut self, name: &str) -> Option<(Key<'de>, Item<'de>)> {
        self.value.remove_entry(name)
    }

    /// Returns a slice of all entries.
    #[inline]
    pub fn entries(&self) -> &[TableEntry<'de>] {
        self.value.entries()
    }
    /// Returns a mutable slice of all entries.
    #[inline]
    pub fn entries_mut(&mut self) -> &mut [TableEntry<'de>] {
        self.value.entries_mut()
    }

    /// Returns an iterator over all entries (key-value pairs).
    #[inline]
    pub fn iter(&self) -> std::slice::Iter<'_, TableEntry<'de>> {
        self.entries().iter()
    }

    /// Returns a shared view of this `Table` as an [`Item`] with the same
    /// span and payload, without copying.
    pub fn as_item(&self) -> &Item<'de> {
        // SAFETY: Table is #[repr(C)] { InnerTable, ItemMetadata }.
        // Item  is #[repr(C)] { Payload,    ItemMetadata }.
        // Payload is a union whose `table` field is ManuallyDrop<InnerTable>
        // (#[repr(transparent)]). Both types are 24 bytes, align 8 (verified
        // by const assertions). The field offsets match (data at 0..16,
        // metadata at 16..24). Only a shared reference is returned, so no
        // mutation can change the tag.
        unsafe { &*(self as *const Table<'de>).cast::<Item<'de>>() }
    }

    /// Converts this `Table` into an [`Item`] with the same span and payload.
    pub fn into_item(self) -> Item<'de> {
        // SAFETY: Same layout argument as as_item(). Size and alignment
        // equality verified by const assertions below. The tag in
        // ItemMetadata is preserved unchanged through the transmute.
        unsafe { std::mem::transmute(self) }
    }
}

impl<'a, 'de> IntoIterator for &'a mut Table<'de> {
    type Item = &'a mut (Key<'de>, Item<'de>);
    type IntoIter = std::slice::IterMut<'a, TableEntry<'de>>;

    /// Iterates over mutable references to the entries in insertion order.
    fn into_iter(self) -> Self::IntoIter {
        self.entries_mut().iter_mut()
    }
}
impl<'a, 'de> IntoIterator for &'a Table<'de> {
    type Item = &'a (Key<'de>, Item<'de>);
    type IntoIter = std::slice::Iter<'a, TableEntry<'de>>;

    /// Iterates over shared references to the entries in insertion order.
    fn into_iter(self) -> Self::IntoIter {
        self.entries().iter()
    }
}

impl<'de> IntoIterator for Table<'de> {
    type Item = (Key<'de>, Item<'de>);
    type IntoIter = IntoIter<'de>;

    fn into_iter(self) -> Self::IntoIter {
        IntoIter {
            table: self.value,
            index: 0,
        }
    }
}

// Compile-time layout checks backing the pointer casts in `as_item` and the
// transmute in `into_item`.
const _: () = assert!(std::mem::size_of::<Table<'_>>() == std::mem::size_of::<Item<'_>>());
const _: () = assert!(std::mem::align_of::<Table<'_>>() == std::mem::align_of::<Item<'_>>());

// SAFETY: `Table` is layout-compatible with `Item` when the item's tag is
// `TAG_TABLE` (see `as_item`/`into_item`). The underlying `InnerTable` stores
// a `NonNull` into arena memory and a length/capacity pair, with no interior
// mutability: every mutation of the entries vector requires `&mut Table`, so
// shared readers cannot race with a writer. The arena backing the entries is
// itself borrowed for `'de`, and any inner `Item`s transitively satisfy the
// same invariants (see `unsafe impl Send/Sync for Item`).
unsafe impl Send for Table<'_> {}
unsafe impl Sync for Table<'_> {}

// SAFETY: `IntoIter` owns the `InnerTable` it drains. The same reasoning as
// for `Table` applies: it has no interior mutability and its `NonNull` points
// into arena storage that the iterator exclusively reads through `&mut self`.
unsafe impl Send for IntoIter<'_> {}
unsafe impl Sync for IntoIter<'_> {}

impl<'de> Table<'de> {
    /// Start offset of this table's source span.
    #[inline]
    pub(crate) fn span_start(&self) -> u32 {
        self.meta.span_start()
    }

    /// Overwrites the start offset of the source span.
    #[inline]
    pub(crate) fn set_span_start(&mut self, v: u32) {
        self.meta.set_span_start(v);
    }

    /// Overwrites the end offset of the source span.
    #[inline]
    pub(crate) fn set_span_end(&mut self, v: u32) {
        self.meta.set_span_end(v);
    }

    /// Pushes the span end out to `new_end` (delegates to the metadata).
    #[inline]
    pub(crate) fn extend_span_end(&mut self, new_end: u32) {
        self.meta.extend_span_end(new_end);
    }

    /// Flags this table as defined with a `[header]`.
    #[inline]
    pub(crate) fn set_header_flag(&mut self) {
        self.meta.set_flag(FLAG_HEADER);
    }

    /// Flags this table as created through dotted keys.
    #[inline]
    pub(crate) fn set_dotted_flag(&mut self) {
        self.meta.set_flag(FLAG_DOTTED);
    }

    /// Returns the [`TableStyle`] recorded on this table.
    #[inline]
    pub fn style(&self) -> TableStyle {
        let flag = self.meta.flag();
        if flag == FLAG_DOTTED {
            TableStyle::Dotted
        } else if flag == FLAG_HEADER {
            TableStyle::Header
        } else if flag == FLAG_FROZEN {
            TableStyle::Inline
        } else {
            TableStyle::Implicit
        }
    }

    /// Pins the [`TableStyle`] used when this table is emitted.
    ///
    /// Tables built programmatically start out with an unresolved style and
    /// emit picks a rendering from their size and contents (inline for small
    /// tables, a header otherwise). Calling this method locks in `kind` so
    /// the choice survives emission unchanged.
    #[inline]
    pub fn set_style(&mut self, kind: TableStyle) {
        self.meta.set_flag(match kind {
            TableStyle::Implicit => FLAG_TABLE,
            TableStyle::Dotted => FLAG_DOTTED,
            TableStyle::Header => FLAG_HEADER,
            TableStyle::Inline => FLAG_FROZEN,
        });
        self.meta.clear_auto_style();
    }

    /// Deep-clones this table into `arena`. Keys and strings are shared
    /// with the source; metadata is copied verbatim.
    pub fn clone_in(&self, arena: &'de Arena) -> Table<'de> {
        Table {
            meta: self.meta,
            value: self.value.clone_in(arena),
        }
    }
}

impl<'de> std::ops::Index<&str> for Table<'de> {
    type Output = MaybeItem<'de>;

    /// Never panics: a missing key yields the shared `NONE` sentinel, so
    /// chained indexing over absent keys stays safe.
    #[inline]
    fn index(&self, index: &str) -> &Self::Output {
        match self.get(index) {
            Some(item) => MaybeItem::from_ref(item),
            None => &NONE,
        }
    }
}