// toml_spanner/item/table.rs
// Unit tests live in a sibling file so this module stays focused on the
// implementation; compiled only under `cfg(test)`.
#[cfg(test)]
#[path = "./table_tests.rs"]
mod tests;
4
5use crate::Span;
6use crate::item::{
7    FLAG_DOTTED, FLAG_FROZEN, FLAG_HEADER, FLAG_TABLE, Item, ItemMetadata, Key, MaybeItem, NONE,
8    TAG_TABLE, TableStyle,
9};
10use crate::parser::KeyRef;
11use std::mem::size_of;
12use std::ptr::NonNull;
13
14use crate::arena::Arena;
15
/// Hash index built by the parser, mapping a key reference to its entry
/// position within a table. Used for O(1) lookups on large tables.
pub(crate) type TableIndex<'de> = foldhash::HashMap<KeyRef<'de>, usize>;

/// A single table entry: a key paired with its value.
type TableEntry<'de> = (Key<'de>, Item<'de>);

/// Capacity allocated on the first growth of an empty table.
const MIN_CAP: u32 = 2;
21
/// A TOML table: a flat list of key-value pairs with linear lookup.
///
/// `#[repr(C, align(8))]` keeps the layout fixed so that `Table` can be
/// reinterpreted as `Item` (see `Table::as_item` and the const assertions
/// at the bottom of this file).
#[repr(C, align(8))]
pub(crate) struct InnerTable<'de> {
    // Number of initialized entries.
    pub(super) len: u32,
    // Allocated capacity, in entries.
    pub(super) cap: u32,
    // Arena-allocated storage; dangling while `cap == 0`.
    pub(super) ptr: NonNull<TableEntry<'de>>,
}
29
impl<'de> InnerTable<'de> {
    /// Creates an empty table.
    #[inline]
    pub fn new() -> Self {
        Self {
            len: 0,
            cap: 0,
            ptr: NonNull::dangling(),
        }
    }

    /// Creates an empty table with storage for `cap` entries pre-allocated
    /// from `arena`. A `cap` of zero allocates nothing.
    pub(crate) fn with_capacity(cap: u32, arena: &'de Arena) -> Self {
        let mut table = Self::new();
        if cap > 0 {
            table.grow_to(cap, arena);
        }
        table
    }

    /// Inserts a key-value pair. Does **not** check for duplicates.
    ///
    /// Returns a mutable reference to the newly written entry.
    pub fn insert_unique(
        &mut self,
        key: Key<'de>,
        item: Item<'de>,
        arena: &'de Arena,
    ) -> &mut TableEntry<'de> {
        let len = self.len;
        if self.len == self.cap {
            self.grow(arena);
        }
        // SAFETY: grow() ensures len < cap, so ptr.add(len) is within the
        // allocation. The write targets uninitialized memory past the current length.
        unsafe {
            let ptr = self.ptr.as_ptr().add(len as usize);
            ptr.write((key, item));
            self.len = len + 1;
            &mut (*ptr)
        }
    }

    /// Returns the number of entries.
    #[inline]
    pub fn len(&self) -> usize {
        self.len as usize
    }

    /// Returns `true` if the table has no entries.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }

    /// Looks up a key using the parser's hash index when the table is large
    /// enough, falling back to a linear scan for small tables.
    ///
    /// Both the table and the index must originate from the same `parse` call
    /// and neither must have been mutated since parsing.
    #[cfg(feature = "to-toml")]
    pub(crate) fn get_entry_with_index(
        &self,
        key: &str,
        index: &TableIndex<'_>,
    ) -> Option<&TableEntry<'de>> {
        if self.len() > crate::parser::INDEXED_TABLE_THRESHOLD {
            // SAFETY: len > INDEXED_TABLE_THRESHOLD (> 6), so the table is non-empty.
            let first_key_span = unsafe { self.first_key_span_start_unchecked() };
            let i = *index.get(&KeyRef::new(key, first_key_span))?;
            self.entries().get(i)
        } else {
            for entry in self.entries() {
                if entry.0.name == key {
                    return Some(entry);
                }
            }
            None
        }
    }

    /// Like `get_entry_with_index`, but the hash index is optional. Large
    /// tables without an index fall back to the linear scan as well.
    pub(crate) fn get_entry_with_maybe_index(
        &self,
        key: &str,
        index: Option<&TableIndex<'_>>,
    ) -> Option<&TableEntry<'de>> {
        if self.len() > crate::parser::INDEXED_TABLE_THRESHOLD {
            if let Some(index) = index {
                // SAFETY: len > INDEXED_TABLE_THRESHOLD (> 6), so the table is non-empty.
                let first_key_span = unsafe { self.first_key_span_start_unchecked() };
                let i = *index.get(&KeyRef::new(key, first_key_span))?;
                return self.entries().get(i);
            }
        }
        for entry in self.entries() {
            if entry.0.name == key {
                return Some(entry);
            }
        }
        None
    }

    /// Linear scan for a key, returning both key and value references.
    pub fn get_entry(&self, name: &str) -> Option<(&Key<'de>, &Item<'de>)> {
        for (key, item) in self.entries() {
            if key.name == name {
                return Some((key, item));
            }
        }
        None
    }

    /// Returns a reference to the value for `name`.
    pub fn get(&self, name: &str) -> Option<&Item<'de>> {
        for (key, item) in self.entries() {
            if key.name == name {
                return Some(item);
            }
        }
        None
    }

    /// Returns a mutable reference to the value for `name`.
    pub fn get_mut(&mut self, name: &str) -> Option<&mut Item<'de>> {
        for (key, item) in self.entries_mut() {
            if key.name == name {
                return Some(item);
            }
        }
        None
    }

    /// Returns `true` if the table contains the key.
    #[inline]
    pub fn contains_key(&self, name: &str) -> bool {
        self.get(name).is_some()
    }

    /// Removes the first entry matching `name`, returning the key-value pair.
    /// Uses swap-remove, so the ordering of remaining entries may change.
    pub fn remove_entry(&mut self, name: &str) -> Option<(Key<'de>, Item<'de>)> {
        let idx = self.find_index(name)?;
        Some(self.remove_at(idx))
    }

    /// Returns the span start of the first key. Used as a table discriminator
    /// in the parser's hash index.
    ///
    /// # Safety
    ///
    /// The table must be non-empty (`self.len > 0`).
    #[inline]
    pub(crate) unsafe fn first_key_span_start_unchecked(&self) -> u32 {
        debug_assert!(self.len > 0);
        // SAFETY: caller guarantees len > 0, so the first entry is initialized.
        unsafe { (*self.ptr.as_ptr()).0.span.start }
    }

    /// Returns a slice of all entries.
    #[inline]
    pub fn entries(&self) -> &[TableEntry<'de>] {
        // SAFETY: ptr points to arena-allocated memory with at least len
        // initialized entries. When len == 0, ptr is NonNull::dangling() which
        // satisfies from_raw_parts' alignment requirement for zero-length slices.
        unsafe { std::slice::from_raw_parts(self.ptr.as_ptr(), self.len as usize) }
    }

    /// Returns a mutable slice of all entries.
    #[inline]
    pub fn entries_mut(&mut self) -> &mut [TableEntry<'de>] {
        // SAFETY: same as entries() — ptr is valid for len initialized entries.
        unsafe { std::slice::from_raw_parts_mut(self.ptr.as_ptr(), self.len as usize) }
    }

    /// Linear scan returning the position of the first entry named `name`.
    pub(crate) fn find_index(&self, name: &str) -> Option<usize> {
        for (i, (key, _)) in self.entries().iter().enumerate() {
            if key.name == name {
                return Some(i);
            }
        }
        None
    }

    /// Remove entry at `idx` by swapping it with the last entry.
    fn remove_at(&mut self, idx: usize) -> (Key<'de>, Item<'de>) {
        let last = self.len as usize - 1;
        // SAFETY: idx was returned by find_index, so idx < len and the
        // pointer is within initialized entries. read() moves the value out.
        let ptr = unsafe { self.ptr.as_ptr().add(idx) };
        let entry = unsafe { ptr.read() };
        if idx != last {
            // Safety: `last` is a valid, initialized index distinct from `idx`.
            unsafe {
                ptr.write(self.ptr.as_ptr().add(last).read());
            }
        }
        self.len -= 1;
        entry
    }

    /// Grows capacity: MIN_CAP on first allocation, doubling thereafter.
    /// Marked cold — only reached when an insert hits the capacity limit.
    #[cold]
    fn grow(&mut self, arena: &'de Arena) {
        let new_cap = if self.cap == 0 {
            MIN_CAP
        } else {
            self.cap.checked_mul(2).expect("capacity overflow")
        };
        self.grow_to(new_cap, arena);
    }

    /// (Re)allocates arena storage to hold exactly `new_cap` entries,
    /// preserving existing entries.
    fn grow_to(&mut self, new_cap: u32, arena: &'de Arena) {
        // On 64-bit, u32 * size_of::<TableEntry>() cannot overflow usize.
        #[cfg(target_pointer_width = "32")]
        let new_size = (new_cap as usize)
            .checked_mul(size_of::<TableEntry<'_>>())
            .expect("capacity overflow");
        #[cfg(not(target_pointer_width = "32"))]
        let new_size = new_cap as usize * size_of::<TableEntry<'_>>();
        if self.cap > 0 {
            let old_size = self.cap as usize * size_of::<TableEntry<'_>>();
            // Safety: ptr was returned by a prior arena alloc of old_size bytes.
            self.ptr = unsafe { arena.realloc(self.ptr.cast(), old_size, new_size).cast() };
        } else {
            self.ptr = arena.alloc(new_size).cast();
        }
        self.cap = new_cap;
    }

    /// Deep-clones this table into `arena`. Keys and strings are shared
    /// with the source.
    ///
    /// Scalar-valued entries are batched into `copy_nonoverlapping` runs;
    /// only aggregate values take the recursive `clone_in` path.
    pub(crate) fn clone_in(&self, arena: &'de Arena) -> Self {
        let len = self.len as usize;
        if len == 0 {
            return Self::new();
        }
        let size = len * size_of::<TableEntry<'de>>();
        let dst: NonNull<TableEntry<'de>> = arena.alloc(size).cast();
        let src = self.ptr.as_ptr();
        let dst_ptr = dst.as_ptr();

        // Start of the current run of contiguous scalar entries.
        let mut run_start = 0;
        for i in 0..len {
            // SAFETY: i < len, so src.add(i) is within initialized entries.
            if unsafe { !(*src.add(i)).1.is_scalar() } {
                if run_start < i {
                    // SAFETY: entries[run_start..i] have scalar values — Key is
                    // Copy, scalar Items are bitwise-copyable. Source and
                    // destination are disjoint arena regions.
                    unsafe {
                        std::ptr::copy_nonoverlapping(
                            src.add(run_start),
                            dst_ptr.add(run_start),
                            i - run_start,
                        );
                    }
                }
                // SAFETY: the entry's value is an aggregate; deep-clone it.
                // Key is Copy.
                unsafe {
                    let (key, item) = &*src.add(i);
                    dst_ptr.add(i).write((*key, item.clone_in(arena)));
                }
                run_start = i + 1;
            }
        }
        // Flush the trailing run of scalar entries, if any.
        if run_start < len {
            unsafe {
                std::ptr::copy_nonoverlapping(
                    src.add(run_start),
                    dst_ptr.add(run_start),
                    len - run_start,
                );
            }
        }

        // The clone is allocated exactly-sized: cap == len.
        Self {
            len: self.len,
            cap: self.len,
            ptr: dst,
        }
    }

    /// Copies this table into `target`, returning a copy with `'static` lifetime.
    ///
    /// # Safety
    ///
    /// `target` must have sufficient space as computed by
    /// [`compute_size`](crate::item::owned).
    pub(crate) unsafe fn emplace_in(
        &self,
        target: &mut crate::item::owned::ItemCopyTarget,
    ) -> InnerTable<'static> {
        let len = self.len as usize;
        if len == 0 {
            return InnerTable::new();
        }
        let byte_size = len * size_of::<TableEntry<'static>>();
        // SAFETY: Caller guarantees sufficient aligned space for len entries.
        let dst_ptr = unsafe { target.alloc_aligned(byte_size) }
            .as_ptr()
            .cast::<TableEntry<'static>>();
        for (i, (key, item)) in self.entries().iter().enumerate() {
            // Key names are copied into the target so no borrow of 'de remains.
            let new_key = Key {
                name: unsafe { target.copy_str(key.name) },
                span: key.span,
            };
            let new_item = unsafe { item.emplace_in(target) };
            // SAFETY: i < len, within the allocated region.
            unsafe { dst_ptr.add(i).write((new_key, new_item)) };
        }
        InnerTable {
            len: self.len,
            cap: self.len,
            // SAFETY: dst_ptr is non-null (from alloc_aligned).
            ptr: unsafe { NonNull::new_unchecked(dst_ptr) },
        }
    }
}
342
343impl std::fmt::Debug for InnerTable<'_> {
344    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
345        let mut map = f.debug_map();
346        for (k, v) in self.entries() {
347            map.entry(k, v);
348        }
349        map.finish()
350    }
351}
352
/// Consuming iterator over a [`Table`], yielding `(`[`Key`]`, `[`Item`]`)` pairs.
pub struct IntoIter<'de> {
    // The table being drained; entries at positions >= `index` are still
    // initialized and owned by the iterator.
    table: InnerTable<'de>,
    // Position of the next entry to move out.
    index: u32,
}

impl<'de> Iterator for IntoIter<'de> {
    type Item = (Key<'de>, Item<'de>);

    fn next(&mut self) -> Option<Self::Item> {
        if self.index < self.table.len {
            // SAFETY: index < len is checked above, so the read is within
            // bounds of initialized entries. Each slot is read at most once
            // because `index` only ever advances.
            let entry = unsafe { self.table.ptr.as_ptr().add(self.index as usize).read() };
            self.index += 1;
            Some(entry)
        } else {
            None
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // `index <= len` always holds, so the subtraction cannot underflow.
        let remaining = (self.table.len - self.index) as usize;
        (remaining, Some(remaining))
    }
}

// size_hint() is exact, so the iterator qualifies as exact-size.
impl ExactSizeIterator for IntoIter<'_> {}
381
/// A TOML table, a transient collection of key/value pairs inside an [`Item`].
///
/// `Table` is an intermediate structure for converting between Rust types and
/// TOML through [`FromToml`] and [`ToToml`]. It is the top level value
/// returned by [`parse`](crate::parse) and the value inside any `[section]`
/// or inline `{ ... }` table.
///
/// <div class="warning">
///
/// Entries are stored in insertion order, but the TOML specification defines
/// table keys as unordered. Avoid relying on iteration order for semantic
/// purposes.
///
/// </div>
///
/// # Accessing values
///
/// Use `table["key"]` for index access, which returns a [`MaybeItem`] that
/// never panics on missing keys and supports chained indexing. Use
/// [`get`](Self::get) or [`get_mut`](Self::get_mut) for `Option` based access.
///
/// For structured extraction, use [`Item::table_helper`](crate::Item::table_helper)
/// to create a [`TableHelper`](crate::de::TableHelper).
///
/// # Lookup performance
///
/// Direct lookups ([`get`](Self::get), `table["key"]`) perform a linear scan
/// over entries, O(n) in the number of keys. For small tables or a handful
/// of lookups, as is typical in TOML, this is fast enough.
///
/// For structured conversion of larger tables, use
/// [`TableHelper`](crate::de::TableHelper) via
/// [`Document::table_helper`](crate::Document::table_helper) or
/// [`Item::table_helper`](crate::Item::table_helper), which use the
/// parser's hash index for O(1) lookups.
///
/// # Constructing tables
///
/// To build a table programmatically, create one with [`Table::new`] and
/// insert entries with [`Table::insert`]. An [`Arena`](crate::Arena) is
/// required because entries are arena allocated.
///
/// # Iteration
///
/// `Table` implements [`IntoIterator`] (both by reference and by value),
/// yielding `(`[`Key`]`, `[`Item`]`)` pairs.
///
/// [`FromToml`]: crate::FromToml
/// [`ToToml`]: crate::ToToml
#[repr(C)]
pub struct Table<'de> {
    // Entry storage. Must be the first field: `as_item`/`into_item` rely on
    // this matching `Item`'s payload union at offset 0.
    pub(crate) value: InnerTable<'de>,
    // Tag, flags, and span; must follow `value` (see the layout argument in
    // `as_item` and the const assertions at the bottom of this file).
    pub(crate) meta: ItemMetadata,
}
436
437impl<'de> Table<'de> {
438    /// Creates an empty table in format-hints mode (no source span).
439    ///
440    /// The table starts with automatic style: normalization will choose
441    /// between inline and header based on content. Call [`set_style`](Self::set_style)
442    /// to override.
443    pub fn new() -> Table<'de> {
444        let mut meta = ItemMetadata::hints(TAG_TABLE, FLAG_TABLE);
445        meta.set_auto_style();
446        Table {
447            meta,
448            value: InnerTable::new(),
449        }
450    }
451
452    /// Creates an empty table with pre-allocated capacity.
453    ///
454    /// Returns `None` if `cap` exceeds `u32::MAX`.
455    pub fn try_with_capacity(cap: usize, arena: &'de Arena) -> Option<Table<'de>> {
456        let cap: u32 = cap.try_into().ok()?;
457        let mut meta = ItemMetadata::hints(TAG_TABLE, FLAG_TABLE);
458        meta.set_auto_style();
459        Some(Table {
460            meta,
461            value: InnerTable::with_capacity(cap, arena),
462        })
463    }
464
465    /// Creates an empty table in span mode (parser-produced).
466    pub(crate) fn new_spanned(span: Span) -> Table<'de> {
467        Table {
468            meta: ItemMetadata::spanned(TAG_TABLE, FLAG_TABLE, span.start, span.end),
469            value: InnerTable::new(),
470        }
471    }
472
473    /// Returns the source span, or `0..0` if this table was constructed
474    /// programmatically (format-hints mode).
475    pub fn span(&self) -> Span {
476        self.meta.span()
477    }
478}
479
480impl<'de> Default for Table<'de> {
481    fn default() -> Self {
482        Self::new()
483    }
484}
485
impl PartialEq for Table<'_> {
    /// Structural equality, delegating to `super::equal_tables` with no
    /// key index.
    fn eq(&self, other: &Self) -> bool {
        super::equal_tables(self, other, None)
    }
}
491
492impl std::fmt::Debug for Table<'_> {
493    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
494        self.value.fmt(f)
495    }
496}
497
impl<'de> Table<'de> {
    /// Inserts a key-value pair. If an entry with the same key already
    /// exists, only its value is replaced — the original key (and its span)
    /// is kept.
    ///
    /// Performs an O(n) linear scan for duplicates. Prefer [`Table::insert_unique`]
    /// when the key is known to be absent.
    pub fn insert(&mut self, key: Key<'de>, value: Item<'de>, arena: &'de Arena) {
        if let Some(existing) = self.get_mut(key.name) {
            *existing = value;
            return;
        }
        self.value.insert_unique(key, value, arena);
    }

    /// Inserts a key-value pair without checking for duplicates.
    ///
    /// Inserting a duplicate key is sound but the behavior is unspecified. It may
    /// panic, produce invalid TOML on emit, or cause deserialization errors.
    pub fn insert_unique(&mut self, key: Key<'de>, value: Item<'de>, arena: &'de Arena) {
        self.value.insert_unique(key, value, arena);
    }

    /// Returns the number of entries.
    #[inline]
    pub fn len(&self) -> usize {
        self.value.len()
    }

    /// Returns `true` if the table has no entries.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.value.is_empty()
    }

    /// Linear scan for a key, returning both key and value references.
    pub fn get_key_value(&self, name: &str) -> Option<(&Key<'de>, &Item<'de>)> {
        self.value.get_entry(name)
    }

    /// Returns a reference to the value for `name`.
    pub fn get(&self, name: &str) -> Option<&Item<'de>> {
        self.value.get(name)
    }

    /// Returns a mutable reference to the value for `name`.
    pub fn get_mut(&mut self, name: &str) -> Option<&mut Item<'de>> {
        self.value.get_mut(name)
    }

    /// Returns `true` if the table contains the key.
    #[inline]
    pub fn contains_key(&self, name: &str) -> bool {
        self.value.contains_key(name)
    }

    /// Removes the first entry matching `name`, returning the key-value pair.
    /// Uses swap-remove, so the ordering of remaining entries may change.
    pub fn remove_entry(&mut self, name: &str) -> Option<(Key<'de>, Item<'de>)> {
        self.value.remove_entry(name)
    }

    /// Returns a slice of all entries.
    #[inline]
    pub fn entries(&self) -> &[TableEntry<'de>] {
        self.value.entries()
    }

    /// Returns a mutable slice of all entries.
    #[inline]
    pub fn entries_mut(&mut self) -> &mut [TableEntry<'de>] {
        self.value.entries_mut()
    }

    /// Returns an iterator over all entries (key-value pairs).
    #[inline]
    pub fn iter(&self) -> std::slice::Iter<'_, TableEntry<'de>> {
        self.entries().iter()
    }

    /// Returns a shared reference to this `Table` reinterpreted as an
    /// [`Item`] with the same span and payload.
    pub fn as_item(&self) -> &Item<'de> {
        // SAFETY: Table is #[repr(C)] { InnerTable, ItemMetadata }.
        // Item  is #[repr(C)] { Payload,    ItemMetadata }.
        // Payload is a union whose `table` field is ManuallyDrop<InnerTable>
        // (#[repr(transparent)]). Both types are 24 bytes, align 8 (verified
        // by const assertions). The field offsets match (data at 0..16,
        // metadata at 16..24). Only a shared reference is returned, so no
        // mutation can change the tag.
        unsafe { &*(self as *const Table<'de>).cast::<Item<'de>>() }
    }

    /// Converts this `Table` into an [`Item`] with the same span and payload.
    pub fn into_item(self) -> Item<'de> {
        // SAFETY: Same layout argument as as_item(). Size and alignment
        // equality verified by const assertions below. The tag in
        // ItemMetadata is preserved unchanged through the transmute.
        unsafe { std::mem::transmute(self) }
    }
}
594
595impl<'a, 'de> IntoIterator for &'a mut Table<'de> {
596    type Item = &'a mut (Key<'de>, Item<'de>);
597    type IntoIter = std::slice::IterMut<'a, TableEntry<'de>>;
598
599    fn into_iter(self) -> Self::IntoIter {
600        self.value.entries_mut().iter_mut()
601    }
602}
603impl<'a, 'de> IntoIterator for &'a Table<'de> {
604    type Item = &'a (Key<'de>, Item<'de>);
605    type IntoIter = std::slice::Iter<'a, TableEntry<'de>>;
606
607    fn into_iter(self) -> Self::IntoIter {
608        self.value.entries().iter()
609    }
610}
611
612impl<'de> IntoIterator for Table<'de> {
613    type Item = (Key<'de>, Item<'de>);
614    type IntoIter = IntoIter<'de>;
615
616    fn into_iter(self) -> Self::IntoIter {
617        IntoIter {
618            table: self.value,
619            index: 0,
620        }
621    }
622}
623
// Compile-time proof of the layout argument used by `as_item`/`into_item`:
// `Table` and `Item` must agree in size and alignment.
const _: () = assert!(std::mem::size_of::<Table<'_>>() == std::mem::size_of::<Item<'_>>());
const _: () = assert!(std::mem::align_of::<Table<'_>>() == std::mem::align_of::<Item<'_>>());

// SAFETY: `Table` is layout-compatible with `Item` when the item's tag is
// `TAG_TABLE` (see `as_item`/`into_item`). The underlying `InnerTable` stores
// a `NonNull` into arena memory and a length/capacity pair, with no interior
// mutability: every mutation of the entries vector requires `&mut Table`, so
// shared readers cannot race with a writer. The arena backing the entries is
// itself borrowed for `'de`, and any inner `Item`s transitively satisfy the
// same invariants (see `unsafe impl Send/Sync for Item`).
unsafe impl Send for Table<'_> {}
unsafe impl Sync for Table<'_> {}

// SAFETY: `IntoIter` owns the `InnerTable` it drains. The same reasoning as
// for `Table` applies: it has no interior mutability and its `NonNull` points
// into arena storage that the iterator exclusively reads through `&mut self`.
unsafe impl Send for IntoIter<'_> {}
unsafe impl Sync for IntoIter<'_> {}
642
643impl<'de> Table<'de> {
644    #[inline]
645    pub(crate) fn span_start(&self) -> u32 {
646        self.meta.span_start()
647    }
648
649    #[inline]
650    pub(crate) fn set_span_start(&mut self, v: u32) {
651        self.meta.set_span_start(v);
652    }
653
654    #[inline]
655    pub(crate) fn set_span_end(&mut self, v: u32) {
656        self.meta.set_span_end(v);
657    }
658
659    #[inline]
660    pub(crate) fn extend_span_end(&mut self, new_end: u32) {
661        self.meta.extend_span_end(new_end);
662    }
663
664    #[inline]
665    pub(crate) fn set_header_flag(&mut self) {
666        self.meta.set_flag(FLAG_HEADER);
667    }
668
669    #[inline]
670    pub(crate) fn set_dotted_flag(&mut self) {
671        self.meta.set_flag(FLAG_DOTTED);
672    }
673
674    /// Returns the [`TableStyle`] recorded on this table.
675    #[inline]
676    pub fn style(&self) -> TableStyle {
677        match self.meta.flag() {
678            FLAG_DOTTED => TableStyle::Dotted,
679            FLAG_HEADER => TableStyle::Header,
680            FLAG_FROZEN => TableStyle::Inline,
681            _ => TableStyle::Implicit,
682        }
683    }
684
685    /// Pins the [`TableStyle`] used when this table is emitted.
686    ///
687    /// Tables built programmatically start out with an unresolved style and
688    /// emit picks a rendering from their size and contents (inline for small
689    /// tables, a header otherwise). Calling this method locks in `kind` so
690    /// the choice survives emission unchanged.
691    #[inline]
692    pub fn set_style(&mut self, kind: TableStyle) {
693        let flag = match kind {
694            TableStyle::Implicit => FLAG_TABLE,
695            TableStyle::Dotted => FLAG_DOTTED,
696            TableStyle::Header => FLAG_HEADER,
697            TableStyle::Inline => FLAG_FROZEN,
698        };
699        self.meta.set_flag(flag);
700        self.meta.clear_auto_style();
701    }
702
703    /// Deep-clones this table into `arena`. Keys and strings are shared
704    /// with the source.
705    pub fn clone_in(&self, arena: &'de Arena) -> Table<'de> {
706        Table {
707            value: self.value.clone_in(arena),
708            meta: self.meta,
709        }
710    }
711}
712
713impl<'de> std::ops::Index<&str> for Table<'de> {
714    type Output = MaybeItem<'de>;
715
716    #[inline]
717    fn index(&self, index: &str) -> &Self::Output {
718        if let Some(item) = self.get(index) {
719            return MaybeItem::from_ref(item);
720        }
721        &NONE
722    }
723}