// hjkl_engine/buffer_impl.rs

1//! Canonical [`Buffer`] trait impl over [`hjkl_buffer::Buffer`].
2//!
3//! Wires the SPEC trait surface (`Cursor` / `Query` / `BufferEdit` /
4//! `Search`, sealed via [`crate::types::sealed::Sealed`]) onto the
5//! in-tree rope-backed buffer. Pos⇄Position conversion lives at this
6//! boundary — engine code (FSM, editor) keeps using `hjkl_buffer`'s
7//! concrete API directly until the motion / fold relocation lands;
8//! external trait users see the SPEC surface.
9//!
10//! See `crates/hjkl-engine/SPEC.md` §"`Buffer` trait surface".
11//!
12//! # Why concrete-Editor today
13//!
14//! The trait surface here is 13 methods. The engine FSM today calls
15//! ~46 distinct methods on `hjkl_buffer::Buffer` — most of them are
16//! motion / fold / viewport helpers that SPEC.md explicitly **excludes**
17//! from the trait ("motions don't belong on `Buffer` — they're computed
18//! over the buffer, not delegated to it"). Generic-ifying
19//! `Editor<B: Buffer, H: Host>` therefore requires relocating those
20//! ~33 helpers from `hjkl-buffer` into `hjkl-engine` as free functions
21//! over `B: Cursor + Query`. That's a separate, multi-thousand-LOC
22//! patch tracked for the 0.1.0 cut.
23//!
24//! Until then this module ships the canonical impl + a compile-time
25//! assertion that `hjkl_buffer::Buffer` satisfies the trait, so
26//! downstream callers can write `fn f<B: hjkl_engine::Buffer>(…)`
27//! today and the engine's own `Editor` becomes generic over `B` in a
28//! follow-up patch without breaking the trait contract.
29
30use std::borrow::Cow;
31
32use hjkl_buffer::Buffer as RopeBuffer;
33use hjkl_buffer::Position;
34use regex::Regex;
35
36use crate::types::sealed::Sealed;
37use crate::types::{Buffer, BufferEdit, Cursor, FoldOp, FoldProvider, Pos, Query, Search};
38
39// ── Pos ⇄ Position conversion ──────────────────────────────────────
40
41/// Engine [`Pos`] → buffer [`Position`].
42///
43/// Engine `Pos` is `(line: u32, col: u32)` grapheme-indexed; buffer
44/// [`Position`] is `(row: usize, col: usize)` char-indexed. The two
45/// indexings happen to match for the in-tree rope today (graphemes
46/// without combining marks == chars); future grapheme-aware backends
47/// will need to thread a real grapheme→char map through this fn.
48#[inline]
49pub(crate) fn pos_to_position(p: Pos) -> Position {
50    Position {
51        row: p.line as usize,
52        col: p.col as usize,
53    }
54}
55
56/// Buffer [`Position`] → engine [`Pos`].
57#[inline]
58pub(crate) fn position_to_pos(p: Position) -> Pos {
59    Pos {
60        line: p.row as u32,
61        col: p.col as u32,
62    }
63}
64
65// ── Sealed marker ──────────────────────────────────────────────────
66
67impl Sealed for RopeBuffer {}
68
69// ── Cursor ─────────────────────────────────────────────────────────
70
71impl Cursor for RopeBuffer {
72    fn cursor(&self) -> Pos {
73        position_to_pos(RopeBuffer::cursor(self))
74    }
75
76    fn set_cursor(&mut self, pos: Pos) {
77        RopeBuffer::set_cursor(self, pos_to_position(pos));
78    }
79
80    fn byte_offset(&self, pos: Pos) -> usize {
81        let p = pos_to_position(pos);
82        // Sum byte lengths of every line strictly above `p.row` plus
83        // the trailing `\n`, then the col-byte-offset on `p.row`.
84        let mut byte = 0usize;
85        for r in 0..p.row.min(self.row_count()) {
86            byte += self.line(r).map(str::len).unwrap_or(0) + 1; // +1 for '\n'
87        }
88        if let Some(line) = self.line(p.row) {
89            byte += p.byte_offset(line);
90        }
91        byte
92    }
93
94    fn pos_at_byte(&self, byte: usize) -> Pos {
95        let mut remaining = byte;
96        for r in 0..self.row_count() {
97            let line = self.line(r).unwrap_or("");
98            let line_bytes = line.len();
99            // Each row contributes its bytes plus the trailing `\n`.
100            // `byte` indexing the trailing `\n` itself maps to the
101            // start of the next row (col 0).
102            if remaining <= line_bytes {
103                // Convert byte offset within line to char column.
104                let col = line[..remaining.min(line_bytes)].chars().count();
105                return Pos {
106                    line: r as u32,
107                    col: col as u32,
108                };
109            }
110            remaining -= line_bytes + 1;
111        }
112        // Past end → clamp to end of last line.
113        let last = self.row_count().saturating_sub(1);
114        let line = self.line(last).unwrap_or("");
115        Pos {
116            line: last as u32,
117            col: line.chars().count() as u32,
118        }
119    }
120}
121
122// ── Query ──────────────────────────────────────────────────────────
123
124impl Query for RopeBuffer {
125    fn line_count(&self) -> u32 {
126        self.row_count() as u32
127    }
128
129    fn line(&self, idx: u32) -> &str {
130        // SPEC: panic on OOB rather than silently return empty.
131        match RopeBuffer::line(self, idx as usize) {
132            Some(s) => s,
133            None => panic!(
134                "Query::line: index {idx} out of bounds (line_count = {})",
135                self.row_count()
136            ),
137        }
138    }
139
140    fn len_bytes(&self) -> usize {
141        // Sum of every line's bytes + a `\n` between them. Matches
142        // `as_string().len()` without allocating the join.
143        let n = self.row_count();
144        let mut total = 0usize;
145        for r in 0..n {
146            total += self.line(r).map(str::len).unwrap_or(0);
147        }
148        // n-1 separators between n lines (no trailing newline).
149        total + n.saturating_sub(1)
150    }
151
152    fn dirty_gen(&self) -> u64 {
153        RopeBuffer::dirty_gen(self)
154    }
155
156    fn slice(&self, range: core::ops::Range<Pos>) -> Cow<'_, str> {
157        let start = pos_to_position(range.start);
158        let end = pos_to_position(range.end);
159        if start >= end {
160            return Cow::Borrowed("");
161        }
162        // Single-line slice can borrow.
163        if start.row == end.row {
164            if let Some(line) = RopeBuffer::line(self, start.row) {
165                let lo = start.byte_offset(line).min(line.len());
166                let hi = end.byte_offset(line).min(line.len());
167                return Cow::Borrowed(&line[lo..hi]);
168            }
169            return Cow::Borrowed("");
170        }
171        // Multi-line: allocate.
172        let mut out = String::new();
173        for r in start.row..=end.row.min(self.row_count().saturating_sub(1)) {
174            let line = RopeBuffer::line(self, r).unwrap_or("");
175            if r == start.row {
176                let lo = start.byte_offset(line).min(line.len());
177                out.push_str(&line[lo..]);
178                out.push('\n');
179            } else if r == end.row {
180                let hi = end.byte_offset(line).min(line.len());
181                out.push_str(&line[..hi]);
182            } else {
183                out.push_str(line);
184                out.push('\n');
185            }
186        }
187        Cow::Owned(out)
188    }
189}
190
191// ── BufferEdit ─────────────────────────────────────────────────────
192
193impl BufferEdit for RopeBuffer {
194    fn insert_at(&mut self, pos: Pos, text: &str) {
195        let at = clamp_to_buf(self, pos_to_position(pos));
196        let _ = self.apply_edit(hjkl_buffer::Edit::InsertStr {
197            at,
198            text: text.to_string(),
199        });
200    }
201
202    fn delete_range(&mut self, range: core::ops::Range<Pos>) {
203        let start = clamp_to_buf(self, pos_to_position(range.start));
204        let end = clamp_to_buf(self, pos_to_position(range.end));
205        if start >= end {
206            return;
207        }
208        let _ = self.apply_edit(hjkl_buffer::Edit::DeleteRange {
209            start,
210            end,
211            kind: hjkl_buffer::MotionKind::Char,
212        });
213    }
214
215    fn replace_range(&mut self, range: core::ops::Range<Pos>, replacement: &str) {
216        let start = clamp_to_buf(self, pos_to_position(range.start));
217        let end = clamp_to_buf(self, pos_to_position(range.end));
218        if start >= end {
219            // Treat as pure insert at `start`.
220            let _ = self.apply_edit(hjkl_buffer::Edit::InsertStr {
221                at: start,
222                text: replacement.to_string(),
223            });
224            return;
225        }
226        let _ = self.apply_edit(hjkl_buffer::Edit::Replace {
227            start,
228            end,
229            with: replacement.to_string(),
230        });
231    }
232
233    fn replace_all(&mut self, text: &str) {
234        // Forward to the inherent in-tree fast path which rebuilds
235        // the line vector in one pass + bumps `dirty_gen`.
236        RopeBuffer::replace_all(self, text);
237    }
238}
239
/// Clamp `p` into the buffer's valid range (delegates to the buffer's
/// own `clamp_position` rules).
#[inline]
fn clamp_to_buf(buf: &RopeBuffer, p: Position) -> Position {
    buf.clamp_position(p)
}
244
245// ── Search ─────────────────────────────────────────────────────────
246
247impl Search for RopeBuffer {
248    fn find_next(&self, from: Pos, pat: &Regex) -> Option<core::ops::Range<Pos>> {
249        let start = pos_to_position(from);
250        let total = self.row_count();
251        if total == 0 {
252            return None;
253        }
254        // Scan the from-row from `start.col` onward, then every row
255        // after, then wrap to rows before. SPEC: "first match
256        // at-or-after `from`". 0.0.37: wrap policy now lives on the
257        // engine's `SearchState::wrap_around` (see
258        // `DESIGN_33_METHOD_CLASSIFICATION.md` step 3); the trait
259        // impl always wraps and the engine's `search_*` free
260        // functions are responsible for honouring `wrapscan` by
261        // wrapping or not invoking the trait at all.
262        let wrap = true;
263        let from_line = RopeBuffer::line(self, start.row).unwrap_or("");
264        let from_byte = start.byte_offset(from_line).min(from_line.len());
265        if let Some(m) = pat.find_at(from_line, from_byte) {
266            return Some(byte_range_to_pos_range(
267                start.row,
268                m.start(),
269                start.row,
270                m.end(),
271                from_line,
272            ));
273        }
274        for offset in 1..total {
275            let row = start.row + offset;
276            if row >= total && !wrap {
277                break;
278            }
279            let row = row % total;
280            if !wrap && row <= start.row {
281                break;
282            }
283            let line = RopeBuffer::line(self, row).unwrap_or("");
284            if let Some(m) = pat.find(line) {
285                return Some(byte_range_to_pos_range(row, m.start(), row, m.end(), line));
286            }
287            if row == start.row {
288                break;
289            }
290        }
291        None
292    }
293
294    fn find_prev(&self, from: Pos, pat: &Regex) -> Option<core::ops::Range<Pos>> {
295        let start = pos_to_position(from);
296        let total = self.row_count();
297        if total == 0 {
298            return None;
299        }
300        // 0.0.37: wrap moved to engine SearchState; trait impl always wraps.
301        let wrap = true;
302        // Last match at-or-before `from`. We can't run the regex
303        // backwards, so iterate matches and pick the last one with
304        // start <= from-byte on the from-row, then walk previous rows
305        // taking the last match per row.
306        let from_line = RopeBuffer::line(self, start.row).unwrap_or("");
307        let from_byte = start.byte_offset(from_line).min(from_line.len());
308        let mut best: Option<(usize, usize)> = None;
309        for m in pat.find_iter(from_line) {
310            if m.start() <= from_byte {
311                best = Some((m.start(), m.end()));
312            } else {
313                break;
314            }
315        }
316        if let Some((s, e)) = best {
317            return Some(byte_range_to_pos_range(
318                start.row, s, start.row, e, from_line,
319            ));
320        }
321        for offset in 1..total {
322            // Walk backwards.
323            let row = if offset > start.row {
324                if !wrap {
325                    break;
326                }
327                total - (offset - start.row)
328            } else {
329                start.row - offset
330            };
331            if !wrap && row >= start.row {
332                break;
333            }
334            let line = RopeBuffer::line(self, row).unwrap_or("");
335            let last = pat.find_iter(line).last();
336            if let Some(m) = last {
337                return Some(byte_range_to_pos_range(row, m.start(), row, m.end(), line));
338            }
339            if row == start.row {
340                break;
341            }
342        }
343        None
344    }
345}
346
347#[inline]
348fn byte_range_to_pos_range(
349    s_row: usize,
350    s_byte: usize,
351    e_row: usize,
352    e_byte: usize,
353    line: &str,
354) -> core::ops::Range<Pos> {
355    let s_col = line[..s_byte.min(line.len())].chars().count();
356    let e_col = line[..e_byte.min(line.len())].chars().count();
357    Pos {
358        line: s_row as u32,
359        col: s_col as u32,
360    }..Pos {
361        line: e_row as u32,
362        col: e_col as u32,
363    }
364}
365
366// ── Buffer super-trait ─────────────────────────────────────────────
367
368impl Buffer for RopeBuffer {}
369
370// ── Fold provider ──────────────────────────────────────────────────
371
/// [`FoldProvider`] adapter wrapping a `&hjkl_buffer::Buffer`. Lets
/// engine call sites ask the buffer's fold storage about visible
/// rows without reaching into `Buffer::next_visible_row` &c. directly.
///
/// Construct with [`BufferFoldProvider::new`]. Hosts that want to
/// expose their own fold model (a separate fold tree, LSP-derived
/// folding ranges, …) can implement `FoldProvider` against their own
/// state and skip this adapter entirely.
///
/// Introduced in 0.0.32 (Patch C-β) as part of the fold-iteration
/// relocation. Fold *storage* still lives on the buffer for
/// `dirty_gen` / render-cache reasons; only the iteration API moved.
pub struct BufferFoldProvider<'a> {
    // Shared borrow only — this adapter is read-only; fold mutation
    // goes through `BufferFoldProviderMut`.
    buffer: &'a RopeBuffer,
}

impl<'a> BufferFoldProvider<'a> {
    /// Wrap `buffer` in a read-only fold-provider view. The adapter
    /// holds the shared borrow for its whole lifetime.
    pub fn new(buffer: &'a RopeBuffer) -> Self {
        Self { buffer }
    }
}
393
394impl FoldProvider for BufferFoldProvider<'_> {
395    fn next_visible_row(&self, row: usize, _row_count: usize) -> Option<usize> {
396        // Buffer ignores the row_count hint — it knows its own size.
397        RopeBuffer::next_visible_row(self.buffer, row)
398    }
399
400    fn prev_visible_row(&self, row: usize) -> Option<usize> {
401        RopeBuffer::prev_visible_row(self.buffer, row)
402    }
403
404    fn is_row_hidden(&self, row: usize) -> bool {
405        RopeBuffer::is_row_hidden(self.buffer, row)
406    }
407
408    fn fold_at_row(&self, row: usize) -> Option<(usize, usize, bool)> {
409        let f = self.buffer.fold_at_row(row)?;
410        Some((f.start_row, f.end_row, f.closed))
411    }
412
413    // `apply` / `invalidate_range` use the trait's default no-op impl
414    // because `BufferFoldProvider` only borrows the buffer immutably.
415    // For fold mutation, use [`BufferFoldProviderMut`] instead.
416}
417
/// Mutable [`FoldProvider`] adapter wrapping a `&mut hjkl_buffer::Buffer`.
/// Engine call sites that need to dispatch a [`FoldOp`] (vim's `z…`
/// keystrokes, the `:fold*` Ex commands, edit-pipeline invalidation)
/// construct this on the fly from `&mut self.buffer` and call
/// [`FoldProvider::apply`] / [`FoldProvider::invalidate_range`] on it.
///
/// Introduced in 0.0.38 (Patch C-δ.4) as part of routing fold mutation
/// through the [`FoldProvider`] surface. Fold *storage* still lives
/// on [`hjkl_buffer::Buffer`] for `dirty_gen` / render-cache reasons;
/// only the dispatch path moved.
pub struct BufferFoldProviderMut<'a> {
    // Exclusive borrow: enables `apply` / `invalidate_range`, at the
    // cost of locking out all other buffer access while alive.
    buffer: &'a mut RopeBuffer,
}

impl<'a> BufferFoldProviderMut<'a> {
    /// Wrap `buffer` in a mutating fold-provider view.
    pub fn new(buffer: &'a mut RopeBuffer) -> Self {
        Self { buffer }
    }
}
437
438impl FoldProvider for BufferFoldProviderMut<'_> {
439    fn next_visible_row(&self, row: usize, _row_count: usize) -> Option<usize> {
440        RopeBuffer::next_visible_row(self.buffer, row)
441    }
442
443    fn prev_visible_row(&self, row: usize) -> Option<usize> {
444        RopeBuffer::prev_visible_row(self.buffer, row)
445    }
446
447    fn is_row_hidden(&self, row: usize) -> bool {
448        RopeBuffer::is_row_hidden(self.buffer, row)
449    }
450
451    fn fold_at_row(&self, row: usize) -> Option<(usize, usize, bool)> {
452        let f = self.buffer.fold_at_row(row)?;
453        Some((f.start_row, f.end_row, f.closed))
454    }
455
456    fn apply(&mut self, op: FoldOp) {
457        match op {
458            FoldOp::Add {
459                start_row,
460                end_row,
461                closed,
462            } => {
463                self.buffer.add_fold(start_row, end_row, closed);
464            }
465            FoldOp::RemoveAt(row) => {
466                self.buffer.remove_fold_at(row);
467            }
468            FoldOp::OpenAt(row) => {
469                self.buffer.open_fold_at(row);
470            }
471            FoldOp::CloseAt(row) => {
472                self.buffer.close_fold_at(row);
473            }
474            FoldOp::ToggleAt(row) => {
475                self.buffer.toggle_fold_at(row);
476            }
477            FoldOp::OpenAll => {
478                self.buffer.open_all_folds();
479            }
480            FoldOp::CloseAll => {
481                self.buffer.close_all_folds();
482            }
483            FoldOp::ClearAll => {
484                self.buffer.clear_all_folds();
485            }
486            FoldOp::Invalidate { start_row, end_row } => {
487                self.buffer.invalidate_folds_in_range(start_row, end_row);
488            }
489        }
490    }
491
492    fn invalidate_range(&mut self, start_row: usize, end_row: usize) {
493        self.buffer.invalidate_folds_in_range(start_row, end_row);
494    }
495}
496
/// Owned-snapshot [`FoldProvider`] adapter. Carries a copy of the
/// buffer's fold list (one `Vec<Fold>` clone — fold lists are tiny in
/// practice) plus the buffer's `row_count`, so the call site can hold
/// the snapshot for fold queries while passing `&mut hjkl_buffer::Buffer`
/// to a motion function that needs cursor mutation.
///
/// Introduced in 0.0.40 (Patch C-δ.5) so the lifted motion fns can
/// take `&dyn FoldProvider` separately from `&mut B: Cursor + Query`
/// without the call site running into the immutable-vs-mutable
/// borrow conflict that arises with [`BufferFoldProvider`] /
/// [`BufferFoldProviderMut`] (both of which hold a buffer borrow).
///
/// The snapshot is read-only — `apply` and `invalidate_range` are
/// no-ops (any fold mutation must go through the canonical
/// [`BufferFoldProviderMut`] adapter against the live buffer).
pub struct SnapshotFoldProvider {
    // Owned copy of the fold list taken at snapshot time; goes stale
    // if the live buffer's folds change afterwards.
    folds: Vec<hjkl_buffer::Fold>,
    // Row count at snapshot time, used to bound visible-row walks.
    row_count: usize,
}
516
517impl SnapshotFoldProvider {
518    /// Snapshot the current fold list + row-count from `buffer`.
519    /// The snapshot is decoupled from the buffer's lifetime, so the
520    /// caller can immediately re-borrow the buffer mutably.
521    pub fn from_buffer(buffer: &RopeBuffer) -> Self {
522        Self {
523            folds: buffer.folds().to_vec(),
524            row_count: buffer.row_count(),
525        }
526    }
527
528    /// True iff `row` is hidden by any closed fold in the snapshot.
529    /// Mirrors [`hjkl_buffer::Buffer::is_row_hidden`] over the
530    /// snapshotted fold list.
531    fn snapshot_is_row_hidden(&self, row: usize) -> bool {
532        self.folds.iter().any(|f| f.hides(row))
533    }
534}
535
536impl FoldProvider for SnapshotFoldProvider {
537    fn next_visible_row(&self, row: usize, _row_count: usize) -> Option<usize> {
538        // Mirrors [`hjkl_buffer::Buffer::next_visible_row`]: walk
539        // forward, skipping closed-fold-hidden rows, stop at end.
540        let last = self.row_count.saturating_sub(1);
541        if last == 0 && row == 0 {
542            return None;
543        }
544        let mut r = row.checked_add(1)?;
545        while r <= last && self.snapshot_is_row_hidden(r) {
546            r += 1;
547        }
548        (r <= last).then_some(r)
549    }
550
551    fn prev_visible_row(&self, row: usize) -> Option<usize> {
552        // Mirrors [`hjkl_buffer::Buffer::prev_visible_row`].
553        let mut r = row.checked_sub(1)?;
554        while self.snapshot_is_row_hidden(r) {
555            r = r.checked_sub(1)?;
556        }
557        Some(r)
558    }
559
560    fn is_row_hidden(&self, row: usize) -> bool {
561        self.snapshot_is_row_hidden(row)
562    }
563
564    fn fold_at_row(&self, row: usize) -> Option<(usize, usize, bool)> {
565        self.folds
566            .iter()
567            .find(|f| f.contains(row))
568            .map(|f| (f.start_row, f.end_row, f.closed))
569    }
570
571    // `apply` / `invalidate_range` use the trait's default no-op impl.
572}
573
574// ── Tests ──────────────────────────────────────────────────────────
575
#[cfg(test)]
mod tests {
    use super::*;

    /// Compile-time check: the in-tree `hjkl_buffer::Buffer` satisfies
    /// the SPEC `Buffer` super-trait (and therefore all four sub-traits).
    /// If this stops compiling, the trait surface diverged from the
    /// canonical impl — fix the impl, not this assertion.
    #[test]
    fn rope_buffer_implements_spec_buffer() {
        fn assert_buffer<B: Buffer>() {}
        fn assert_cursor<B: Cursor>() {}
        fn assert_query<B: Query>() {}
        fn assert_edit<B: BufferEdit>() {}
        fn assert_search<B: Search>() {}
        assert_buffer::<RopeBuffer>();
        assert_cursor::<RopeBuffer>();
        assert_query::<RopeBuffer>();
        assert_edit::<RopeBuffer>();
        assert_search::<RopeBuffer>();
    }

    /// `set_cursor` → `cursor` survives the Pos⇄Position conversion.
    #[test]
    fn cursor_roundtrip() {
        let mut b = RopeBuffer::from_str("hello\nworld");
        Cursor::set_cursor(&mut b, Pos::new(1, 3));
        assert_eq!(Cursor::cursor(&b), Pos::new(1, 3));
    }

    /// `line_count` / `line` agree with the underlying rows.
    #[test]
    fn query_line_count_and_line() {
        let b = RopeBuffer::from_str("a\nb\nc");
        assert_eq!(Query::line_count(&b), 3);
        assert_eq!(Query::line(&b, 0), "a");
        assert_eq!(Query::line(&b, 2), "c");
    }

    /// `len_bytes` must equal the length of the joined text.
    #[test]
    fn query_len_bytes_matches_join() {
        let b = RopeBuffer::from_str("foo\nbar\nbaz");
        assert_eq!(Query::len_bytes(&b), b.as_string().len());
    }

    /// A slice confined to one row takes the zero-copy `Cow::Borrowed`
    /// path.
    #[test]
    fn query_slice_single_line_borrows() {
        let b = RopeBuffer::from_str("hello world");
        let s = Query::slice(&b, Pos::new(0, 0)..Pos::new(0, 5));
        assert_eq!(&*s, "hello");
        assert!(matches!(s, Cow::Borrowed(_)));
    }

    /// A slice spanning rows must allocate (`Cow::Owned`) and join
    /// with '\n'.
    #[test]
    fn query_slice_multiline_allocates() {
        let b = RopeBuffer::from_str("ab\ncd\nef");
        let s = Query::slice(&b, Pos::new(0, 1)..Pos::new(2, 1));
        assert_eq!(&*s, "b\ncd\ne");
        assert!(matches!(s, Cow::Owned(_)));
    }

    /// `byte_offset` and `pos_at_byte` are mutual inverses for
    /// in-range positions.
    #[test]
    fn cursor_byte_offset_and_inverse() {
        let b = RopeBuffer::from_str("hello\nworld");
        // Start of row 1 = 6 bytes ('h','e','l','l','o','\n').
        let p = Pos::new(1, 0);
        assert_eq!(Cursor::byte_offset(&b, p), 6);
        assert_eq!(Cursor::pos_at_byte(&b, 6), p);
        // Roundtrip mid-line.
        let p2 = Pos::new(1, 3);
        let off = Cursor::byte_offset(&b, p2);
        assert_eq!(Cursor::pos_at_byte(&b, off), p2);
    }

    /// Happy path through all three range-edit methods.
    #[test]
    fn buffer_edit_insert_delete_replace() {
        let mut b = RopeBuffer::from_str("hello");
        BufferEdit::insert_at(&mut b, Pos::new(0, 5), " world");
        assert_eq!(b.as_string(), "hello world");
        BufferEdit::delete_range(&mut b, Pos::new(0, 5)..Pos::new(0, 11));
        assert_eq!(b.as_string(), "hello");
        BufferEdit::replace_range(&mut b, Pos::new(0, 0)..Pos::new(0, 5), "HI");
        assert_eq!(b.as_string(), "HI");
    }

    /// Default `BufferEdit::replace_all` impl forwards to
    /// `replace_range(ORIGIN..MAX, text)`. Non-canonical backends that
    /// don't override `replace_all` rely on this; locked in here with
    /// a minimal mock that records the calls.
    #[test]
    fn buffer_edit_default_replace_all_routes_through_replace_range() {
        // Minimal trait implementor: every method is a stub except
        // `replace_range`, which records its argument for inspection.
        struct MockBuf {
            cursor: Pos,
            lines: Vec<String>,
            last_replace_range: Option<core::ops::Range<Pos>>,
        }
        impl Sealed for MockBuf {}
        impl Cursor for MockBuf {
            fn cursor(&self) -> Pos {
                self.cursor
            }
            fn set_cursor(&mut self, p: Pos) {
                self.cursor = p;
            }
            fn byte_offset(&self, _p: Pos) -> usize {
                0
            }
            fn pos_at_byte(&self, _b: usize) -> Pos {
                Pos::ORIGIN
            }
        }
        impl Query for MockBuf {
            fn line_count(&self) -> u32 {
                self.lines.len() as u32
            }
            fn line(&self, idx: u32) -> &str {
                &self.lines[idx as usize]
            }
            fn len_bytes(&self) -> usize {
                0
            }
            fn slice(&self, _r: core::ops::Range<Pos>) -> Cow<'_, str> {
                Cow::Borrowed("")
            }
        }
        impl BufferEdit for MockBuf {
            fn insert_at(&mut self, _p: Pos, _t: &str) {}
            fn delete_range(&mut self, _r: core::ops::Range<Pos>) {}
            fn replace_range(&mut self, range: core::ops::Range<Pos>, _t: &str) {
                self.last_replace_range = Some(range);
            }
            // NOTE: `replace_all` deliberately NOT overridden — the
            // default impl is what this test exercises.
        }
        impl Search for MockBuf {
            fn find_next(&self, _f: Pos, _p: &Regex) -> Option<core::ops::Range<Pos>> {
                None
            }
            fn find_prev(&self, _f: Pos, _p: &Regex) -> Option<core::ops::Range<Pos>> {
                None
            }
        }
        impl Buffer for MockBuf {}

        let mut m = MockBuf {
            cursor: Pos::ORIGIN,
            lines: vec!["hi".into()],
            last_replace_range: None,
        };
        BufferEdit::replace_all(&mut m, "new content");
        let r = m
            .last_replace_range
            .expect("default impl must hit replace_range");
        assert_eq!(r.start, Pos::ORIGIN);
        assert_eq!(r.end.line, u32::MAX);
        assert_eq!(r.end.col, u32::MAX);
    }

    /// The canonical (overridden) `replace_all` swaps the whole text
    /// and leaves the cursor in-bounds.
    #[test]
    fn buffer_edit_replace_all_rebuilds_content() {
        let mut b = RopeBuffer::from_str("hello\nworld");
        Cursor::set_cursor(&mut b, Pos::new(1, 3));
        BufferEdit::replace_all(&mut b, "alpha\nbeta\ngamma");
        assert_eq!(b.as_string(), "alpha\nbeta\ngamma");
        assert_eq!(Query::line_count(&b), 3);
        // Cursor clamped to surviving content (`replace_all` invariant).
        let c = Cursor::cursor(&b);
        assert!((c.line as usize) < Query::line_count(&b) as usize);
    }

    /// `find_next` honours the at-or-after-`from` contract on the
    /// cursor's own row.
    #[test]
    fn search_find_next_same_row() {
        let b = RopeBuffer::from_str("abc def abc");
        let pat = Regex::new("abc").unwrap();
        let r = Search::find_next(&b, Pos::new(0, 0), &pat).unwrap();
        assert_eq!(r, Pos::new(0, 0)..Pos::new(0, 3));
        let r2 = Search::find_next(&b, Pos::new(0, 1), &pat).unwrap();
        assert_eq!(r2, Pos::new(0, 8)..Pos::new(0, 11));
    }

    /// `find_next` continues onto later rows (and would wrap past the
    /// end if needed).
    #[test]
    fn search_find_next_wraps() {
        let b = RopeBuffer::from_str("foo\nbar\nfoo");
        // 0.0.37: wrap policy moved to engine `SearchState::wrap_around`.
        // The trait impl always wraps; engine code that wants
        // non-wrap semantics short-circuits before invoking the trait.
        let pat = Regex::new("foo").unwrap();
        // Starting on row 1: should find row 2's "foo".
        let r = Search::find_next(&b, Pos::new(1, 0), &pat).unwrap();
        assert_eq!(r, Pos::new(2, 0)..Pos::new(2, 3));
    }

    /// `find_prev` returns the last match at-or-before `from` on the
    /// cursor's row.
    #[test]
    fn search_find_prev_same_row() {
        let b = RopeBuffer::from_str("abc def abc");
        let pat = Regex::new("abc").unwrap();
        let r = Search::find_prev(&b, Pos::new(0, 11), &pat).unwrap();
        assert_eq!(r, Pos::new(0, 8)..Pos::new(0, 11));
    }

    /// Pos → Position → Pos is lossless.
    #[test]
    fn pos_position_roundtrip() {
        let p = Pos::new(7, 3);
        assert_eq!(position_to_pos(pos_to_position(p)), p);
    }

    // ── BufferFoldProviderMut dispatch (0.0.38, Patch C-δ.4) ───────

    /// Add / OpenAt / CloseAt / ToggleAt dispatch onto one fold.
    #[test]
    fn fold_provider_mut_apply_add_open_close_toggle() {
        let mut buf = RopeBuffer::from_str("a\nb\nc\nd\ne");
        {
            let mut p = BufferFoldProviderMut::new(&mut buf);
            p.apply(FoldOp::Add {
                start_row: 1,
                end_row: 3,
                closed: true,
            });
            assert_eq!(p.fold_at_row(2), Some((1, 3, true)));
            p.apply(FoldOp::OpenAt(2));
            assert_eq!(p.fold_at_row(2), Some((1, 3, false)));
            p.apply(FoldOp::CloseAt(2));
            assert_eq!(p.fold_at_row(2), Some((1, 3, true)));
            p.apply(FoldOp::ToggleAt(2));
            assert_eq!(p.fold_at_row(2), Some((1, 3, false)));
        }
        // Mutations landed on the real buffer, not a copy.
        assert_eq!(buf.folds().len(), 1);
    }

    /// OpenAll / CloseAll / ClearAll dispatch across multiple folds.
    #[test]
    fn fold_provider_mut_apply_open_close_clear_all() {
        let mut buf = RopeBuffer::from_str("a\nb\nc\nd\ne");
        buf.add_fold(0, 1, false);
        buf.add_fold(2, 3, true);
        {
            let mut p = BufferFoldProviderMut::new(&mut buf);
            p.apply(FoldOp::CloseAll);
        }
        assert!(buf.folds().iter().all(|f| f.closed));
        {
            let mut p = BufferFoldProviderMut::new(&mut buf);
            p.apply(FoldOp::OpenAll);
        }
        assert!(buf.folds().iter().all(|f| !f.closed));
        {
            let mut p = BufferFoldProviderMut::new(&mut buf);
            p.apply(FoldOp::ClearAll);
        }
        assert!(buf.folds().is_empty());
    }

    /// `invalidate_range` drops only the folds overlapping the range.
    #[test]
    fn fold_provider_mut_invalidate_range_drops_overlapping() {
        let mut buf = RopeBuffer::from_str("a\nb\nc\nd\ne");
        buf.add_fold(0, 1, true);
        buf.add_fold(2, 3, true);
        buf.add_fold(4, 4, true);
        {
            let mut p = BufferFoldProviderMut::new(&mut buf);
            p.invalidate_range(2, 3);
        }
        let starts: Vec<usize> = buf.folds().iter().map(|f| f.start_row).collect();
        assert_eq!(starts, vec![0, 4]);
    }

    /// RemoveAt hits the fold containing the given row.
    #[test]
    fn fold_provider_mut_apply_remove_at() {
        let mut buf = RopeBuffer::from_str("a\nb\nc\nd\ne");
        buf.add_fold(1, 3, true);
        {
            let mut p = BufferFoldProviderMut::new(&mut buf);
            p.apply(FoldOp::RemoveAt(2));
        }
        assert!(buf.folds().is_empty());
    }

    /// Mutating entry points on the no-op provider must do nothing.
    #[test]
    fn noop_fold_provider_apply_is_noop() {
        // The default `apply` impl on the trait is a no-op; verify
        // NoopFoldProvider inherits it without panicking.
        let mut p = crate::types::NoopFoldProvider;
        FoldProvider::apply(&mut p, FoldOp::OpenAll);
        FoldProvider::invalidate_range(&mut p, 0, 5);
        // Read methods unaffected.
        assert!(!FoldProvider::is_row_hidden(&p, 3));
    }
}