// hjkl_engine/buffer_impl.rs

//! Canonical [`Buffer`] trait impl over [`hjkl_buffer::Buffer`].
//!
//! Wires the engine trait surface (`Cursor` / `Query` / `BufferEdit` /
//! `Search`, sealed via [`crate::types::sealed::Sealed`]) onto the
//! in-tree rope-backed buffer. Pos⇄Position conversion lives at this
//! boundary — engine code (FSM, editor) keeps using `hjkl_buffer`'s
//! concrete API directly until the motion / fold relocation lands;
//! external trait users see the engine trait surface.
//!
//! # Why concrete-Editor today
//!
//! The trait surface here is 13 methods. The engine FSM today calls
//! ~46 distinct methods on `hjkl_buffer::Buffer` — most of them are
//! motion / fold / viewport helpers that don't belong on `Buffer`
//! (they're computed over the buffer, not delegated to it). Generic-ifying
//! `Editor<B: Buffer, H: Host>` therefore requires relocating those
//! ~33 helpers from `hjkl-buffer` into `hjkl-engine` as free functions
//! over `B: Cursor + Query`. That's a separate, multi-thousand-LOC
//! patch tracked for the 0.1.0 cut.
//!
//! Until then this module ships the canonical impl + a compile-time
//! assertion that `hjkl_buffer::Buffer` satisfies the trait, so
//! downstream callers can write `fn f<B: hjkl_engine::Buffer>(…)`
//! today and the engine's own `Editor` becomes generic over `B` in a
//! follow-up patch without breaking the trait contract.

27use std::borrow::Cow;
28
29use hjkl_buffer::Buffer as RopeBuffer;
30use hjkl_buffer::Position;
31use regex::Regex;
32
33use crate::types::sealed::Sealed;
34use crate::types::{Buffer, BufferEdit, Cursor, FoldOp, FoldProvider, Pos, Query, Search};
35
36// ── Pos ⇄ Position conversion ──────────────────────────────────────
37
38/// Engine [`Pos`] → buffer [`Position`].
39///
40/// Engine `Pos` is `(line: u32, col: u32)` grapheme-indexed; buffer
41/// [`Position`] is `(row: usize, col: usize)` char-indexed. The two
42/// indexings happen to match for the in-tree rope today (graphemes
43/// without combining marks == chars); future grapheme-aware backends
44/// will need to thread a real grapheme→char map through this fn.
45#[inline]
46pub(crate) fn pos_to_position(p: Pos) -> Position {
47    Position {
48        row: p.line as usize,
49        col: p.col as usize,
50    }
51}
52
53/// Buffer [`Position`] → engine [`Pos`].
54#[inline]
55pub(crate) fn position_to_pos(p: Position) -> Pos {
56    Pos {
57        line: p.row as u32,
58        col: p.col as u32,
59    }
60}
61
62// ── Sealed marker ──────────────────────────────────────────────────
63
// Implementing the private `Sealed` marker is what admits the in-tree
// rope buffer to the sealed engine trait surface (see module docs).
impl Sealed for RopeBuffer {}
65
66// ── Cursor ─────────────────────────────────────────────────────────
67
68impl Cursor for RopeBuffer {
69    fn cursor(&self) -> Pos {
70        position_to_pos(RopeBuffer::cursor(self))
71    }
72
73    fn set_cursor(&mut self, pos: Pos) {
74        RopeBuffer::set_cursor(self, pos_to_position(pos));
75    }
76
77    fn byte_offset(&self, pos: Pos) -> usize {
78        let p = pos_to_position(pos);
79        // Sum byte lengths of every line strictly above `p.row` plus
80        // the trailing `\n`, then the col-byte-offset on `p.row`.
81        let mut byte = 0usize;
82        for r in 0..p.row.min(self.row_count()) {
83            byte += self.line(r).map(str::len).unwrap_or(0) + 1; // +1 for '\n'
84        }
85        if let Some(line) = self.line(p.row) {
86            byte += p.byte_offset(line);
87        }
88        byte
89    }
90
91    fn pos_at_byte(&self, byte: usize) -> Pos {
92        let mut remaining = byte;
93        for r in 0..self.row_count() {
94            let line = self.line(r).unwrap_or("");
95            let line_bytes = line.len();
96            // Each row contributes its bytes plus the trailing `\n`.
97            // `byte` indexing the trailing `\n` itself maps to the
98            // start of the next row (col 0).
99            if remaining <= line_bytes {
100                // Convert byte offset within line to char column.
101                let col = line[..remaining.min(line_bytes)].chars().count();
102                return Pos {
103                    line: r as u32,
104                    col: col as u32,
105                };
106            }
107            remaining -= line_bytes + 1;
108        }
109        // Past end → clamp to end of last line.
110        let last = self.row_count().saturating_sub(1);
111        let line = self.line(last).unwrap_or("");
112        Pos {
113            line: last as u32,
114            col: line.chars().count() as u32,
115        }
116    }
117}
118
119// ── Query ──────────────────────────────────────────────────────────
120
121impl Query for RopeBuffer {
122    fn line_count(&self) -> u32 {
123        self.row_count() as u32
124    }
125
126    fn line(&self, idx: u32) -> &str {
127        // SPEC: panic on OOB rather than silently return empty.
128        match RopeBuffer::line(self, idx as usize) {
129            Some(s) => s,
130            None => panic!(
131                "Query::line: index {idx} out of bounds (line_count = {})",
132                self.row_count()
133            ),
134        }
135    }
136
137    fn len_bytes(&self) -> usize {
138        // Sum of every line's bytes + a `\n` between them. Matches
139        // `as_string().len()` without allocating the join.
140        let n = self.row_count();
141        let mut total = 0usize;
142        for r in 0..n {
143            total += self.line(r).map(str::len).unwrap_or(0);
144        }
145        // n-1 separators between n lines (no trailing newline).
146        total + n.saturating_sub(1)
147    }
148
149    fn dirty_gen(&self) -> u64 {
150        RopeBuffer::dirty_gen(self)
151    }
152
153    fn slice(&self, range: core::ops::Range<Pos>) -> Cow<'_, str> {
154        let start = pos_to_position(range.start);
155        let end = pos_to_position(range.end);
156        if start >= end {
157            return Cow::Borrowed("");
158        }
159        // Single-line slice can borrow.
160        if start.row == end.row {
161            if let Some(line) = RopeBuffer::line(self, start.row) {
162                let lo = start.byte_offset(line).min(line.len());
163                let hi = end.byte_offset(line).min(line.len());
164                return Cow::Borrowed(&line[lo..hi]);
165            }
166            return Cow::Borrowed("");
167        }
168        // Multi-line: allocate.
169        let mut out = String::new();
170        for r in start.row..=end.row.min(self.row_count().saturating_sub(1)) {
171            let line = RopeBuffer::line(self, r).unwrap_or("");
172            if r == start.row {
173                let lo = start.byte_offset(line).min(line.len());
174                out.push_str(&line[lo..]);
175                out.push('\n');
176            } else if r == end.row {
177                let hi = end.byte_offset(line).min(line.len());
178                out.push_str(&line[..hi]);
179            } else {
180                out.push_str(line);
181                out.push('\n');
182            }
183        }
184        Cow::Owned(out)
185    }
186}
187
188// ── BufferEdit ─────────────────────────────────────────────────────
189
190impl BufferEdit for RopeBuffer {
191    fn insert_at(&mut self, pos: Pos, text: &str) {
192        let at = clamp_to_buf(self, pos_to_position(pos));
193        let _ = self.apply_edit(hjkl_buffer::Edit::InsertStr {
194            at,
195            text: text.to_string(),
196        });
197    }
198
199    fn delete_range(&mut self, range: core::ops::Range<Pos>) {
200        let start = clamp_to_buf(self, pos_to_position(range.start));
201        let end = clamp_to_buf(self, pos_to_position(range.end));
202        if start >= end {
203            return;
204        }
205        let _ = self.apply_edit(hjkl_buffer::Edit::DeleteRange {
206            start,
207            end,
208            kind: hjkl_buffer::MotionKind::Char,
209        });
210    }
211
212    fn replace_range(&mut self, range: core::ops::Range<Pos>, replacement: &str) {
213        let start = clamp_to_buf(self, pos_to_position(range.start));
214        let end = clamp_to_buf(self, pos_to_position(range.end));
215        if start >= end {
216            // Treat as pure insert at `start`.
217            let _ = self.apply_edit(hjkl_buffer::Edit::InsertStr {
218                at: start,
219                text: replacement.to_string(),
220            });
221            return;
222        }
223        let _ = self.apply_edit(hjkl_buffer::Edit::Replace {
224            start,
225            end,
226            with: replacement.to_string(),
227        });
228    }
229
230    fn replace_all(&mut self, text: &str) {
231        // Forward to the inherent in-tree fast path which rebuilds
232        // the line vector in one pass + bumps `dirty_gen`.
233        RopeBuffer::replace_all(self, text);
234    }
235}
236
/// Clamp `p` to a valid position in `buf`, delegating to the buffer's
/// own clamping rule. Shared by every `BufferEdit` method above.
#[inline]
fn clamp_to_buf(buf: &RopeBuffer, p: Position) -> Position {
    buf.clamp_position(p)
}
241
242// ── Search ─────────────────────────────────────────────────────────
243
impl Search for RopeBuffer {
    /// First match starting at-or-after `from`.
    ///
    /// The regex runs per line, so a pattern can never match across a
    /// `\n` boundary.
    fn find_next(&self, from: Pos, pat: &Regex) -> Option<core::ops::Range<Pos>> {
        let start = pos_to_position(from);
        let total = self.row_count();
        if total == 0 {
            return None;
        }
        // Scan the from-row from `start.col` onward, then every row
        // after, then wrap to rows before. SPEC: "first match
        // at-or-after `from`". 0.0.37: wrap policy now lives on the
        // engine's `SearchState::wrap_around` (see
        // `DESIGN_33_METHOD_CLASSIFICATION.md` step 3); the trait
        // impl always wraps and the engine's `search_*` free
        // functions are responsible for honouring `wrapscan` by
        // wrapping or not invoking the trait at all.
        let wrap = true;
        let from_line = RopeBuffer::line(self, start.row).unwrap_or("");
        // Clamp the column's byte offset into the line so an OOB
        // `from` column still yields a valid `find_at` start.
        let from_byte = start.byte_offset(from_line).min(from_line.len());
        if let Some(m) = pat.find_at(from_line, from_byte) {
            return Some(byte_range_to_pos_range(
                start.row,
                m.start(),
                start.row,
                m.end(),
                from_line,
            ));
        }
        for offset in 1..total {
            let row = start.row + offset;
            // NOTE(review): `wrap` is hard-coded `true` above, so both
            // `!wrap` guards below are currently dead branches (kept,
            // presumably, for a future non-wrapping caller — confirm).
            if row >= total && !wrap {
                break;
            }
            let row = row % total;
            if !wrap && row <= start.row {
                break;
            }
            let line = RopeBuffer::line(self, row).unwrap_or("");
            if let Some(m) = pat.find(line) {
                return Some(byte_range_to_pos_range(row, m.start(), row, m.end(), line));
            }
            // NOTE(review): `offset` stays in 1..total so
            // `(start.row + offset) % total` never equals `start.row`;
            // this break is defensive / unreachable.
            if row == start.row {
                break;
            }
        }
        // NOTE(review): after a full wrap, the portion of the from-row
        // *before* `from_byte` is never rescanned, so a match strictly
        // before `from` on its own row is not found — confirm this is
        // the intended wrapscan semantics.
        None
    }

    /// Last match starting at-or-before `from`.
    fn find_prev(&self, from: Pos, pat: &Regex) -> Option<core::ops::Range<Pos>> {
        let start = pos_to_position(from);
        let total = self.row_count();
        if total == 0 {
            return None;
        }
        // 0.0.37: wrap moved to engine SearchState; trait impl always wraps.
        let wrap = true;
        // Last match at-or-before `from`. We can't run the regex
        // backwards, so iterate matches and pick the last one with
        // start <= from-byte on the from-row, then walk previous rows
        // taking the last match per row.
        let from_line = RopeBuffer::line(self, start.row).unwrap_or("");
        let from_byte = start.byte_offset(from_line).min(from_line.len());
        let mut best: Option<(usize, usize)> = None;
        // `find_iter` yields matches left-to-right, so the loop can
        // stop at the first match past `from_byte`.
        for m in pat.find_iter(from_line) {
            if m.start() <= from_byte {
                best = Some((m.start(), m.end()));
            } else {
                break;
            }
        }
        if let Some((s, e)) = best {
            return Some(byte_range_to_pos_range(
                start.row, s, start.row, e, from_line,
            ));
        }
        for offset in 1..total {
            // Walk backwards; when `offset` passes `start.row`, wrap
            // to the bottom of the buffer.
            let row = if offset > start.row {
                // NOTE(review): dead branch — `wrap` is hard-coded
                // `true` above (see find_next).
                if !wrap {
                    break;
                }
                total - (offset - start.row)
            } else {
                start.row - offset
            };
            if !wrap && row >= start.row {
                break;
            }
            let line = RopeBuffer::line(self, row).unwrap_or("");
            let last = pat.find_iter(line).last();
            if let Some(m) = last {
                return Some(byte_range_to_pos_range(row, m.start(), row, m.end(), line));
            }
            // NOTE(review): the arithmetic above never reproduces
            // `start.row`; defensive / unreachable, mirroring find_next.
            if row == start.row {
                break;
            }
        }
        // NOTE(review): symmetric to find_next — matches *after*
        // `from` on the from-row are never reconsidered after a full
        // wrap; confirm intended.
        None
    }
}
343
344#[inline]
345fn byte_range_to_pos_range(
346    s_row: usize,
347    s_byte: usize,
348    e_row: usize,
349    e_byte: usize,
350    line: &str,
351) -> core::ops::Range<Pos> {
352    let s_col = line[..s_byte.min(line.len())].chars().count();
353    let e_col = line[..e_byte.min(line.len())].chars().count();
354    Pos {
355        line: s_row as u32,
356        col: s_col as u32,
357    }..Pos {
358        line: e_row as u32,
359        col: e_col as u32,
360    }
361}
362
363// ── Buffer super-trait ─────────────────────────────────────────────
364
// Empty impl: `Buffer` is the super-trait over the four sub-traits
// implemented above (see the compile-time assertion in the tests), so
// there is nothing to add here.
impl Buffer for RopeBuffer {}
366
367// ── Fold provider ──────────────────────────────────────────────────
368
369/// [`FoldProvider`] adapter wrapping a `&hjkl_buffer::Buffer`. Lets
370/// engine call sites ask the buffer's fold storage about visible
371/// rows without reaching into `Buffer::next_visible_row` &c. directly.
372///
373/// Construct with [`BufferFoldProvider::new`]. Hosts that want to
374/// expose their own fold model (a separate fold tree, LSP-derived
375/// folding ranges, …) can implement `FoldProvider` against their own
376/// state and skip this adapter entirely.
377///
378/// Introduced in 0.0.32 (Patch C-β) as part of the fold-iteration
379/// relocation. Fold *storage* still lives on the buffer for
380/// `dirty_gen` / render-cache reasons; only the iteration API moved.
pub struct BufferFoldProvider<'a> {
    // Immutable borrow: read-only fold queries only. Fold mutation
    // goes through `BufferFoldProviderMut` instead.
    buffer: &'a RopeBuffer,
}

impl<'a> BufferFoldProvider<'a> {
    /// Wrap `buffer` for read-only fold queries.
    pub fn new(buffer: &'a RopeBuffer) -> Self {
        Self { buffer }
    }
}
390
391impl FoldProvider for BufferFoldProvider<'_> {
392    fn next_visible_row(&self, row: usize, _row_count: usize) -> Option<usize> {
393        // Buffer ignores the row_count hint — it knows its own size.
394        RopeBuffer::next_visible_row(self.buffer, row)
395    }
396
397    fn prev_visible_row(&self, row: usize) -> Option<usize> {
398        RopeBuffer::prev_visible_row(self.buffer, row)
399    }
400
401    fn is_row_hidden(&self, row: usize) -> bool {
402        RopeBuffer::is_row_hidden(self.buffer, row)
403    }
404
405    fn fold_at_row(&self, row: usize) -> Option<(usize, usize, bool)> {
406        let f = self.buffer.fold_at_row(row)?;
407        Some((f.start_row, f.end_row, f.closed))
408    }
409
410    // `apply` / `invalidate_range` use the trait's default no-op impl
411    // because `BufferFoldProvider` only borrows the buffer immutably.
412    // For fold mutation, use [`BufferFoldProviderMut`] instead.
413}
414
415/// Mutable [`FoldProvider`] adapter wrapping a `&mut hjkl_buffer::Buffer`.
416/// Engine call sites that need to dispatch a [`FoldOp`] (vim's `z…`
417/// keystrokes, the `:fold*` Ex commands, edit-pipeline invalidation)
418/// construct this on the fly from `&mut self.buffer` and call
419/// [`FoldProvider::apply`] / [`FoldProvider::invalidate_range`] on it.
420///
421/// Introduced in 0.0.38 (Patch C-δ.4) as part of routing fold mutation
422/// through the [`FoldProvider`] surface. Fold *storage* still lives
423/// on [`hjkl_buffer::Buffer`] for `dirty_gen` / render-cache reasons;
424/// only the dispatch path moved.
pub struct BufferFoldProviderMut<'a> {
    // Mutable borrow: enables `apply` / `invalidate_range` dispatch
    // onto the buffer's fold storage.
    buffer: &'a mut RopeBuffer,
}

impl<'a> BufferFoldProviderMut<'a> {
    /// Wrap `buffer` for fold queries *and* fold mutation.
    pub fn new(buffer: &'a mut RopeBuffer) -> Self {
        Self { buffer }
    }
}
434
435impl FoldProvider for BufferFoldProviderMut<'_> {
436    fn next_visible_row(&self, row: usize, _row_count: usize) -> Option<usize> {
437        RopeBuffer::next_visible_row(self.buffer, row)
438    }
439
440    fn prev_visible_row(&self, row: usize) -> Option<usize> {
441        RopeBuffer::prev_visible_row(self.buffer, row)
442    }
443
444    fn is_row_hidden(&self, row: usize) -> bool {
445        RopeBuffer::is_row_hidden(self.buffer, row)
446    }
447
448    fn fold_at_row(&self, row: usize) -> Option<(usize, usize, bool)> {
449        let f = self.buffer.fold_at_row(row)?;
450        Some((f.start_row, f.end_row, f.closed))
451    }
452
453    fn apply(&mut self, op: FoldOp) {
454        match op {
455            FoldOp::Add {
456                start_row,
457                end_row,
458                closed,
459            } => {
460                self.buffer.add_fold(start_row, end_row, closed);
461            }
462            FoldOp::RemoveAt(row) => {
463                self.buffer.remove_fold_at(row);
464            }
465            FoldOp::OpenAt(row) => {
466                self.buffer.open_fold_at(row);
467            }
468            FoldOp::CloseAt(row) => {
469                self.buffer.close_fold_at(row);
470            }
471            FoldOp::ToggleAt(row) => {
472                self.buffer.toggle_fold_at(row);
473            }
474            FoldOp::OpenAll => {
475                self.buffer.open_all_folds();
476            }
477            FoldOp::CloseAll => {
478                self.buffer.close_all_folds();
479            }
480            FoldOp::ClearAll => {
481                self.buffer.clear_all_folds();
482            }
483            FoldOp::Invalidate { start_row, end_row } => {
484                self.buffer.invalidate_folds_in_range(start_row, end_row);
485            }
486        }
487    }
488
489    fn invalidate_range(&mut self, start_row: usize, end_row: usize) {
490        self.buffer.invalidate_folds_in_range(start_row, end_row);
491    }
492}
493
494/// Owned-snapshot [`FoldProvider`] adapter. Carries a copy of the
495/// buffer's fold list (one `Vec<Fold>` clone — fold lists are tiny in
496/// practice) plus the buffer's `row_count`, so the call site can hold
497/// the snapshot for fold queries while passing `&mut hjkl_buffer::Buffer`
498/// to a motion function that needs cursor mutation.
499///
500/// Introduced in 0.0.40 (Patch C-δ.5) so the lifted motion fns can
501/// take `&dyn FoldProvider` separately from `&mut B: Cursor + Query`
502/// without the call site running into the immutable-vs-mutable
503/// borrow conflict that arises with [`BufferFoldProvider`] /
504/// [`BufferFoldProviderMut`] (both of which hold a buffer borrow).
505///
506/// The snapshot is read-only — `apply` and `invalidate_range` are
507/// no-ops (any fold mutation must go through the canonical
508/// [`BufferFoldProviderMut`] adapter against the live buffer).
pub struct SnapshotFoldProvider {
    // Cloned fold list — decoupled from the buffer's lifetime.
    folds: Vec<hjkl_buffer::Fold>,
    // Row count captured at snapshot time; bounds visible-row walks.
    row_count: usize,
}

impl SnapshotFoldProvider {
    /// Snapshot the current fold list + row-count from `buffer`.
    /// The snapshot is decoupled from the buffer's lifetime, so the
    /// caller can immediately re-borrow the buffer mutably.
    pub fn from_buffer(buffer: &RopeBuffer) -> Self {
        Self {
            folds: buffer.folds().to_vec(),
            row_count: buffer.row_count(),
        }
    }

    /// True iff `row` is hidden by any closed fold in the snapshot.
    /// Mirrors [`hjkl_buffer::Buffer::is_row_hidden`] over the
    /// snapshotted fold list.
    fn snapshot_is_row_hidden(&self, row: usize) -> bool {
        self.folds.iter().any(|f| f.hides(row))
    }
}
532
533impl FoldProvider for SnapshotFoldProvider {
534    fn next_visible_row(&self, row: usize, _row_count: usize) -> Option<usize> {
535        // Mirrors [`hjkl_buffer::Buffer::next_visible_row`]: walk
536        // forward, skipping closed-fold-hidden rows, stop at end.
537        let last = self.row_count.saturating_sub(1);
538        if last == 0 && row == 0 {
539            return None;
540        }
541        let mut r = row.checked_add(1)?;
542        while r <= last && self.snapshot_is_row_hidden(r) {
543            r += 1;
544        }
545        (r <= last).then_some(r)
546    }
547
548    fn prev_visible_row(&self, row: usize) -> Option<usize> {
549        // Mirrors [`hjkl_buffer::Buffer::prev_visible_row`].
550        let mut r = row.checked_sub(1)?;
551        while self.snapshot_is_row_hidden(r) {
552            r = r.checked_sub(1)?;
553        }
554        Some(r)
555    }
556
557    fn is_row_hidden(&self, row: usize) -> bool {
558        self.snapshot_is_row_hidden(row)
559    }
560
561    fn fold_at_row(&self, row: usize) -> Option<(usize, usize, bool)> {
562        self.folds
563            .iter()
564            .find(|f| f.contains(row))
565            .map(|f| (f.start_row, f.end_row, f.closed))
566    }
567
568    // `apply` / `invalidate_range` use the trait's default no-op impl.
569}
570
571// ── Tests ──────────────────────────────────────────────────────────
572
#[cfg(test)]
mod tests {
    use super::*;

    /// Compile-time check: the in-tree `hjkl_buffer::Buffer` satisfies
    /// the SPEC `Buffer` super-trait (and therefore all four sub-traits).
    /// If this stops compiling, the trait surface diverged from the
    /// canonical impl — fix the impl, not this assertion.
    #[test]
    fn rope_buffer_implements_spec_buffer() {
        fn assert_buffer<B: Buffer>() {}
        fn assert_cursor<B: Cursor>() {}
        fn assert_query<B: Query>() {}
        fn assert_edit<B: BufferEdit>() {}
        fn assert_search<B: Search>() {}
        assert_buffer::<RopeBuffer>();
        assert_cursor::<RopeBuffer>();
        assert_query::<RopeBuffer>();
        assert_edit::<RopeBuffer>();
        assert_search::<RopeBuffer>();
    }

    #[test]
    fn cursor_roundtrip() {
        let mut b = RopeBuffer::from_str("hello\nworld");
        Cursor::set_cursor(&mut b, Pos::new(1, 3));
        assert_eq!(Cursor::cursor(&b), Pos::new(1, 3));
    }

    #[test]
    fn query_line_count_and_line() {
        let b = RopeBuffer::from_str("a\nb\nc");
        assert_eq!(Query::line_count(&b), 3);
        assert_eq!(Query::line(&b, 0), "a");
        assert_eq!(Query::line(&b, 2), "c");
    }

    #[test]
    fn query_len_bytes_matches_join() {
        let b = RopeBuffer::from_str("foo\nbar\nbaz");
        assert_eq!(Query::len_bytes(&b), b.as_string().len());
    }

    #[test]
    fn query_slice_single_line_borrows() {
        let b = RopeBuffer::from_str("hello world");
        let s = Query::slice(&b, Pos::new(0, 0)..Pos::new(0, 5));
        assert_eq!(&*s, "hello");
        // Single-line path must not allocate.
        assert!(matches!(s, Cow::Borrowed(_)));
    }

    #[test]
    fn query_slice_multiline_allocates() {
        let b = RopeBuffer::from_str("ab\ncd\nef");
        let s = Query::slice(&b, Pos::new(0, 1)..Pos::new(2, 1));
        assert_eq!(&*s, "b\ncd\ne");
        assert!(matches!(s, Cow::Owned(_)));
    }

    #[test]
    fn cursor_byte_offset_and_inverse() {
        let b = RopeBuffer::from_str("hello\nworld");
        // Start of row 1 = 6 bytes ('h','e','l','l','o','\n').
        let p = Pos::new(1, 0);
        assert_eq!(Cursor::byte_offset(&b, p), 6);
        assert_eq!(Cursor::pos_at_byte(&b, 6), p);
        // Roundtrip mid-line.
        let p2 = Pos::new(1, 3);
        let off = Cursor::byte_offset(&b, p2);
        assert_eq!(Cursor::pos_at_byte(&b, off), p2);
    }

    #[test]
    fn buffer_edit_insert_delete_replace() {
        let mut b = RopeBuffer::from_str("hello");
        BufferEdit::insert_at(&mut b, Pos::new(0, 5), " world");
        assert_eq!(b.as_string(), "hello world");
        BufferEdit::delete_range(&mut b, Pos::new(0, 5)..Pos::new(0, 11));
        assert_eq!(b.as_string(), "hello");
        BufferEdit::replace_range(&mut b, Pos::new(0, 0)..Pos::new(0, 5), "HI");
        assert_eq!(b.as_string(), "HI");
    }

    /// Default `BufferEdit::replace_all` impl forwards to
    /// `replace_range(ORIGIN..MAX, text)`. Non-canonical backends that
    /// don't override `replace_all` rely on this; locked in here with
    /// a minimal mock that records the calls.
    #[test]
    fn buffer_edit_default_replace_all_routes_through_replace_range() {
        // Minimal recording mock: every trait method is a stub except
        // `replace_range`, which captures its argument for assertion.
        struct MockBuf {
            cursor: Pos,
            lines: Vec<String>,
            last_replace_range: Option<core::ops::Range<Pos>>,
        }
        impl Sealed for MockBuf {}
        impl Cursor for MockBuf {
            fn cursor(&self) -> Pos {
                self.cursor
            }
            fn set_cursor(&mut self, p: Pos) {
                self.cursor = p;
            }
            fn byte_offset(&self, _p: Pos) -> usize {
                0
            }
            fn pos_at_byte(&self, _b: usize) -> Pos {
                Pos::ORIGIN
            }
        }
        impl Query for MockBuf {
            fn line_count(&self) -> u32 {
                self.lines.len() as u32
            }
            fn line(&self, idx: u32) -> &str {
                &self.lines[idx as usize]
            }
            fn len_bytes(&self) -> usize {
                0
            }
            fn slice(&self, _r: core::ops::Range<Pos>) -> Cow<'_, str> {
                Cow::Borrowed("")
            }
        }
        impl BufferEdit for MockBuf {
            fn insert_at(&mut self, _p: Pos, _t: &str) {}
            fn delete_range(&mut self, _r: core::ops::Range<Pos>) {}
            fn replace_range(&mut self, range: core::ops::Range<Pos>, _t: &str) {
                self.last_replace_range = Some(range);
            }
            // `replace_all` deliberately NOT overridden — that default
            // impl is exactly what this test pins down.
        }
        impl Search for MockBuf {
            fn find_next(&self, _f: Pos, _p: &Regex) -> Option<core::ops::Range<Pos>> {
                None
            }
            fn find_prev(&self, _f: Pos, _p: &Regex) -> Option<core::ops::Range<Pos>> {
                None
            }
        }
        impl Buffer for MockBuf {}

        let mut m = MockBuf {
            cursor: Pos::ORIGIN,
            lines: vec!["hi".into()],
            last_replace_range: None,
        };
        BufferEdit::replace_all(&mut m, "new content");
        let r = m
            .last_replace_range
            .expect("default impl must hit replace_range");
        assert_eq!(r.start, Pos::ORIGIN);
        assert_eq!(r.end.line, u32::MAX);
        assert_eq!(r.end.col, u32::MAX);
    }

    #[test]
    fn buffer_edit_replace_all_rebuilds_content() {
        let mut b = RopeBuffer::from_str("hello\nworld");
        Cursor::set_cursor(&mut b, Pos::new(1, 3));
        BufferEdit::replace_all(&mut b, "alpha\nbeta\ngamma");
        assert_eq!(b.as_string(), "alpha\nbeta\ngamma");
        assert_eq!(Query::line_count(&b), 3);
        // Cursor clamped to surviving content (`replace_all` invariant).
        let c = Cursor::cursor(&b);
        assert!((c.line as usize) < Query::line_count(&b) as usize);
    }

    #[test]
    fn search_find_next_same_row() {
        let b = RopeBuffer::from_str("abc def abc");
        let pat = Regex::new("abc").unwrap();
        let r = Search::find_next(&b, Pos::new(0, 0), &pat).unwrap();
        assert_eq!(r, Pos::new(0, 0)..Pos::new(0, 3));
        // From col 1 the match at col 0 is skipped; next is at col 8.
        let r2 = Search::find_next(&b, Pos::new(0, 1), &pat).unwrap();
        assert_eq!(r2, Pos::new(0, 8)..Pos::new(0, 11));
    }

    #[test]
    fn search_find_next_wraps() {
        let b = RopeBuffer::from_str("foo\nbar\nfoo");
        // 0.0.37: wrap policy moved to engine `SearchState::wrap_around`.
        // The trait impl always wraps; engine code that wants
        // non-wrap semantics short-circuits before invoking the trait.
        let pat = Regex::new("foo").unwrap();
        // Starting on row 1: should find row 2's "foo".
        let r = Search::find_next(&b, Pos::new(1, 0), &pat).unwrap();
        assert_eq!(r, Pos::new(2, 0)..Pos::new(2, 3));
    }

    #[test]
    fn search_find_prev_same_row() {
        let b = RopeBuffer::from_str("abc def abc");
        let pat = Regex::new("abc").unwrap();
        let r = Search::find_prev(&b, Pos::new(0, 11), &pat).unwrap();
        assert_eq!(r, Pos::new(0, 8)..Pos::new(0, 11));
    }

    #[test]
    fn pos_position_roundtrip() {
        let p = Pos::new(7, 3);
        assert_eq!(position_to_pos(pos_to_position(p)), p);
    }

    // ── BufferFoldProviderMut dispatch (0.0.38, Patch C-δ.4) ───────

    #[test]
    fn fold_provider_mut_apply_add_open_close_toggle() {
        let mut buf = RopeBuffer::from_str("a\nb\nc\nd\ne");
        {
            let mut p = BufferFoldProviderMut::new(&mut buf);
            p.apply(FoldOp::Add {
                start_row: 1,
                end_row: 3,
                closed: true,
            });
            assert_eq!(p.fold_at_row(2), Some((1, 3, true)));
            p.apply(FoldOp::OpenAt(2));
            assert_eq!(p.fold_at_row(2), Some((1, 3, false)));
            p.apply(FoldOp::CloseAt(2));
            assert_eq!(p.fold_at_row(2), Some((1, 3, true)));
            p.apply(FoldOp::ToggleAt(2));
            assert_eq!(p.fold_at_row(2), Some((1, 3, false)));
        }
        // Mutations reached the underlying buffer's fold storage.
        assert_eq!(buf.folds().len(), 1);
    }

    #[test]
    fn fold_provider_mut_apply_open_close_clear_all() {
        let mut buf = RopeBuffer::from_str("a\nb\nc\nd\ne");
        buf.add_fold(0, 1, false);
        buf.add_fold(2, 3, true);
        {
            let mut p = BufferFoldProviderMut::new(&mut buf);
            p.apply(FoldOp::CloseAll);
        }
        assert!(buf.folds().iter().all(|f| f.closed));
        {
            let mut p = BufferFoldProviderMut::new(&mut buf);
            p.apply(FoldOp::OpenAll);
        }
        assert!(buf.folds().iter().all(|f| !f.closed));
        {
            let mut p = BufferFoldProviderMut::new(&mut buf);
            p.apply(FoldOp::ClearAll);
        }
        assert!(buf.folds().is_empty());
    }

    #[test]
    fn fold_provider_mut_invalidate_range_drops_overlapping() {
        let mut buf = RopeBuffer::from_str("a\nb\nc\nd\ne");
        buf.add_fold(0, 1, true);
        buf.add_fold(2, 3, true);
        buf.add_fold(4, 4, true);
        {
            let mut p = BufferFoldProviderMut::new(&mut buf);
            p.invalidate_range(2, 3);
        }
        // Only the fold overlapping rows 2..=3 is dropped.
        let starts: Vec<usize> = buf.folds().iter().map(|f| f.start_row).collect();
        assert_eq!(starts, vec![0, 4]);
    }

    #[test]
    fn fold_provider_mut_apply_remove_at() {
        let mut buf = RopeBuffer::from_str("a\nb\nc\nd\ne");
        buf.add_fold(1, 3, true);
        {
            let mut p = BufferFoldProviderMut::new(&mut buf);
            p.apply(FoldOp::RemoveAt(2));
        }
        assert!(buf.folds().is_empty());
    }

    #[test]
    fn noop_fold_provider_apply_is_noop() {
        // The default `apply` impl on the trait is a no-op; verify
        // NoopFoldProvider inherits it without panicking.
        let mut p = crate::types::NoopFoldProvider;
        FoldProvider::apply(&mut p, FoldOp::OpenAll);
        FoldProvider::invalidate_range(&mut p, 0, 5);
        // Read methods unaffected.
        assert!(!FoldProvider::is_row_hidden(&p, 3));
    }
}
855}