use std::{
    collections::HashMap,
    ops::{ControlFlow, Range},
    sync::{LazyLock, Mutex},
    time::{Duration, Instant},
};

use duat_core::{
    Ranges,
    buffer::{Buffer, BufferParts, BufferTracker, Change, PerBuffer},
    context::{self, Handle},
    data::Pass,
    form::{self, FormId},
    hook::{self, BufferUpdated},
    lender::Lender,
    opts::PrintOpts,
    text::{Bytes, Point, RegexHaystack, Tagger},
};
use duat_filetype::{FileType, PassFileType};
use tree_sitter::{
    InputEdit, Node, ParseOptions, ParseState, Parser as TsParser, Point as TsPoint, QueryCapture,
    QueryCursor, QueryMatch, QueryProperty, Range as TsRange, StreamingIterator, TextProvider,
};

use crate::{LangParts, Queries, lang_parts_of, query_from_path, tree::Trees};

const PARSE_TIMEOUT: Duration = Duration::from_millis(3);
static TRACKER: BufferTracker = BufferTracker::new();
static PARSERS: PerBuffer<Parser> = PerBuffer::new();

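/// Adds the [`BufferUpdated`] hook that drives tree-sitter parsing.
///
/// The hook lazily creates a [`Parser`] for every buffer with a recognized
/// filetype, then re-parses and re-highlights the visible regions on each
/// update, yielding back to the event loop when parsing exceeds
/// [`PARSE_TIMEOUT`].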
pub(crate) fn add_parser_hook() {
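    /// Parses the buffer in the background, returning `true` if a matching
    /// parser exists for this buffer's filetype.
    ///
    /// When the time budget runs out, the remaining work is re-queued via
    /// `context::queue` (presumably duat's main event queue) with
    /// `is_queued == true`, so a queued continuation is never skipped by the
    /// `is_parsing` early return.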
    fn async_parse(
        pa: &mut Pass,
        handle: &Handle,
        printed_lines: Vec<Range<usize>>,
        is_queued: bool,
    ) -> bool {
        if let Some(filetype) = handle.filetype(pa)
            && let Some((parser, buf)) = PARSERS.write(pa, handle)
            && parser.lang_parts.0 == filetype
        {
            if parser.is_parsing && !is_queued {
                return true;
            }

            parser.is_parsing = true;

            let visible_ranges = get_visible_ranges(&printed_lines);
            let mut parts = TRACKER.parts(buf).unwrap();

            apply_changes(&parts, parser);

            if is_queued && parts.changes.len() > 0 {
                let printed_lines = handle.printed_line_ranges(pa);
                return async_parse(pa, handle, printed_lines, is_queued);
            }

            if !parser.parse(&mut parts, &visible_ranges, Some(Instant::now()), handle) {
                let handle = handle.clone();
                let printed_lines = printed_lines.clone();
                context::queue(move |pa| _ = async_parse(pa, &handle, printed_lines, true))
            } else {
                parser.is_parsing = false;
            }

            for range in parts
                .ranges_to_update
                .select_from(printed_lines.iter().cloned())
            {
                let range = range.start..range.end + 1;
                parts.tags.remove_excl(ts_tagger(), range.clone());
                parser.highlight(range.clone(), &mut parts);
                parts.ranges_to_update.update_on([range]);
            }

            true
        } else {
            false
        }
    }

    hook::add::<BufferUpdated>(|pa, handle| {
        let printed_lines = handle.printed_line_ranges(pa);
        if async_parse(pa, handle, printed_lines.clone(), false) {
            return;
        }

        let Some(filetype) = handle.read(pa).filetype() else {
            return;
        };

        if let Some(lang_parts) = lang_parts_of(filetype, handle) {
            let len_bytes = handle.text(pa).len().byte();

            let mut parser = TsParser::new();
            parser.set_language(lang_parts.1).unwrap();

            TRACKER.register_buffer(handle.write(pa));
            PARSERS.register(pa, handle, Parser {
                parser,
                trees: Trees::new([Ranges::new(0..len_bytes)]),
                lang_parts,
                forms: forms_from_lang_parts(lang_parts),
                injections: Vec::new(),
                ranges_to_inject: Ranges::new(0..len_bytes),
                is_parsing: false,
            });

            async_parse(pa, handle, printed_lines.clone(), false);
        }
    });
}

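/// A tree-sitter parser for a single language within a [`Buffer`].
///
/// The root `Parser` covers the whole buffer; every language injection found
/// by the injection queries gets its own nested `Parser`, stored in
/// `injections` and restricted to the injected regions.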
pub struct Parser {
    parser: TsParser,
    trees: Trees,
    lang_parts: LangParts<'static>,
    forms: &'static [(FormId, u8)],
    ranges_to_inject: Ranges,
    injections: Vec<Parser>,
    is_parsing: bool,
}

impl Parser {
    pub fn root_node(&self) -> Node<'_> {
        let tree = self.trees.iter().next().unwrap();
        tree.ts_tree.as_ref().unwrap().root_node()
    }

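    /// Parses every tree intersecting the visible ranges, then resolves
    /// injections and recurses into the injected parsers.
    ///
    /// Returns `false` if the time budget given by `start` ran out before
    /// everything was parsed.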
    fn parse(
        &mut self,
        parts: &mut BufferParts,
        visible_ranges: &[Range<usize>],
        start: Option<Instant>,
        handle: &Handle,
    ) -> bool {
        let mut parsed_at_least_one_region = false;

        for range in visible_ranges.iter() {
            let Some(parsed_a_tree) = self.parse_trees(range.clone(), parts, start) else {
                return false;
            };

            parsed_at_least_one_region |= parsed_a_tree;
        }

        if parsed_at_least_one_region {
            self.ranges_to_inject.add(0..parts.bytes.len().byte());
        }

        let ranges_to_inject = visible_ranges
            .iter()
            .flat_map(|range| self.ranges_to_inject.iter_over(range.clone()))
            .fold(Vec::<Range<usize>>::new(), |mut ranges, range| {
                match ranges.last_mut() {
                    Some(last) if last.end == range.start => last.end = range.end,
                    _ => ranges.push(range),
                }
                ranges
            });

        for range in ranges_to_inject {
            self.inject(range, parts, handle);
            if must_yield(start) {
                return false;
            }
        }

        for injection in self.injections.iter_mut() {
            if !injection.parse(parts, visible_ranges, start, handle) {
                return false;
            }
        }

        true
    }

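    /// Re-parses the trees that intersect `range` and still need parsing.
    ///
    /// Returns `Some(true)` if at least one region was parsed, and `None` if
    /// tree-sitter was interrupted by the progress callback (i.e. the time
    /// budget ran out).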
    fn parse_trees(
        &mut self,
        range: Range<usize>,
        parts: &mut BufferParts,
        start: Option<Instant>,
    ) -> Option<bool> {
        let mut callback = |_: &ParseState| match must_yield(start) {
            true => ControlFlow::Break(()),
            false => ControlFlow::Continue(()),
        };

        let ts_range = |range: Range<usize>| TsRange {
            start_byte: range.start,
            end_byte: range.end,
            start_point: ts_point(parts.bytes.point_at_byte(range.start), parts.bytes),
            end_point: ts_point(parts.bytes.point_at_byte(range.end), parts.bytes),
        };

        let mut parsed_at_least_one_region = false;
        let mut parsing_failed = false;

        for (_, tree) in self.trees.intersecting_mut(range.clone()) {
            if !tree.needs_parse {
                continue;
            }

            if tree.region.len() == 1 {
                let ts_range = ts_range(tree.region.iter().next().unwrap());
                self.parser.set_included_ranges(&[ts_range]).unwrap();
            } else {
                let ts_ranges: Vec<_> = tree.region.iter().map(ts_range).collect();
                self.parser.set_included_ranges(&ts_ranges).unwrap();
            }

            let Some(new_ts_tree) = self.parser.parse_with_options(
                &mut parser_fn(parts.bytes),
                tree.ts_tree.as_ref(),
                Some(ParseOptions::new().progress_callback(&mut callback)),
            ) else {
                parsing_failed = true;
                break;
            };

            if let Some(ts_tree) = tree.ts_tree.as_mut() {
                parts.ranges_to_update.add_ranges(
                    ts_tree
                        .changed_ranges(&new_ts_tree)
                        .map(|r| r.start_byte..r.end_byte),
                );

                *ts_tree = new_ts_tree;
            } else {
                parts.ranges_to_update.add_ranges(tree.region.iter());
                tree.ts_tree = Some(new_ts_tree);
            }

            tree.needs_parse = false;
            parsed_at_least_one_region = true;
        }

        if parsing_failed {
            return None;
        }

        Some(parsed_at_least_one_region)
    }

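    /// Runs the highlight query over `range` and inserts one form tag per
    /// capture, using the per-language forms and priorities from
    /// [`forms_from_lang_parts`].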
    fn highlight(&self, range: Range<usize>, parts: &mut BufferParts) {
        let buf = TsBuf(parts.bytes);

        let tagger = ts_tagger();
        let (.., Queries { highlights, .. }) = &self.lang_parts;

        for (_, tree) in self.trees.intersecting(range.clone()) {
            let Some(ts_tree) = tree.ts_tree.as_ref() else {
                continue;
            };

            let mut cursor = QueryCursor::new();
            cursor.set_byte_range(range.clone());
            let mut hi_captures = cursor.captures(highlights, ts_tree.root_node(), buf);

            while let Some((qm, _)) = hi_captures.next() {
                let qm: &QueryMatch = qm;
                for cap in qm.captures.iter() {
                    let ts_range = cap.node.range();

                    let (form, priority) = self.forms[cap.index as usize];
                    let range = ts_range.start_byte..ts_range.end_byte;
                    parts.tags.insert(tagger, range, form.to_tag(priority));
                }
            }
        }

        for injection in self.injections.iter() {
            injection.highlight(range.clone(), parts);
        }
    }

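    /// Resolves language injections within `range`.
    ///
    /// New injections spawn (or extend) a nested [`Parser`], injections that
    /// are no longer captured are removed, and captures whose language isn't
    /// available yet are deferred for a later pass.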
    fn inject(&mut self, range: Range<usize>, parts: &mut BufferParts, handle: &Handle) {
        let range = self
            .injections
            .iter()
            .flat_map({
                let range = range.clone();
                move |inj| inj.trees.intersecting(range.clone())
            })
            .fold(range, |range, (_, tree)| {
                let inj_range = tree.region.iter().next().unwrap();
                range.start.min(inj_range.start)..range.end.max(inj_range.end)
            });

        let buf = TsBuf(parts.bytes);
        let (.., Queries { injections, .. }) = self.lang_parts;

        let cn = injections.capture_names();
        let is_content = |cap: &&QueryCapture| cn[cap.index as usize] == "injection.content";
        let language = |qm: &QueryMatch, props: &[QueryProperty]| {
            props
                .iter()
                .find_map(|p| {
                    (p.key.as_ref() == "injection.language")
                        .then_some(p.value.as_ref().unwrap().to_string())
                })
                .or_else(|| {
                    let cap = qm
                        .captures
                        .iter()
                        .find(|cap| cn[cap.index as usize] == "injection.language")?;
                    Some(parts.bytes.strs(cap.node.byte_range()).unwrap().to_string())
                })
        };

        let mut cursor = QueryCursor::new();
        let mut observed_injections = Vec::new();
        let mut deferred_ranges = Vec::new();

        for (_, tree) in self.trees.intersecting(range.clone()) {
            let ts_tree = tree.ts_tree.as_ref().unwrap();

            cursor.set_byte_range(range.clone());

            let mut inj_captures = cursor.captures(injections, ts_tree.root_node(), buf);

            while let Some((qm, _)) = inj_captures.next() {
                let Some(cap) = qm.captures.iter().find(is_content) else {
                    continue;
                };

                let cap_range = cap.node.byte_range();
                let props = injections.property_settings(qm.pattern_index);

                let Some(filetype) = language(qm, props) else {
                    continue;
                };

                let Some(mut lang_parts) = lang_parts_of(&filetype, handle) else {
                    deferred_ranges.push(cap_range);
                    continue;
                };

                if let Some(prop) = props.iter().find(|p| p.key.as_ref() == "injection.query")
                    && let Some(value) = prop.value.as_ref()
                {
                    match query_from_path(&filetype, value, lang_parts.1) {
                        Ok(injections) => {
                            lang_parts.2.injections = injections;
                        }
                        Err(err) => context::error!("{err}"),
                    }
                };

                if let Some(injection) = self
                    .injections
                    .iter_mut()
                    .find(|injection| injection.lang_parts.0 == lang_parts.0)
                {
                    if injection.trees.add_region(Ranges::new(cap_range.clone())) {
                        parts.ranges_to_update.add_ranges([cap_range.clone()]);
                    }
                } else {
                    let mut parser = TsParser::new();
                    parser.set_language(lang_parts.1).unwrap();
                    self.injections.push(Parser {
                        parser,
                        trees: Trees::new([Ranges::new(cap_range.clone())]),
                        lang_parts,
                        forms: forms_from_lang_parts(lang_parts),
                        ranges_to_inject: Ranges::new(0..parts.bytes.len().byte()),
                        injections: Vec::new(),
                        is_parsing: false,
                    });

                    parts.ranges_to_update.add_ranges([cap_range.clone()]);
                };

                observed_injections.push((lang_parts.0, cap_range.clone()));
            }
        }

        for injection in self.injections.iter_mut() {
            let mut to_remove = Vec::new();

            for (i, tree) in injection.trees.intersecting(range.clone()) {
                let range = tree.region.iter().next().unwrap();
                if observed_injections
                    .extract_if(.., |(filetype, r)| {
                        *r == range && *filetype == injection.lang_parts.0
                    })
                    .next()
                    .is_none()
                {
                    parts.ranges_to_update.add_ranges(tree.region.iter());
                    to_remove.push((i, range));
                }
            }

            for (i, range) in to_remove.into_iter().rev() {
                injection.trees.remove(i);
                injection.remove_injections_on(range);
            }
        }

        _ = self.ranges_to_inject.remove_on(range);

        for range in deferred_ranges {
            self.ranges_to_inject.add(range);
        }
    }

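    /// Recursively removes every nested injection tree that intersects
    /// `range`.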
    fn remove_injections_on(&mut self, range: Range<usize>) {
        for injection in self.injections.iter_mut() {
            let trees: Vec<usize> = injection
                .trees
                .intersecting(range.clone())
                .map(|(i, _)| i)
                .collect();

            for i in trees.into_iter().rev() {
                injection.trees.remove(i);
            }

            injection.remove_injections_on(range.clone());
        }
    }

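    /// Computes the indentation for the line of `p` from the indent queries,
    /// in the style of nvim-treesitter's `indent.*` captures (`begin`, `end`,
    /// `dedent`, `branch`, `align`, `zero`, ...).
    ///
    /// Returns `None` when this parser has no opinion (no indent query, or an
    /// `indent.auto` capture), in which case the caller presumably falls back
    /// to another indentation method.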
    pub fn indent_on<'a>(&'a self, p: Point, bytes: &Bytes, opts: PrintOpts) -> Option<usize> {
        let (_, tree) = self.trees.intersecting(p.byte()..p.byte() + 1).next()?;
        let ts_tree = tree.ts_tree.as_ref()?;

        if let Some(indent) = self
            .injections
            .iter()
            .find_map(|injection| injection.indent_on(p, bytes, opts))
        {
            return Some(indent);
        }

        let (.., Queries { indents, .. }) = self.lang_parts;

        let root = ts_tree.root_node();
        let start = bytes.point_at_line(p.line());
        let first_line = bytes
            .point_at_byte(tree.region.iter().next().unwrap().start)
            .line();

        if indents.pattern_count() == 0 {
            return None;
        }

        type Captures<'a> = HashMap<&'a str, HashMap<usize, HashMap<&'a str, Option<&'a str>>>>;
        let mut caps = HashMap::new();
        let q = {
            let mut cursor = QueryCursor::new();
            let buf = TsBuf(bytes);
            cursor
                .matches(indents, root, buf)
                .for_each(|qm: &QueryMatch| {
                    for cap in qm.captures.iter() {
                        let Some(name) =
                            indents.capture_names()[cap.index as usize].strip_prefix("indent.")
                        else {
                            continue;
                        };

                        let nodes = if let Some(nodes) = caps.get_mut(name) {
                            nodes
                        } else {
                            caps.insert(name, HashMap::new());
                            caps.get_mut(name).unwrap()
                        };
                        let props = indents.property_settings(qm.pattern_index).iter();
                        nodes.insert(
                            cap.node.id(),
                            props
                                .map(|p| {
                                    let key = p.key.strip_prefix("indent.").unwrap();
                                    (key, p.value.as_deref())
                                })
                                .collect(),
                        );
                    }
                });

            |caps: &Captures, node: Node, queries: &[&str]| {
                caps.get(queries[0])
                    .and_then(|nodes| nodes.get(&node.id()))
                    .is_some_and(|props| {
                        let key = queries.get(1);
                        key.is_none_or(|key| props.iter().any(|(k, _)| k == key))
                    })
            }
        };

        let indented_start_byte = bytes
            .chars_fwd(start..)
            .unwrap()
            .take_while(|(_, char)| *char != '\n')
            .find_map(|(p, c)| (!c.is_whitespace()).then_some(p));

        let mut opt_node = if let Some(indented_start_byte) = indented_start_byte {
            Some(descendant_in(root, indented_start_byte))
        } else {
            let mut lines = bytes.lines(..start).rev();
            let Some((prev_l, line)) = lines
                .find(|(_, line)| !(line.matches_pat(r"^\s*$").unwrap()))
                .filter(|(l, _)| *l >= first_line)
            else {
                return Some(0);
            };
            let trail = line.chars().rev().take_while(|c| c.is_whitespace()).count();

            let prev_range = bytes.line_range(prev_l);
            let mut node = descendant_in(root, prev_range.end.byte() - (trail + 1));
            if node.kind().contains("comment") {
                let first_node = descendant_in(root, prev_range.start.byte());
                if first_node.id() != node.id() {
                    node = descendant_in(root, node.start_byte() - 1)
                }
            }

            Some(if q(&caps, node, &["end"]) {
                descendant_in(root, start.byte())
            } else {
                node
            })
        };

        if q(&caps, opt_node.unwrap(), &["zero"]) {
            return Some(0);
        }

        let tab = opts.tabstop as i32;
        let mut indent = if root.start_byte() != 0 {
            bytes.indent(bytes.point_at_byte(root.start_byte()), opts) as i32
        } else {
            0
        };

        let mut processed_lines = Vec::new();
        while let Some(node) = opt_node {
            let s_line = node.start_position().row;
            let e_line = node.end_position().row;

            if !q(&caps, node, &["begin"]) && s_line < p.line() && p.line() <= e_line {
                if !q(&caps, node, &["align"]) && q(&caps, node, &["auto"]) {
                    return None;
                } else if q(&caps, node, &["ignore"]) {
                    return Some(0);
                }
            }

            let should_process = !processed_lines.contains(&s_line);

            let mut is_processed = false;

            if should_process
                && ((s_line == p.line() && q(&caps, node, &["branch"]))
                    || (s_line != p.line() && q(&caps, node, &["dedent"])))
            {
                indent -= tab;
                is_processed = true;
            }

            let is_in_err = should_process && node.parent().is_some_and(|p| p.is_error());
            if should_process
                && q(&caps, node, &["begin"])
                && (s_line != e_line || is_in_err || q(&caps, node, &["begin", "immediate"]))
                && (s_line != p.line() || q(&caps, node, &["begin", "start_at_same_line"]))
            {
                is_processed = true;
                indent += tab;
            }

            if is_in_err && !q(&caps, node, &["align"]) {
                let mut cursor = node.walk();
                for child in node.children(&mut cursor) {
                    if q(&caps, child, &["align"]) {
                        let props = caps["align"][&child.id()].clone();
                        caps.get_mut("align").unwrap().insert(node.id(), props);
                    }
                }
            }

            let fd = |node: Node<'a>, delim: &str| -> (Option<Node<'a>>, bool) {
                let mut c = node.walk();
                let child = node.children(&mut c).find(|child| child.kind() == delim);
                let ret = child.map(|child| {
                    let range = bytes.line_range(child.start_position().row);
                    let range = child.range().start_byte..range.end.byte();

                    let is_last_in_line = if let Some(line) = bytes.get_contiguous(range.clone()) {
                        line.split_whitespace().any(|w| w != delim)
                    } else {
                        let line = bytes.slices(range).try_to_string().unwrap();
                        line.split_whitespace().any(|w| w != delim)
                    };

                    (child, is_last_in_line)
                });
                let (child, is_last_in_line) = ret.unzip();
                (child, is_last_in_line.unwrap_or(false))
            };

            if should_process
                && q(&caps, node, &["align"])
                && (s_line != e_line || is_in_err)
                && s_line != p.line()
            {
                let props = &caps["align"][&node.id()];
                let (o_delim_node, o_is_last_in_line) = props
                    .get(&"open_delimiter")
                    .and_then(|delim| delim.map(|d| fd(node, d)))
                    .unwrap_or((Some(node), false));
                let (c_delim_node, c_is_last_in_line) = props
                    .get(&"close_delimiter")
                    .and_then(|delim| delim.map(|d| fd(node, d)))
                    .unwrap_or((Some(node), false));

                if let Some(o_delim_node) = o_delim_node {
                    let o_s_line = o_delim_node.start_position().row;
                    let o_s_col = o_delim_node.start_position().column;
                    let c_s_line = c_delim_node.map(|n| n.start_position().row);

                    let indent_is_absolute = if o_is_last_in_line && should_process {
                        indent += tab;
                        if c_is_last_in_line && c_s_line.is_some_and(|l| l < p.line()) {
                            indent = (indent - tab).max(0);
                        }
                        false
                    } else if c_is_last_in_line
                        && let Some(c_s_line) = c_s_line
                        && (o_s_line != c_s_line && c_s_line < p.line())
                    {
                        indent = (indent - tab).max(0);
                        false
                    } else {
                        let inc = props.get("increment").cloned().flatten();
                        indent = o_s_col as i32 + inc.map(str::parse::<i32>).unwrap().unwrap();
                        true
                    };

                    let avoid_last_matching_next = c_s_line
                        .is_some_and(|c_s_line| c_s_line != o_s_line && c_s_line == p.line())
                        && props.contains_key("avoid_last_matching_next");
                    if avoid_last_matching_next {
                        indent += tab;
                    }
                    is_processed = true;
                    if indent_is_absolute {
                        return Some(indent as usize);
                    }
                }
            }

            if should_process && is_processed {
                processed_lines.push(s_line);
            }
            opt_node = node.parent();
        }

        (indent >= 0).then_some(indent as usize)
    }

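    /// Applies a buffer edit to every tree (and nested injection), marking
    /// the affected trees as needing a re-parse.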
    fn edit(&mut self, edit: &InputEdit) {
        self.parser.reset();
        self.trees.edit(edit);

        for (_, tree) in self
            .trees
            .intersecting_mut(edit.start_byte..edit.new_end_byte)
        {
            tree.needs_parse = true;
        }

        for injection in self.injections.iter_mut() {
            injection.edit(edit);
        }
    }
}

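/// Parses the visible ranges synchronously (no time budget) and returns the
/// buffer's parser, for callers that need an up to date tree right away.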
pub(crate) fn sync_parse<'p>(
    pa: &'p mut Pass,
    handle: &'p Handle,
) -> Option<(&'p Parser, &'p Buffer)> {
    let printed_lines = handle.printed_line_ranges(pa);
    let visible_ranges = get_visible_ranges(&printed_lines);
    let (parser, buffer) = PARSERS.write(pa, handle)?;

    let mut parts = TRACKER.parts(buffer).unwrap();

    apply_changes(&parts, parser);
    parser.parse(&mut parts, &visible_ranges, None, handle);

    Some((parser, buffer))
}

fn ts_tagger() -> Tagger {
    static TAGGER: LazyLock<Tagger> = Tagger::new_static();
    *TAGGER
}

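/// Converts a duat [`Change`] into a tree-sitter [`InputEdit`].
///
/// tree-sitter points are `(row, byte column)`, so the columns of the old and
/// new end positions are derived from the taken and added strings.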
fn input_edit(change: Change<&str>, bytes: &Bytes) -> InputEdit {
    let start = change.start();
    let added = change.added_end();
    let taken = change.taken_end();

    let ts_start = ts_point(start, bytes);
    let ts_taken_end = ts_point_from(taken, (ts_start.column, start), change.taken_str());
    let ts_added_end = ts_point_from(added, (ts_start.column, start), change.added_str());

    InputEdit {
        start_byte: start.byte(),
        old_end_byte: taken.byte(),
        new_end_byte: added.byte(),
        start_position: ts_start,
        old_end_position: ts_taken_end,
        new_end_position: ts_added_end,
    }
}

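/// Whether an incremental parse should hand control back to the event loop:
/// the time budget has elapsed and there are unhandled events pending. A
/// synchronous parse passes `None` for `start` and never yields.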
fn must_yield(start: Option<Instant>) -> bool {
    if let Some(start) = start {
        start.elapsed() >= PARSE_TIMEOUT && duat_core::context::has_unhandled_events()
    } else {
        false
    }
}

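/// Merges adjacent printed line ranges into contiguous ranges, so each one
/// can be parsed and highlighted in a single pass.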
fn get_visible_ranges(printed_lines: &[Range<usize>]) -> Vec<Range<usize>> {
    let mut ranges_to_parse: Vec<Range<usize>> = Vec::new();
    for range in printed_lines {
        if let Some(last) = ranges_to_parse.last_mut()
            && last.end == range.start
        {
            last.end = range.end
        } else {
            ranges_to_parse.push(range.clone())
        }
    }
    ranges_to_parse
}

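/// Converts a duat [`Point`] into a tree-sitter [`TsPoint`], whose column is
/// the number of bytes since the last newline.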
fn ts_point(point: Point, bytes: &Bytes) -> TsPoint {
    let strs = bytes.slices(..point.byte());
    let iter = strs.into_iter().rev();
    let col = iter.take_while(|&b| b != b'\n').count();

    TsPoint::new(point.line(), col)
}

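/// Computes the [`TsPoint`] at `to`, given the column of `from` and the
/// string spanning `from..to`, without rescanning the buffer.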
fn ts_point_from(to: Point, (col, from): (usize, Point), str: &str) -> TsPoint {
    let col = if to.line() == from.line() {
        col + str.len()
    } else {
        str.bytes().rev().take_while(|&b| b != b'\n').count()
    };

    TsPoint::new(to.line(), col)
}

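/// Feeds every pending [`Change`] to the parser as an [`InputEdit`] and marks
/// the changed lines as needing a highlight update.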
#[track_caller]
fn apply_changes(parts: &BufferParts<'_>, parser: &mut Parser) {
    for change in parts.changes.clone() {
        let start = parts.bytes.point_at_line(change.start().line());
        let end = parts.bytes.line_range(change.added_end().line()).end;
        parts.ranges_to_update.add_ranges([start..end]);
        let edit = input_edit(change, parts.bytes);
        parser.edit(&edit);
    }
}

#[track_caller]
fn descendant_in(node: Node, byte: usize) -> Node {
    node.descendant_for_byte_range(byte, byte + 1).unwrap()
}

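/// Builds the read callback used by [`TsParser::parse_with_options`].
///
/// `Bytes::slices(..)` appears to expose the buffer as (at most) two
/// contiguous chunks, gap-buffer style, so the callback returns the rest of
/// whichever chunk contains `byte`.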
fn parser_fn<'a>(bytes: &'a Bytes) -> impl FnMut(usize, TsPoint) -> &'a [u8] {
    let [s0, s1] = bytes.slices(..).to_array();
    // `move` is required so the returned closure owns the two slices.
    move |byte, _point| {
        if byte < s0.len() {
            &s0[byte..]
        } else {
            &s1[byte - s0.len()..]
        }
    }
}

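/// Returns the list of `(FormId, priority)` pairs for a language's highlight
/// captures, memoized per language.
///
/// The form names are `"{capture}.{lang}"`, and the priority of a capture is
/// the index of the first entry in `PRIORITIES` that prefixes its name.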
fn forms_from_lang_parts(
    (lang, _, Queries { highlights, .. }): LangParts<'static>,
) -> &'static [(FormId, u8)] {
    #[rustfmt::skip]
    const PRIORITIES: &[&str] = &[
        "markup", "operator", "comment", "string", "diff", "variable", "module", "label",
        "character", "boolean", "number", "type", "attribute", "property", "function", "constant",
        "constructor", "keyword", "punctuation",
    ];
    type MemoizedForms<'a> = HashMap<&'a str, &'a [(FormId, u8)]>;

    static LISTS: LazyLock<Mutex<MemoizedForms<'static>>> = LazyLock::new(Mutex::default);
    let mut lists = LISTS.lock().unwrap();

    // `.copied()` extracts the `&'static` slice, so the return value doesn't
    // borrow from the mutex guard.
    if let Some(forms) = lists.get(lang).copied() {
        forms
    } else {
        let capture_names = highlights.capture_names();
        let priorities = capture_names.iter().map(|name| {
            PRIORITIES
                .iter()
                .take_while(|p| !name.starts_with(*p))
                .count() as u8
        });

        let ids = form::ids_of_non_static(
            capture_names
                .iter()
                .map(|name| name.to_string() + "." + lang),
        );
        let forms: Vec<(FormId, u8)> = ids.into_iter().zip(priorities).collect();

        lists.insert(lang, forms.leak());
        lists.get(lang).copied().unwrap()
    }
}

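/// A [`TextProvider`] over duat's [`Bytes`], yielding the (at most two)
/// contiguous chunks that make up a requested byte range.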
#[derive(Clone, Copy)]
struct TsBuf<'a>(&'a Bytes);

impl<'a> TextProvider<&'a [u8]> for TsBuf<'a> {
    type I = std::array::IntoIter<&'a [u8], 2>;

    fn text(&mut self, node: tree_sitter::Node) -> Self::I {
        let range = node.range();
        let buffers = self.0.slices(range.start_byte..range.end_byte);
        buffers.to_array().into_iter()
    }
}