1use std::borrow::Cow;
2
3use unicode_width::UnicodeWidthChar;
4
5use crate::toml::TokenIndices;
6use crate::toml::TokenKind;
7use crate::toml::TomlToken;
8use crate::toml::TomlTokens;
9
/// Display width of the `[` and `]` delimiters of an array.
const ARRAY_BRACKETS_WIDTH: usize = 2;

/// Display width of the `", "` separator placed between elements.
const COMMA_SPACE_WIDTH: usize = 2;
16#[tracing::instrument]
27pub fn reflow_arrays(tokens: &mut TomlTokens<'_>, array_width: usize, tab_spaces: usize) {
28 let mut indices = TokenIndices::new();
29 let mut inline_table_depth = 0usize;
30 let mut nesting_depth = 0usize;
31
32 while let Some(i) = indices.next_index(tokens) {
33 match tokens.tokens[i].kind {
34 TokenKind::InlineTableOpen => {
35 inline_table_depth += 1;
36 nesting_depth += 1;
37 }
38 TokenKind::InlineTableClose => {
39 inline_table_depth = inline_table_depth.saturating_sub(1);
40 nesting_depth = nesting_depth.saturating_sub(1);
41 }
42 TokenKind::ArrayOpen => {
43 process_array(
44 tokens,
45 i,
46 inline_table_depth,
47 nesting_depth,
48 array_width,
49 tab_spaces,
50 );
51 nesting_depth += 1;
52 }
53 TokenKind::ArrayClose => {
54 nesting_depth = nesting_depth.saturating_sub(1);
55 }
56 _ => {}
57 }
58 }
59}
60
61fn process_array(
63 tokens: &mut TomlTokens<'_>,
64 open_index: usize,
65 inline_table_depth: usize,
66 nesting_depth: usize,
67 array_width: usize,
68 tab_spaces: usize,
69) {
70 if let Some(action) = determine_array_action(
71 tokens,
72 open_index,
73 inline_table_depth,
74 array_width,
75 tab_spaces,
76 ) {
77 apply_array_action(
78 tokens,
79 open_index,
80 action,
81 tab_spaces,
82 nesting_depth,
83 array_width,
84 );
85 }
86}
87
/// What to do with an array once it has been analyzed; `close` is the token
/// index of the matching `]` at decision time.
enum ArrayAction {
    /// Fold a multi-line array onto a single line.
    Collapse { close: usize },
    /// Fold onto one line, but keep the trailing comment on the last
    /// element's line.
    CollapseWithComment { close: usize },
    /// Break a too-wide single-line array into one element per line.
    Expand { close: usize },
    /// Collapse then re-expand into canonical one-element-per-line form.
    Normalize { close: usize },
    /// Re-wrap uniform-width elements into width-limited groups per line.
    ReflowGrouped { close: usize },
}
101
102fn determine_array_action(
104 tokens: &TomlTokens<'_>,
105 open: usize,
106 inline_table_depth: usize,
107 array_width: usize,
108 tab_spaces: usize,
109) -> Option<ArrayAction> {
110 if inline_table_depth > 0 {
112 return None;
113 }
114
115 let close = find_array_close(tokens, open)?;
116
117 if is_array_vertical(tokens, open, close) {
118 determine_vertical_array_action(tokens, open, close, array_width, tab_spaces)
119 } else {
120 determine_horizontal_array_action(tokens, open, close, array_width, tab_spaces)
121 }
122}
123
124fn determine_vertical_array_action(
126 tokens: &TomlTokens<'_>,
127 open: usize,
128 close: usize,
129 array_width: usize,
130 tab_spaces: usize,
131) -> Option<ArrayAction> {
132 let comment_pos = comment_position(tokens, open, close);
133
134 if should_collapse_array(tokens, open, close, array_width, tab_spaces) {
135 return match comment_pos {
136 CommentPosition::LastElementOnly => Some(ArrayAction::CollapseWithComment { close }),
137 _ => Some(ArrayAction::Collapse { close }),
138 };
139 }
140
141 if is_properly_vertical(tokens, open, close) {
143 return None;
144 }
145
146 let uniform_widths = has_uniform_element_widths(tokens, open, close);
151
152 match (comment_pos, uniform_widths) {
153 (CommentPosition::NonLastElement | CommentPosition::BeforeClose, true) => {
155 Some(ArrayAction::ReflowGrouped { close })
156 }
157 _ => Some(ArrayAction::Normalize { close }),
159 }
160}
161
162fn determine_horizontal_array_action(
164 tokens: &TomlTokens<'_>,
165 open: usize,
166 close: usize,
167 array_width: usize,
168 tab_spaces: usize,
169) -> Option<ArrayAction> {
170 if should_reflow_array(tokens, open, close, array_width, tab_spaces) {
171 Some(ArrayAction::Expand { close })
172 } else {
173 None
174 }
175}
176
177fn apply_array_action(
179 tokens: &mut TomlTokens<'_>,
180 open: usize,
181 action: ArrayAction,
182 tab_spaces: usize,
183 nesting_depth: usize,
184 array_width: usize,
185) {
186 match action {
187 ArrayAction::Collapse { close } => {
188 collapse_array_to_horizontal(tokens, open, close);
189 }
190 ArrayAction::CollapseWithComment { close } => {
191 collapse_with_trailing_comment(tokens, open, close, nesting_depth, tab_spaces);
192 }
193 ArrayAction::Expand { close } => {
194 reflow_array_to_vertical(tokens, open, close, tab_spaces, nesting_depth);
195 }
196 ArrayAction::Normalize { close } => {
197 collapse_array_to_horizontal(tokens, open, close);
198 let new_close = find_array_close(tokens, open).unwrap_or(open);
199 reflow_array_to_vertical(tokens, open, new_close, tab_spaces, nesting_depth);
200 }
201 ArrayAction::ReflowGrouped { close } => {
202 reflow_grouped(tokens, open, close, tab_spaces, nesting_depth, array_width);
203 }
204 }
205}
206
207fn should_reflow_array(
209 tokens: &TomlTokens<'_>,
210 open_index: usize,
211 close_index: usize,
212 array_width: usize,
213 tab_spaces: usize,
214) -> bool {
215 let line_start = find_line_start(tokens, open_index);
217 let line_width: usize = tokens.tokens[line_start..=close_index]
218 .iter()
219 .map(|t| token_width(&t.raw, tab_spaces))
220 .sum();
221
222 line_width > array_width
223}
224
225fn is_array_vertical(tokens: &TomlTokens<'_>, open_index: usize, close_index: usize) -> bool {
227 tokens.tokens[open_index..=close_index]
228 .iter()
229 .any(|t| t.kind == TokenKind::Newline)
230}
231
/// Returns `true` when a multi-line array is already in canonical vertical
/// form: `[` immediately followed by a newline, every top-level comma at
/// end-of-line, and no comment sitting alone on its own line.
fn is_properly_vertical(tokens: &TomlTokens<'_>, open_index: usize, close_index: usize) -> bool {
    // Empty array body: trivially fine.
    if open_index + 1 >= close_index {
        return true;
    }
    // The opening bracket must be followed directly by a newline.
    if tokens.tokens[open_index + 1].kind != TokenKind::Newline {
        return false;
    }

    let mut local_depth = 0;
    for i in (open_index + 1)..close_index {
        match tokens.tokens[i].kind {
            TokenKind::ArrayOpen | TokenKind::InlineTableOpen => local_depth += 1,
            TokenKind::ArrayClose | TokenKind::InlineTableClose => local_depth -= 1,
            TokenKind::ValueSep if local_depth == 0 => {
                // A top-level comma must be followed only by whitespace up
                // to the end of its line.
                let mut j = i + 1;
                while j < close_index && tokens.tokens[j].kind == TokenKind::Whitespace {
                    j += 1;
                }
                if j < close_index && tokens.tokens[j].kind != TokenKind::Newline {
                    return false;
                }
            }
            TokenKind::Comment if local_depth == 0 => {
                // A comment on a line of its own forces a reflow.
                if is_standalone_comment(tokens, i, open_index) {
                    return false;
                }
            }
            _ => {}
        }
    }

    true
}
276
277fn is_standalone_comment(tokens: &TomlTokens<'_>, comment_index: usize, open_index: usize) -> bool {
279 if comment_index <= open_index + 1 {
280 return false;
281 }
282
283 let mut i = comment_index - 1;
285 if tokens.tokens[i].kind == TokenKind::Whitespace {
286 if i > open_index + 1 {
287 i -= 1;
288 } else {
289 return false;
290 }
291 }
292
293 tokens.tokens[i].kind == TokenKind::Newline
294}
295
296fn find_array_close(tokens: &TomlTokens<'_>, open_index: usize) -> Option<usize> {
300 let mut depth = 0;
301 for i in open_index..tokens.len() {
302 match tokens.tokens[i].kind {
303 TokenKind::ArrayOpen => depth += 1,
304 TokenKind::ArrayClose => {
305 depth -= 1;
306 if depth == 0 {
307 return Some(i);
308 }
309 }
310 _ => {}
311 }
312 }
313 None
314}
315
316fn find_line_start(tokens: &TomlTokens<'_>, from_index: usize) -> usize {
318 for i in (0..from_index).rev() {
319 if tokens.tokens[i].kind == TokenKind::Newline {
320 return i + 1;
321 }
322 }
323 0
324}
325
326fn has_uniform_element_widths(
331 tokens: &TomlTokens<'_>,
332 open_index: usize,
333 close_index: usize,
334) -> bool {
335 let widths = collect_element_widths(tokens, open_index, close_index);
336 all_widths_equal(&widths)
337}
338
/// True when all widths are equal (vacuously true for empty input).
fn all_widths_equal(widths: &[usize]) -> bool {
    widths.windows(2).all(|pair| pair[0] == pair[1])
}
346
347fn collect_element_widths(
349 tokens: &TomlTokens<'_>,
350 open_index: usize,
351 close_index: usize,
352) -> Vec<usize> {
353 let mut collector = ElementWidthCollector::new();
354
355 for i in (open_index + 1)..close_index {
356 collector.process_token(&tokens.tokens[i]);
357 }
358
359 collector.widths
360}
361
/// Accumulates the byte widths of an array's top-level elements, treating a
/// whole nested array/inline table as a single element.
struct ElementWidthCollector {
    // Finished element widths, in source order.
    widths: Vec<usize>,
    // Bracket/brace nesting relative to the array being scanned.
    depth: i32,
    // Width accumulated so far for the currently open nested element.
    current_width: usize,
    // True while inside a top-level nested array/inline table.
    in_nested_element: bool,
}

impl ElementWidthCollector {
    fn new() -> Self {
        Self {
            widths: Vec::new(),
            depth: 0,
            current_width: 0,
            in_nested_element: false,
        }
    }

    /// Feeds one token; call in source order for every token between the
    /// array's `[` and `]`. Widths use `raw.len()` (bytes) — they are only
    /// compared against each other, never used for display-column math.
    fn process_token(&mut self, token: &TomlToken<'_>) {
        match token.kind {
            TokenKind::ArrayOpen | TokenKind::InlineTableOpen => self.enter_nested(token),
            TokenKind::ArrayClose | TokenKind::InlineTableClose => self.exit_nested(token),
            TokenKind::Scalar => self.handle_scalar(token),
            TokenKind::ValueSep if self.depth == 0 => self.handle_top_level_comma(),
            // Blanks and comments never count toward element width.
            TokenKind::Whitespace | TokenKind::Newline | TokenKind::Comment => {}
            _ if self.depth > 0 => self.current_width += token.raw.len(),
            _ => {}
        }
    }

    fn enter_nested(&mut self, token: &TomlToken<'_>) {
        self.depth += 1;
        if self.depth == 1 {
            self.in_nested_element = true;
        }
        self.current_width += token.raw.len();
    }

    fn exit_nested(&mut self, token: &TomlToken<'_>) {
        // Count the closer before popping so it belongs to the element.
        self.current_width += token.raw.len();
        self.depth -= 1;
        if self.depth == 0 && self.in_nested_element {
            self.finish_nested_element();
        }
    }

    fn handle_scalar(&mut self, token: &TomlToken<'_>) {
        if self.depth == 0 {
            // A bare top-level scalar is an element all by itself.
            self.widths.push(token.raw.len());
        } else {
            self.current_width += token.raw.len();
        }
    }

    fn handle_top_level_comma(&mut self) {
        // A top-level comma terminates the open nested element, if any.
        if self.in_nested_element {
            self.finish_nested_element();
        }
    }

    fn finish_nested_element(&mut self) {
        self.widths.push(self.current_width);
        self.current_width = 0;
        self.in_nested_element = false;
    }
}
428
429fn token_width(raw: &str, tab_spaces: usize) -> usize {
437 raw.chars()
438 .map(|c| {
439 if c == '\t' {
440 tab_spaces
441 } else {
442 c.width().unwrap_or(0)
443 }
444 })
445 .sum()
446}
447
/// Rewrites the array so each element sits on its own indented line, with a
/// trailing comma after the last element.
fn reflow_array_to_vertical(
    tokens: &mut TomlTokens<'_>,
    open_index: usize,
    close_index: usize,
    tab_spaces: usize,
    nesting_depth: usize,
) {
    // Elements are indented one level deeper than the closing bracket.
    let indent = make_indent(nesting_depth + 1, tab_spaces);
    let close_indent = make_indent(nesting_depth, tab_spaces);

    clear_post_comma_whitespace(tokens, open_index, close_index);
    // May insert a comma token, shifting the close bracket right by one.
    let close_index = ensure_trailing_comma(tokens, open_index, close_index);

    let insertions =
        collect_vertical_insertions(tokens, open_index, close_index, &indent, &close_indent);

    apply_newline_insertions(tokens, insertions);
    tokens.trim_empty_whitespace();
}
471
/// Re-wraps an array of uniform-width elements: first flattens it onto one
/// line, then inserts newlines so each line stays within `array_width`.
fn reflow_grouped(
    tokens: &mut TomlTokens<'_>,
    open_index: usize,
    close_index: usize,
    tab_spaces: usize,
    nesting_depth: usize,
    array_width: usize,
) {
    // Detect this before flattening destroys the line structure.
    let has_standalone_trailing_comment =
        has_standalone_trailing_comment(tokens, open_index, close_index);

    // Flatten: drop newlines/indentation and normalize comma spacing; each
    // step returns the close bracket's shifted index.
    let close = remove_newlines_and_indents(tokens, open_index, close_index);
    let close = remove_pre_comma_whitespace(tokens, open_index, close);
    normalize_comma_spacing(tokens, open_index, close);

    // Spacing normalization may insert tokens; re-locate `]`.
    let close = find_array_close(tokens, open_index).unwrap_or(close);

    let config = GroupingConfig {
        indent: make_indent(nesting_depth + 1, tab_spaces),
        close_indent: make_indent(nesting_depth, tab_spaces),
        array_width,
        tab_spaces,
        has_standalone_trailing_comment,
    };

    let insertions = collect_grouped_insertions(tokens, open_index, close, &config);

    apply_newline_insertions(tokens, insertions);
    remove_trailing_whitespace(tokens);
    tokens.trim_empty_whitespace();
}
511
512fn remove_trailing_whitespace(tokens: &mut TomlTokens<'_>) {
514 let mut i = 0;
515 while i + 1 < tokens.tokens.len() {
516 if tokens.tokens[i].kind == TokenKind::Whitespace
517 && tokens.tokens[i + 1].kind == TokenKind::Newline
518 {
519 tokens.tokens[i].raw = Cow::Borrowed("");
521 }
522 i += 1;
523 }
524}
525
526fn has_standalone_trailing_comment(
528 tokens: &TomlTokens<'_>,
529 open_index: usize,
530 close_index: usize,
531) -> bool {
532 find_trailing_comment_index(tokens, open_index, close_index)
533 .map(|idx| is_on_own_line(tokens, idx, open_index))
534 .unwrap_or(false)
535}
536
537fn find_trailing_comment_index(
540 tokens: &TomlTokens<'_>,
541 open_index: usize,
542 close_index: usize,
543) -> Option<usize> {
544 let idx = skip_backwards(tokens, close_index.saturating_sub(1), open_index, |kind| {
545 matches!(kind, TokenKind::Whitespace | TokenKind::Newline)
546 });
547
548 if idx > open_index && tokens.tokens[idx].kind == TokenKind::Comment {
549 Some(idx)
550 } else {
551 None
552 }
553}
554
555fn is_on_own_line(tokens: &TomlTokens<'_>, index: usize, min_index: usize) -> bool {
557 if index <= min_index {
558 return false;
559 }
560
561 let check = skip_backwards(tokens, index - 1, min_index, |kind| {
563 kind == TokenKind::Whitespace
564 });
565
566 check > min_index && tokens.tokens[check].kind == TokenKind::Newline
567}
568
569fn skip_backwards(
571 tokens: &TomlTokens<'_>,
572 start: usize,
573 min_index: usize,
574 should_skip: impl Fn(TokenKind) -> bool,
575) -> usize {
576 let mut idx = start;
577 while idx > min_index && should_skip(tokens.tokens[idx].kind) {
578 idx -= 1;
579 }
580 idx
581}
582
/// Immutable parameters for the grouped reflow of one array.
struct GroupingConfig {
    // Indentation for element lines (one level deeper than the bracket).
    indent: String,
    // Indentation for the closing bracket's line.
    close_indent: String,
    // Maximum allowed display width per line.
    array_width: usize,
    // Columns a tab character counts for.
    tab_spaces: usize,
    // Whether the array's trailing comment originally had its own line.
    has_standalone_trailing_comment: bool,
}
591
/// Mutable bookkeeping while deciding where grouped-line breaks go.
struct GroupingState<'a> {
    // (token index, indent) pairs where a newline + indent will be inserted.
    insertions: Vec<(usize, String)>,
    // Projected display width of the line currently being filled.
    current_line_width: usize,
    // Width of the opening line up to and including `[`.
    base_width: usize,
    indent: &'a str,
}

impl<'a> GroupingState<'a> {
    fn new(base_width: usize, indent: &'a str) -> Self {
        Self {
            insertions: Vec::new(),
            // NOTE(review): every line (including continuation lines that
            // carry only `indent`) is budgeted as base_width + indent.len();
            // this over-reports continuation lines — confirm intended.
            // `indent.len()` equals its display width while indents are
            // all ASCII spaces (see make_indent).
            current_line_width: base_width + indent.len(),
            base_width,
            indent,
        }
    }

    /// Records a break before `index` and resets the running line width.
    fn insert_newline(&mut self, index: usize) {
        self.insertions.push((index, self.indent.to_owned()));
        self.current_line_width = self.base_width + self.indent.len();
    }

    /// Commits a projected width for the line currently being filled.
    fn update_width(&mut self, projected: usize) {
        self.current_line_width = projected;
    }
}
619
/// Walks the flattened array and decides where newlines go so each line
/// stays within the configured width; returns the insertion points in
/// ascending index order.
fn collect_grouped_insertions(
    tokens: &TomlTokens<'_>,
    open_index: usize,
    close_index: usize,
    config: &GroupingConfig,
) -> Vec<(usize, String)> {
    let base_width = calculate_base_width(tokens, open_index, config.tab_spaces);
    let mut state = GroupingState::new(base_width, &config.indent);

    // The first element always starts on a fresh line after `[`.
    state.insert_newline(open_index + 1);

    let mut local_depth = 0;

    for i in (open_index + 1)..close_index {
        let kind = tokens.tokens[i].kind;
        local_depth += depth_delta(kind);

        // Breaks are only considered at the array's own top level.
        if local_depth != 0 {
            continue;
        }

        match kind {
            TokenKind::Comment => {
                handle_comment_insertion(tokens, i, close_index, &mut state);
            }
            TokenKind::ValueSep => {
                handle_comma_insertion(tokens, i, close_index, config, &mut state);
            }
            _ => {}
        }
    }

    // The closing bracket gets its own line at the outer indent level.
    state
        .insertions
        .push((close_index, config.close_indent.clone()));
    state.insertions
}
660
661fn calculate_base_width(tokens: &TomlTokens<'_>, open_index: usize, tab_spaces: usize) -> usize {
663 let line_start = find_line_start(tokens, open_index);
664 tokens.tokens[line_start..=open_index]
665 .iter()
666 .map(|t| token_width(&t.raw, tab_spaces))
667 .sum()
668}
669
670fn handle_comment_insertion(
672 tokens: &TomlTokens<'_>,
673 comment_index: usize,
674 close_index: usize,
675 state: &mut GroupingState<'_>,
676) {
677 let has_value_after = has_value_after_index(tokens, comment_index, close_index);
679 if has_value_after && comment_index + 1 < close_index {
680 state.insert_newline(comment_index + 1);
681 }
682}
683
684fn handle_comma_insertion(
686 tokens: &TomlTokens<'_>,
687 comma_index: usize,
688 close_index: usize,
689 config: &GroupingConfig,
690 state: &mut GroupingState<'_>,
691) {
692 match peek_after_comma(tokens, comma_index, close_index, config.tab_spaces) {
693 NextAfterComma::Element { width, index } => {
694 let projected_width = state.current_line_width + 2 + width; if projected_width > config.array_width {
696 state.insert_newline(index);
697 } else {
698 state.update_width(projected_width);
699 }
700 }
701 NextAfterComma::TrailingComment if config.has_standalone_trailing_comment => {
702 let comment_idx = skip_whitespace(tokens, comma_index + 1, close_index);
703 state.insert_newline(comment_idx);
704 }
705 _ => {}
706 }
707}
708
709fn skip_whitespace(tokens: &TomlTokens<'_>, start: usize, end: usize) -> usize {
711 let mut idx = start;
712 while idx < end && tokens.tokens[idx].kind == TokenKind::Whitespace {
713 idx += 1;
714 }
715 idx
716}
717
/// What follows a top-level comma inside the (flattened) array.
enum NextAfterComma {
    /// Another element: its display width and the index of its first token.
    Element { width: usize, index: usize },
    /// Only a comment remains before the closing bracket.
    TrailingComment,
    /// Nothing measurable remains (e.g. the comma was the trailing one).
    Nothing,
}
727
/// Looks past a top-level comma to classify and measure whatever comes next.
fn peek_after_comma(
    tokens: &TomlTokens<'_>,
    comma_index: usize,
    close_index: usize,
    tab_spaces: usize,
) -> NextAfterComma {
    let mut i = comma_index + 1;

    // Skip the separator whitespace directly after the comma.
    while i < close_index && tokens.tokens[i].kind == TokenKind::Whitespace {
        i += 1;
    }

    if i >= close_index {
        return NextAfterComma::Nothing;
    }

    // A comment with no value after it is the array's trailing comment.
    if tokens.tokens[i].kind == TokenKind::Comment && !has_value_after_index(tokens, i, close_index)
    {
        return NextAfterComma::TrailingComment;
    }

    let element_start = i;

    let mut width = 0;
    let mut local_depth = 0;

    // Measure the element, treating a nested array/table as one unit: stop
    // at the next top-level comma, comment, or closing bracket.
    while i < close_index {
        let kind = tokens.tokens[i].kind;
        local_depth += depth_delta(kind);

        match kind {
            TokenKind::ValueSep | TokenKind::Comment if local_depth == 0 => break,
            // NOTE(review): the delta is applied before this test, so a
            // nested array's own `]` hits this arm and is not counted —
            // the measured width omits that bracket; confirm intended.
            TokenKind::ArrayClose if local_depth == 0 => break,
            _ => {
                width += token_width(&tokens.tokens[i].raw, tab_spaces);
            }
        }
        i += 1;
    }

    if width > 0 {
        NextAfterComma::Element {
            width,
            index: element_start,
        }
    } else {
        NextAfterComma::Nothing
    }
}
782
783fn ensure_trailing_comma(
787 tokens: &mut TomlTokens<'_>,
788 open_index: usize,
789 close_index: usize,
790) -> usize {
791 match find_last_value_needing_comma(tokens, open_index, close_index) {
792 LastValueResult::AlreadyHasTrailingComma => close_index,
793 LastValueResult::NeedsCommaAfter(idx) => {
794 tokens.tokens.insert(idx + 1, TomlToken::VAL_SEP);
795 close_index + 1
796 }
797 LastValueResult::Empty => close_index,
798 }
799}
800
/// Outcome of scanning for the element a trailing comma must follow.
enum LastValueResult {
    /// A top-level trailing comma is already present.
    AlreadyHasTrailingComma,
    /// Insert a comma right after the token at this index.
    NeedsCommaAfter(usize),
    /// The array holds no values at all.
    Empty,
}
810
811fn find_last_value_needing_comma(
813 tokens: &TomlTokens<'_>,
814 open_index: usize,
815 close_index: usize,
816) -> LastValueResult {
817 let mut last_value_index = None;
818 let mut local_depth = 0;
819
820 for i in (open_index + 1)..close_index {
821 let kind = tokens.tokens[i].kind;
822 local_depth += depth_delta(kind);
823
824 match classify_token_for_trailing_comma(kind, local_depth) {
825 TrailingCommaAction::FoundValue => last_value_index = Some(i),
826 TrailingCommaAction::CheckComma => {
827 if is_trailing_comma(tokens, i, close_index) {
828 return LastValueResult::AlreadyHasTrailingComma;
829 }
830 }
831 TrailingCommaAction::Skip => {}
832 }
833 }
834
835 match last_value_index {
836 Some(idx) => LastValueResult::NeedsCommaAfter(idx),
837 None => LastValueResult::Empty,
838 }
839}
840
841fn depth_delta(kind: TokenKind) -> i32 {
843 match kind {
844 TokenKind::ArrayOpen | TokenKind::InlineTableOpen => 1,
845 TokenKind::ArrayClose | TokenKind::InlineTableClose => -1,
846 _ => 0,
847 }
848}
849
/// How one token affects the trailing-comma scan.
enum TrailingCommaAction {
    /// Token ends a top-level value; remember its index.
    FoundValue,
    /// Top-level comma: test whether it is the trailing one.
    CheckComma,
    /// Token is irrelevant to the scan.
    Skip,
}
856
/// Classifies a token for `find_last_value_needing_comma`. `depth` is the
/// nesting level *after* applying the token's own open/close delta, so a
/// nested `]`/`}` that returns to depth 0 counts as the end of a value.
fn classify_token_for_trailing_comma(kind: TokenKind, depth: i32) -> TrailingCommaAction {
    match kind {
        // Blanks and comments never carry value or comma information.
        TokenKind::Whitespace | TokenKind::Newline | TokenKind::Comment => {
            TrailingCommaAction::Skip
        }
        TokenKind::ValueSep if depth == 0 => TrailingCommaAction::CheckComma,
        // The closer of a nested array/table terminates that value.
        TokenKind::ArrayClose | TokenKind::InlineTableClose if depth == 0 => {
            TrailingCommaAction::FoundValue
        }
        // Any other top-level token (scalar, etc.) is part of a value.
        _ if depth == 0 => TrailingCommaAction::FoundValue,
        _ => TrailingCommaAction::Skip,
    }
}
871
872fn clear_post_comma_whitespace(tokens: &mut TomlTokens<'_>, open_index: usize, close_index: usize) {
876 let indices_to_clear: Vec<usize> =
877 find_clearable_post_comma_whitespace(tokens, open_index, close_index);
878 for i in indices_to_clear {
879 tokens.tokens[i] = TomlToken::EMPTY;
880 }
881}
882
883fn find_clearable_post_comma_whitespace(
887 tokens: &TomlTokens<'_>,
888 open_index: usize,
889 close_index: usize,
890) -> Vec<usize> {
891 let mut result = Vec::new();
892 let mut local_depth = 0;
893
894 for i in (open_index + 1)..close_index {
895 let kind = tokens.tokens[i].kind;
896 local_depth += depth_delta(kind);
897
898 if kind == TokenKind::ValueSep && local_depth == 0 {
899 if let Some(ws_index) = clearable_whitespace_after(tokens, i, close_index) {
900 result.push(ws_index);
901 }
902 }
903 }
904
905 result
906}
907
908fn clearable_whitespace_after(
913 tokens: &TomlTokens<'_>,
914 comma_index: usize,
915 close_index: usize,
916) -> Option<usize> {
917 let ws_index = comma_index + 1;
918 if ws_index >= close_index || tokens.tokens[ws_index].kind != TokenKind::Whitespace {
919 return None;
920 }
921
922 let next_kind = tokens.tokens[(ws_index + 1)..close_index]
924 .iter()
925 .find(|t| t.kind != TokenKind::Whitespace)
926 .map(|t| t.kind);
927
928 if next_kind == Some(TokenKind::Comment) {
930 return None;
931 }
932
933 Some(ws_index)
934}
935
936fn collect_vertical_insertions<'a>(
938 tokens: &TomlTokens<'_>,
939 open_index: usize,
940 close_index: usize,
941 indent: &'a str,
942 close_indent: &'a str,
943) -> Vec<(usize, &'a str)> {
944 let mut insertions = vec![(open_index + 1, indent)];
945 let mut local_depth = 0;
946
947 for i in (open_index + 1)..close_index {
948 let kind = tokens.tokens[i].kind;
949 local_depth += depth_delta(kind);
950
951 if let Some(insert_index) = insertion_point_for_token(tokens, i, close_index, local_depth) {
952 insertions.push((insert_index, indent));
953 }
954 }
955
956 insertions.push((close_index, close_indent));
957 insertions
958}
959
960fn insertion_point_for_token(
964 tokens: &TomlTokens<'_>,
965 index: usize,
966 close_index: usize,
967 depth: i32,
968) -> Option<usize> {
969 if depth != 0 {
970 return None;
971 }
972
973 match tokens.tokens[index].kind {
974 TokenKind::ValueSep => insertion_after_comma(tokens, index, close_index),
975 TokenKind::Comment => insertion_after_comment(tokens, index, close_index),
976 _ => None,
977 }
978}
979
980fn insertion_after_comma(
984 tokens: &TomlTokens<'_>,
985 comma_index: usize,
986 close_index: usize,
987) -> Option<usize> {
988 if is_trailing_comma(tokens, comma_index, close_index) {
989 return None;
990 }
991
992 if is_followed_by_comment(tokens, comma_index, close_index) {
994 return None;
995 }
996
997 Some(comma_index + 1)
998}
999
1000fn insertion_after_comment(
1004 tokens: &TomlTokens<'_>,
1005 comment_index: usize,
1006 close_index: usize,
1007) -> Option<usize> {
1008 if has_value_after_index(tokens, comment_index, close_index) {
1009 Some(comment_index + 1)
1010 } else {
1011 None
1012 }
1013}
1014
1015fn is_followed_by_comment(tokens: &TomlTokens<'_>, comma_index: usize, close_index: usize) -> bool {
1017 tokens.tokens[(comma_index + 1)..close_index]
1018 .iter()
1019 .find(|t| t.kind != TokenKind::Whitespace)
1020 .map(|t| t.kind == TokenKind::Comment)
1021 .unwrap_or(false)
1022}
1023
/// Inserts a newline token (plus an optional indentation token) before each
/// recorded index. Applied in reverse index order so earlier insertion
/// points remain valid while later ones shift the vector.
fn apply_newline_insertions<S: AsRef<str>>(
    tokens: &mut TomlTokens<'_>,
    insertions: Vec<(usize, S)>,
) {
    for (index, indent) in insertions.into_iter().rev() {
        let indent = indent.as_ref();
        // Insert the indent first so it ends up *after* the newline token
        // (the second insert at the same index pushes it right).
        if !indent.is_empty() {
            tokens.tokens.insert(
                index,
                TomlToken {
                    kind: TokenKind::Whitespace,
                    encoding: None,
                    decoded: None,
                    scalar: None,
                    raw: Cow::Owned(indent.to_owned()),
                },
            );
        }
        tokens.tokens.insert(index, TomlToken::NL);
    }
}
1046
1047fn is_trailing_comma(tokens: &TomlTokens<'_>, comma_index: usize, close_index: usize) -> bool {
1049 tokens.tokens[(comma_index + 1)..close_index]
1050 .iter()
1051 .all(|t| {
1052 matches!(
1053 t.kind,
1054 TokenKind::Whitespace | TokenKind::Newline | TokenKind::Comment
1055 )
1056 })
1057}
1058
1059fn calculate_collapsed_width(
1063 tokens: &TomlTokens<'_>,
1064 open_index: usize,
1065 close_index: usize,
1066 tab_spaces: usize,
1067) -> usize {
1068 let prefix_width = calculate_prefix_width(tokens, open_index, tab_spaces);
1069 let content_width = calculate_content_width(tokens, open_index, close_index, tab_spaces);
1070 prefix_width + content_width
1071}
1072
1073fn calculate_prefix_width(tokens: &TomlTokens<'_>, open_index: usize, tab_spaces: usize) -> usize {
1075 let line_start = find_line_start(tokens, open_index);
1076 tokens.tokens[line_start..open_index]
1077 .iter()
1078 .map(|t| token_width(&t.raw, tab_spaces))
1079 .sum()
1080}
1081
/// Width the array body (`[` through `]`) would occupy on a single line,
/// with each top-level comma normalized to ", " and the trailing comma
/// dropped.
fn calculate_content_width(
    tokens: &TomlTokens<'_>,
    open_index: usize,
    close_index: usize,
    tab_spaces: usize,
) -> usize {
    // Fold state: (accumulated width, "previous token was a newline") — the
    // flag lets the indentation token right after a newline be skipped.
    let content_width = ((open_index + 1)..close_index).fold((0, false), |(width, after_nl), i| {
        match collapsed_token_contribution(tokens, i, close_index, tab_spaces, after_nl) {
            Some((w, new_after_nl)) => (width + w, new_after_nl),
            None => (width, false),
        }
    });
    ARRAY_BRACKETS_WIDTH + content_width.0
}
1097
/// Width contribution of one token if the array were collapsed, paired with
/// whether the *next* token follows a newline. `None` means "skip this
/// token entirely" (indentation directly after a newline).
fn collapsed_token_contribution(
    tokens: &TomlTokens<'_>,
    index: usize,
    close_index: usize,
    tab_spaces: usize,
    after_newline: bool,
) -> Option<(usize, bool)> {
    let token = &tokens.tokens[index];

    match token.kind {
        // Newlines vanish on collapse, but mark the following token.
        TokenKind::Newline => Some((0, true)),
        // Indentation after a newline also vanishes.
        TokenKind::Whitespace if after_newline => None,
        // The trailing comma is removed when collapsing.
        TokenKind::ValueSep if is_trailing_comma(tokens, index, close_index) => Some((0, false)),
        // Every other comma is normalized to ", ".
        TokenKind::ValueSep => Some((COMMA_SPACE_WIDTH, false)),
        _ => Some((token_width(&token.raw, tab_spaces), false)),
    }
}
1119
1120fn should_collapse_array(
1122 tokens: &TomlTokens<'_>,
1123 open_index: usize,
1124 close_index: usize,
1125 array_width: usize,
1126 tab_spaces: usize,
1127) -> bool {
1128 match comment_position(tokens, open_index, close_index) {
1130 CommentPosition::None | CommentPosition::LastElementOnly => {}
1131 CommentPosition::NonLastElement | CommentPosition::BeforeClose => return false,
1132 }
1133
1134 let collapsed_width = calculate_collapsed_width(tokens, open_index, close_index, tab_spaces);
1136
1137 collapsed_width <= array_width
1138}
1139
/// Where comments sit inside an array, which constrains how it may reflow.
enum CommentPosition {
    /// No comments at all.
    None,
    /// Only a comment on the same line as the last element.
    LastElementOnly,
    /// A comment attached to (or before) a non-last element.
    NonLastElement,
    /// A standalone comment between the last element and `]`.
    BeforeClose,
}
1151
/// Scan state for classifying comment placement within an array.
struct CommentState {
    // Most recent top-level value token seen, if any.
    last_value_index: Option<usize>,
    // A comment currently trails the most recent value's line.
    has_trailing_comment: bool,
    // Some comment is attached to a value that is not the last one.
    has_non_last_comment: bool,
}

impl CommentState {
    fn new() -> Self {
        Self {
            last_value_index: None,
            has_trailing_comment: false,
            has_non_last_comment: false,
        }
    }

    /// Notes a value token; a comment seen before another value therefore
    /// belonged to a non-last element.
    fn record_value(&mut self, index: usize) {
        if self.has_trailing_comment {
            self.has_non_last_comment = true;
        }
        self.last_value_index = Some(index);
        self.has_trailing_comment = false;
    }

    /// Final classification once the whole array body has been scanned.
    fn into_position(self) -> CommentPosition {
        if self.has_non_last_comment {
            CommentPosition::NonLastElement
        } else if self.has_trailing_comment {
            CommentPosition::LastElementOnly
        } else {
            CommentPosition::None
        }
    }
}
1186
/// Classifies where comments appear between `[` and `]` (top level only).
fn comment_position(
    tokens: &TomlTokens<'_>,
    open_index: usize,
    close_index: usize,
) -> CommentPosition {
    let mut state = CommentState::new();
    let mut local_depth = 0;

    for i in (open_index + 1)..close_index {
        let kind = tokens.tokens[i].kind;
        local_depth += depth_delta(kind);

        match kind {
            TokenKind::Comment => {
                // A comment sitting just before `]` short-circuits the scan.
                if let Some(result) = handle_comment(tokens, i, close_index, &mut state) {
                    return result;
                }
            }
            // A nested closer returning to depth 0 ends a value.
            TokenKind::ArrayClose | TokenKind::InlineTableClose if local_depth == 0 => {
                state.record_value(i);
            }
            TokenKind::Whitespace | TokenKind::Newline | TokenKind::ValueSep => {}
            // Any other top-level token (scalar, etc.) is a value.
            _ if local_depth == 0 => {
                state.record_value(i);
            }
            _ => {}
        }
    }

    state.into_position()
}
1219
/// Updates `state` for a comment token. Returns `Some` to short-circuit the
/// scan when the comment sits alone between the last value and `]`.
fn handle_comment(
    tokens: &TomlTokens<'_>,
    comment_index: usize,
    close_index: usize,
    state: &mut CommentState,
) -> Option<CommentPosition> {
    // A comment seen before any value counts as a non-last-element comment.
    let Some(last_idx) = state.last_value_index else {
        state.has_non_last_comment = true;
        return None;
    };

    // Same line as the latest value: a trailing comment (for now).
    if is_same_line(tokens, last_idx, comment_index) {
        state.has_trailing_comment = true;
        return None;
    }

    // On a later line: either between elements, or just before `]`.
    if has_value_after_index(tokens, comment_index, close_index) {
        state.has_non_last_comment = true;
        None
    } else {
        Some(CommentPosition::BeforeClose)
    }
}
1247
1248fn is_same_line(tokens: &TomlTokens<'_>, from: usize, to: usize) -> bool {
1250 !tokens.tokens[from..to]
1251 .iter()
1252 .any(|t| t.kind == TokenKind::Newline)
1253}
1254
/// True when at least one top-level value token appears strictly after
/// `start` and before `close_index`.
fn has_value_after_index(tokens: &TomlTokens<'_>, start: usize, close_index: usize) -> bool {
    let mut local_depth = 0;
    for i in (start + 1)..close_index {
        let kind = tokens.tokens[i].kind;
        local_depth += depth_delta(kind);

        match kind {
            // Blanks, comments, and commas are never values.
            TokenKind::Whitespace
            | TokenKind::Newline
            | TokenKind::Comment
            | TokenKind::ValueSep => {}
            // A closer that drops below this scan's base depth (e.g. when
            // `start` is inside a nested structure) is ignored; a closer
            // returning exactly to depth 0 falls through and counts as the
            // end of a value.
            TokenKind::ArrayClose | TokenKind::InlineTableClose if local_depth < 0 => {}
            _ if local_depth == 0 => return true,
            _ => {}
        }
    }
    false
}
1274
1275fn collapse_array_to_horizontal(
1277 tokens: &mut TomlTokens<'_>,
1278 open_index: usize,
1279 close_index: usize,
1280) {
1281 let close = remove_newlines_and_indents(tokens, open_index, close_index);
1283 let close = remove_pre_comma_whitespace_and_trailing(tokens, open_index, close);
1284 normalize_comma_spacing(tokens, open_index, close);
1285}
1286
/// Flattens the array but keeps its trailing comment: all elements end up
/// on one indented line between `[` and `]`, with the comment attached.
fn collapse_with_trailing_comment(
    tokens: &mut TomlTokens<'_>,
    open_index: usize,
    close_index: usize,
    nesting_depth: usize,
    tab_spaces: usize,
) {
    // Flatten; each step returns the close bracket's shifted index.
    let close = remove_newlines_and_indents(tokens, open_index, close_index);

    let close = remove_pre_comma_whitespace(tokens, open_index, close);

    normalize_comma_spacing(tokens, open_index, close);

    // Spacing normalization may insert tokens; re-locate `]`.
    let new_close = find_array_close(tokens, open_index).unwrap_or(close);

    let indent = make_indent(nesting_depth + 1, tab_spaces);
    let close_indent = make_indent(nesting_depth, tab_spaces);

    // One break after `[`, one before `]`.
    let insertions = vec![(open_index + 1, indent), (new_close, close_indent)];

    apply_newline_insertions(tokens, insertions);
}
1317
1318fn remove_pre_comma_whitespace(
1320 tokens: &mut TomlTokens<'_>,
1321 open_index: usize,
1322 mut close: usize,
1323) -> usize {
1324 let mut i = open_index + 1;
1325
1326 while i < close {
1327 if is_whitespace_before_comma(tokens, i, close) {
1328 tokens.tokens.remove(i);
1329 close -= 1;
1330 continue;
1331 }
1332 i += 1;
1333 }
1334
1335 close
1336}
1337
1338fn remove_newlines_and_indents(
1342 tokens: &mut TomlTokens<'_>,
1343 open_index: usize,
1344 close_index: usize,
1345) -> usize {
1346 let mut removals: Vec<usize> = Vec::new();
1347 let mut i = open_index + 1;
1348
1349 while i < close_index {
1350 if tokens.tokens[i].kind == TokenKind::Newline {
1351 removals.push(i);
1352 if i + 1 < close_index && tokens.tokens[i + 1].kind == TokenKind::Whitespace {
1353 removals.push(i + 1);
1354 }
1355 }
1356 i += 1;
1357 }
1358
1359 let removal_count = removals.len();
1360 for idx in removals.into_iter().rev() {
1361 tokens.tokens.remove(idx);
1362 }
1363
1364 close_index - removal_count
1365}
1366
1367fn remove_pre_comma_whitespace_and_trailing(
1371 tokens: &mut TomlTokens<'_>,
1372 open_index: usize,
1373 mut close: usize,
1374) -> usize {
1375 let mut i = open_index + 1;
1376
1377 while i < close {
1378 if is_whitespace_before_comma(tokens, i, close) {
1379 tokens.tokens.remove(i);
1380 close -= 1;
1381 continue;
1382 }
1383
1384 if tokens.tokens[i].kind == TokenKind::ValueSep && is_trailing_comma(tokens, i, close) {
1385 tokens.tokens.remove(i);
1386 close -= 1;
1387 continue;
1388 }
1389
1390 i += 1;
1391 }
1392
1393 close
1394}
1395
1396fn is_whitespace_before_comma(tokens: &TomlTokens<'_>, index: usize, close_index: usize) -> bool {
1398 tokens.tokens[index].kind == TokenKind::Whitespace
1399 && index + 1 < close_index
1400 && tokens.tokens[index + 1].kind == TokenKind::ValueSep
1401}
1402
1403fn normalize_comma_spacing(tokens: &mut TomlTokens<'_>, open_index: usize, mut close: usize) {
1405 let mut i = open_index + 1;
1406
1407 while i < close {
1408 if tokens.tokens[i].kind == TokenKind::ValueSep
1409 && i + 1 < close
1410 && ensure_single_space_after(tokens, i)
1411 {
1412 close += 1; i += 1; }
1415 i += 1;
1416 }
1417}
1418
1419fn ensure_single_space_after(tokens: &mut TomlTokens<'_>, index: usize) -> bool {
1423 let next_index = index + 1;
1424 if next_index >= tokens.len() {
1425 return false;
1426 }
1427
1428 let next = &tokens.tokens[next_index];
1429 if next.kind == TokenKind::Whitespace {
1430 if next.raw != " " {
1431 tokens.tokens[next_index] = make_single_space_token();
1432 }
1433 false
1434 } else {
1435 tokens.tokens.insert(next_index, make_single_space_token());
1436 true
1437 }
1438}
1439
1440fn make_single_space_token() -> TomlToken<'static> {
1442 TomlToken {
1443 kind: TokenKind::Whitespace,
1444 encoding: None,
1445 decoded: None,
1446 scalar: None,
1447 raw: Cow::Borrowed(" "),
1448 }
1449}
1450
/// Produces the leading whitespace for one line of a vertical array:
/// `depth` nesting levels of `tab_spaces` spaces each.
fn make_indent(depth: usize, tab_spaces: usize) -> String {
    let width = depth * tab_spaces;
    std::iter::repeat(' ').take(width).collect()
}
1455
#[cfg(test)]
mod test {
    // Snapshot tests: every case feeds a TOML document through
    // `reflow_arrays` and compares the re-rendered text against a snapbox
    // snapshot, then re-parses the output to prove it is still valid TOML.
    use snapbox::assert_data_eq;
    use snapbox::str;
    use snapbox::IntoData;

    use crate::toml::TomlTokens;

    // Indent unit (spaces per nesting level) used by every test here.
    const DEFAULT_TAB_SPACES: usize = 4;

    /// Reflows `input` at `max_width` and asserts the result matches
    /// `expected`; additionally re-parses the formatted output and panics
    /// with the collected parse errors if the formatter ever produced
    /// invalid TOML.
    #[track_caller]
    fn valid(input: &str, max_width: usize, expected: impl IntoData) {
        let mut tokens = TomlTokens::parse(input);
        super::reflow_arrays(&mut tokens, max_width, DEFAULT_TAB_SPACES);
        let actual = tokens.to_string();

        assert_data_eq!(&actual, expected);

        let (_, errors) = toml::de::DeTable::parse_recoverable(&actual);
        if !errors.is_empty() {
            use std::fmt::Write as _;
            let mut result = String::new();
            writeln!(&mut result, "---").unwrap();
            for error in errors {
                writeln!(&mut result, "{error}").unwrap();
                writeln!(&mut result, "---").unwrap();
            }
            panic!("failed to parse\n---\n{actual}\n{result}");
        }
    }

    // --- basic width-threshold behavior ---

    #[test]
    fn short_array_not_reflowed() {
        valid(
            r#"deps = ["a", "b"]
"#,
            80,
            str![[r#"
deps = ["a", "b"]

"#]],
        );
    }

    #[test]
    fn long_array_reflowed() {
        valid(
            r#"deps = ["foo", "bar", "baz"]
"#,
            20,
            str![[r#"
deps = [
    "foo",
    "bar",
    "baz",
]

"#]],
        );
    }

    #[test]
    fn already_vertical_not_modified() {
        valid(
            r#"deps = [
    "foo",
    "bar",
]
"#,
            20,
            str![[r#"
deps = [
    "foo",
    "bar",
]

"#]],
        );
    }

    #[test]
    fn nested_array_reflowed() {
        valid(
            r#"matrix = [[1, 2, 3], [4, 5, 6]]
"#,
            20,
            str![[r#"
matrix = [
    [1, 2, 3],
    [4, 5, 6],
]

"#]],
        );
    }

    #[test]
    fn deeply_nested_array() {
        valid(
            r#"x = [[[1]]]
"#,
            5,
            str![[r#"
x = [
    [
        [
            1,
        ],
    ],
]

"#]],
        );
    }

    #[test]
    fn deeply_nested_partial_reflow() {
        valid(
            r#"x = [[[1]]]
"#,
            10,
            str![[r#"
x = [
    [[1]],
]

"#]],
        );
    }

    // --- inline tables inside arrays (never reflowed themselves) ---

    #[test]
    fn array_with_inline_table() {
        valid(
            r#"deps = [{name = "foo"}, {name = "bar"}]
"#,
            30,
            str![[r#"
deps = [
    {name = "foo"},
    {name = "bar"},
]

"#]],
        );
    }

    #[test]
    fn empty_array_not_reflowed() {
        valid(
            r#"deps = []
"#,
            10,
            str![[r#"
deps = []

"#]],
        );
    }

    #[test]
    fn array_at_exact_max_width() {
        valid(
            r#"a = [1, 2]
"#,
            10,
            str![[r#"
a = [1, 2]

"#]],
        );
    }

    #[test]
    fn array_one_over_max_width() {
        valid(
            r#"a = [1, 2]
"#,
            9,
            str![[r#"
a = [
    1,
    2,
]

"#]],
        );
    }

    #[test]
    fn max_width_zero_reflows_everything() {
        valid(
            r#"a = [1]
"#,
            0,
            str![[r#"
a = [
    1,
]

"#]],
        );
    }

    #[test]
    fn max_width_max_reflows_nothing() {
        valid(
            r#"deps = ["foo", "bar", "baz", "qux", "quux"]
"#,
            usize::MAX,
            str![[r#"
deps = ["foo", "bar", "baz", "qux", "quux"]

"#]],
        );
    }

    #[test]
    fn long_inline_table_not_reflowed() {
        valid(
            r#"deps = [{name = "very-long-name", version = "1.0.0", features = ["a", "b"]}]
"#,
            40,
            str![[r#"
deps = [
    {name = "very-long-name", version = "1.0.0", features = ["a", "b"]},
]

"#]],
        );
    }

    #[test]
    fn inline_table_containing_array() {
        valid(
            r#"dep = [{features = ["a", "b", "c"]}]
"#,
            20,
            str![[r#"
dep = [
    {features = ["a", "b", "c"]},
]

"#]],
        );
    }

    #[test]
    fn nested_inline_tables() {
        valid(
            r#"items = [{outer = {inner = "value"}}]
"#,
            20,
            str![[r#"
items = [
    {outer = {inner = "value"}},
]

"#]],
        );
    }

    // --- comments, trailing commas, long elements ---

    #[test]
    fn array_with_comments() {
        valid(
            r#"deps = ["foo", "bar"] # comment
"#,
            20,
            str![[r#"
deps = [
    "foo",
    "bar",
] # comment

"#]],
        );
    }

    #[test]
    fn array_with_trailing_comma() {
        valid(
            r#"deps = ["foo", "bar",]
"#,
            15,
            str![[r#"
deps = [
    "foo",
    "bar",
]

"#]],
        );
    }

    #[test]
    fn very_long_single_element() {
        valid(
            r#"deps = ["this-is-a-very-long-package-name"]
"#,
            20,
            str![[r#"
deps = [
    "this-is-a-very-long-package-name",
]

"#]],
        );
    }

    #[test]
    fn array_in_table_section() {
        valid(
            r#"[package]
keywords = ["cli", "toml", "formatter"]
"#,
            30,
            str![[r#"
[package]
keywords = [
    "cli",
    "toml",
    "formatter",
]

"#]],
        );
    }

    #[test]
    fn unicode_values_in_array() {
        valid(
            r#"names = ["日本語", "中文", "한국어"]
"#,
            20,
            str![[r#"
names = [
    "日本語",
    "中文",
    "한국어",
]

"#]],
        );
    }

    // --- multi-line strings (their interior lines are never re-indented) ---

    #[test]
    fn multiline_string_in_array() {
        valid(
            r#"items = ["""
multi
line
"""]
"#,
            10,
            str![[r#"
items = [
    """
multi
line
""",
]

"#]],
        );
    }

    #[test]
    fn vertical_multiline_string_collapses_when_fits() {
        valid(
            r#"x = [
    """
multi
""",
]
"#,
            80,
            str![[r#"
x = ["""
multi
"""]

"#]],
        );
    }

    #[test]
    fn multiline_literal_string_preserved() {
        valid(
            r#"x = [
    '''
literal
''',
]
"#,
            80,
            str![[r#"
x = ['''
literal
''']

"#]],
        );
    }

    // --- key width counts toward the line budget ---

    #[test]
    fn dotted_key_width_included() {
        valid(
            r#"foo.bar.baz = ["a", "b"]
"#,
            23,
            str![[r#"
foo.bar.baz = [
    "a",
    "b",
]

"#]],
        );
    }

    #[test]
    fn dotted_key_at_exact_width() {
        valid(
            r#"foo.bar.baz = ["a", "b"]
"#,
            24,
            str![[r#"
foo.bar.baz = ["a", "b"]

"#]],
        );
    }

    #[test]
    fn quoted_key() {
        valid(
            r#""my.key" = ["x", "y"]
"#,
            15,
            str![[r#"
"my.key" = [
    "x",
    "y",
]

"#]],
        );
    }

    #[test]
    fn literal_strings() {
        valid(
            r#"paths = ['foo', 'bar']
"#,
            15,
            str![[r#"
paths = [
    'foo',
    'bar',
]

"#]],
        );
    }

    #[test]
    fn mixed_types_in_array() {
        valid(
            r#"mixed = [1, "two", true, 3.14]
"#,
            20,
            str![[r#"
mixed = [
    1,
    "two",
    true,
    3.14,
]

"#]],
        );
    }

    #[test]
    fn multiple_arrays_same_section() {
        valid(
            r#"[pkg]
a = [1, 2, 3]
b = [4, 5, 6, 7, 8]
"#,
            15,
            str![[r#"
[pkg]
a = [1, 2, 3]
b = [
    4,
    5,
    6,
    7,
    8,
]

"#]],
        );
    }

    #[test]
    fn array_at_start_of_file() {
        valid(
            r#"x = ["a", "b", "c"]
"#,
            15,
            str![[r#"
x = [
    "a",
    "b",
    "c",
]

"#]],
        );
    }

    #[test]
    fn empty_string_elements() {
        valid(
            r#"x = ["", "a", ""]
"#,
            12,
            str![[r#"
x = [
    "",
    "a",
    "",
]

"#]],
        );
    }

    #[test]
    fn nested_only_inner_exceeds() {
        valid(
            r#"x = [[1, 2, 3, 4]]
"#,
            12,
            str![[r#"
x = [
    [
        1,
        2,
        3,
        4,
    ],
]

"#]],
        );
    }

    #[test]
    fn very_long_key_array_still_reflows() {
        valid(
            r#"this_is_a_very_long_key = [1]
"#,
            20,
            str![[r#"
this_is_a_very_long_key = [
    1,
]

"#]],
        );
    }

    // --- collapsing vertical / mixed-style arrays ---

    #[test]
    fn vertical_collapses_when_fits() {
        valid(
            r#"x = [
    "a",
    "b",
]
"#,
            40,
            str![[r#"
x = ["a", "b"]

"#]],
        );
    }

    #[test]
    fn vertical_stays_when_too_wide() {
        valid(
            r#"x = [
    "aaa",
    "bbb",
]
"#,
            10,
            str![[r#"
x = [
    "aaa",
    "bbb",
]

"#]],
        );
    }

    #[test]
    fn mixed_style_collapses_when_fits() {
        valid(
            r#"x = ["a", "b",
    "c"]
"#,
            40,
            str![[r#"
x = ["a", "b", "c"]

"#]],
        );
    }

    #[test]
    fn mixed_style_normalizes_when_too_wide() {
        valid(
            r#"x = ["aaa", "bbb",
    "ccc"]
"#,
            10,
            str![[r#"
x = [
    "aaa",
    "bbb",
    "ccc",
]

"#]],
        );
    }

    // --- comments pin elements to their lines ---

    #[test]
    fn vertical_with_comment_stays_vertical() {
        valid(
            r#"x = [
    "a", # comment
    "b",
]
"#,
            80,
            str![[r#"
x = [
    "a", # comment
    "b",
]

"#]],
        );
    }

    #[test]
    fn mixed_style_with_comment_normalized() {
        valid(
            r#"x = ["a", "b", # comment
    "c",
]
"#,
            80,
            str![[r#"
x = [
    "a", "b", # comment
    "c",
]

"#]],
        );
    }

    #[test]
    fn grouped_elements_with_comments_normalized() {
        valid(
            r#"deps = [
    "a", "b", "c",
    "aaaaaaaaaaaa", "bbbbbbbbbbbb", "cccccccccccc", # comment about this group
    "x", "y", "z", # fits
]
"#,
            60,
            str![[r#"
deps = [
    "a",
    "b",
    "c",
    "aaaaaaaaaaaa",
    "bbbbbbbbbbbb",
    "cccccccccccc", # comment about this group
    "x",
    "y",
    "z", # fits
]

"#]],
        );
    }

    #[test]
    fn standalone_comment_groups_horizontally() {
        valid(
            r#"deps = [
    "a",
    "b",
    # comment about elements below
    "c",
    "d",
]
"#,
            200,
            str![[r#"
deps = [
    "a", "b", # comment about elements below
    "c", "d",
]

"#]],
        );
    }

    #[test]
    fn comment_on_last_element_collapses() {
        valid(
            r#"x = [
    "a",
    "b", # comment
]
"#,
            80,
            str![[r#"
x = [
    "a", "b", # comment
]

"#]],
        );
    }

    #[test]
    fn comment_before_close_stays_vertical() {
        valid(
            r#"x = [
    "a",
    "b",
    # trailing comment
]
"#,
            80,
            str![[r#"
x = [
    "a", "b",
    # trailing comment
]

"#]],
        );
    }

    #[test]
    fn nested_vertical_collapses() {
        valid(
            r#"x = [
    [
        1
    ],
    [
        2
    ],
]
"#,
            40,
            str![[r#"
x = [[1], [2]]

"#]],
        );
    }

    #[test]
    fn collapse_removes_trailing_comma() {
        valid(
            r#"x = [
    "a",
    "b",
]
"#,
            40,
            str![[r#"
x = ["a", "b"]

"#]],
        );
    }

    #[test]
    fn collapse_normalizes_spacing() {
        valid(
            r#"x = [
    "a" ,
    "b" ,
]
"#,
            40,
            str![[r#"
x = ["a", "b"]

"#]],
        );
    }

    // --- Unicode display width (CJK/emoji double width, combining marks) ---

    #[test]
    fn cjk_double_width_causes_reflow() {
        valid(
            r#"a = ["日"]
"#,
            9,
            str![[r#"
a = [
    "日",
]

"#]],
        );
    }

    #[test]
    fn cjk_double_width_fits_at_correct_width() {
        valid(
            r#"a = ["日"]
"#,
            10,
            str![[r#"
a = ["日"]

"#]],
        );
    }

    #[test]
    fn emoji_double_width_causes_reflow() {
        valid(
            r#"a = ["🎉"]
"#,
            9,
            str![[r#"
a = [
    "🎉",
]

"#]],
        );
    }

    #[test]
    fn emoji_double_width_fits_at_correct_width() {
        valid(
            r#"a = ["🎉"]
"#,
            10,
            str![[r#"
a = ["🎉"]

"#]],
        );
    }

    #[test]
    fn combining_character_zero_width() {
        valid(
            "a = [\"e\u{0301}\"]\n",
            9,
            "a = [\"e\u{0301}\"]\n",
        );
    }

    #[test]
    fn combining_character_reflows_at_boundary() {
        valid(
            "a = [\"e\u{0301}\"]\n",
            8,
            "a = [\n    \"e\u{0301}\",\n]\n",
        );
    }

    #[test]
    fn vertical_cjk_collapses_at_correct_width() {
        valid(
            r#"x = [
    "日",
    "月",
]
"#,
            16,
            str![[r#"
x = ["日", "月"]

"#]],
        );
    }

    #[test]
    fn vertical_cjk_stays_vertical_when_too_wide() {
        valid(
            r#"x = [
    "日",
    "月",
]
"#,
            15,
            str![[r#"
x = [
    "日",
    "月",
]

"#]],
        );
    }

    #[test]
    fn deeply_nested_within_limit() {
        let nested = "x = [[[[[[[[[[1]]]]]]]]]]\n";
        valid(
            nested,
            5,
            str![[r#"
x = [
    [
        [
            [
                [
                    [
                        [
                            [
                                [
                                    [
                                        1,
                                    ],
                                ],
                            ],
                        ],
                    ],
                ],
            ],
        ],
    ],
]

"#]],
        );
    }

    // --- tab handling (tabs counted as `tab_spaces` wide) ---

    #[test]
    fn tabs_in_array_counted_as_tab_spaces() {
        valid("x = [\t1]\n", 11, "x = [\t1]\n");
    }

    #[test]
    fn tabs_in_array_cause_reflow_at_boundary() {
        valid("x = [\t1]\n", 10, "x = [\n    \t1,\n]\n");
    }

    #[test]
    fn tabs_between_elements_normalized_on_collapse() {
        valid(
            "x = [\n\t1,\n\t2,\n]\n",
            40,
            str![[r#"
x = [1, 2]

"#]],
        );
    }

    #[test]
    fn multiple_tabs_expand_correctly() {
        valid(
            "x = [\t\t1]\n",
            12,
            str![[r#"
x = [
    1,
]

"#]],
        );
    }

    // --- nested arrays: inner/outer expansion combinations ---

    #[test]
    fn vertical_outer_with_long_horizontal_inner_expands_inner() {
        valid(
            r#"x = [
    [1, 2, 3, 4, 5],
]
"#,
            15,
            str![[r#"
x = [
    [
        1,
        2,
        3,
        4,
        5,
    ],
]

"#]],
        );
    }

    #[test]
    fn vertical_outer_with_short_horizontal_inner_collapses() {
        valid(
            r#"x = [
    [1, 2],
]
"#,
            40,
            str![[r#"
x = [[1, 2]]

"#]],
        );
    }

    #[test]
    fn horizontal_outer_fits_stays_horizontal() {
        valid(
            r#"x = [[1], [2]]
"#,
            20,
            str![[r#"
x = [[1], [2]]

"#]],
        );
    }

    #[test]
    fn outer_expands_inner_fits() {
        valid(
            r#"x = [[1], [2]]
"#,
            10,
            str![[r#"
x = [
    [1],
    [2],
]

"#]],
        );
    }

    #[test]
    fn outer_expands_inner_also_expands() {
        valid(
            r#"x = [[1, 2, 3], [4, 5, 6]]
"#,
            10,
            str![[r#"
x = [
    [
        1,
        2,
        3,
    ],
    [
        4,
        5,
        6,
    ],
]

"#]],
        );
    }

    #[test]
    fn mixed_nesting_all_inner_fit() {
        valid(
            r#"x = [[1], [2], [3]]
"#,
            15,
            str![[r#"
x = [
    [1],
    [2],
    [3],
]

"#]],
        );
    }

    #[test]
    fn mixed_nesting_one_inner_expands() {
        valid(
            r#"x = [[1], [2, 3, 4, 5], [6]]
"#,
            15,
            str![[r#"
x = [
    [1],
    [
        2,
        3,
        4,
        5,
    ],
    [6],
]

"#]],
        );
    }

    #[test]
    fn three_level_nesting_all_expand() {
        valid(
            r#"x = [[[1, 2]]]
"#,
            5,
            str![[r#"
x = [
    [
        [
            1,
            2,
        ],
    ],
]

"#]],
        );
    }

    #[test]
    fn three_level_nesting_small_width() {
        valid(
            r#"x = [[[1]]]
"#,
            8,
            str![[r#"
x = [
    [
        [
            1,
        ],
    ],
]

"#]],
        );
    }

    // --- empty / boundary cases ---

    #[test]
    fn empty_vertical_array_collapses() {
        valid(
            r#"x = [
]
"#,
            80,
            str![[r#"
x = []

"#]],
        );
    }

    #[test]
    fn empty_vertical_array_with_whitespace_collapses() {
        valid(
            r#"x = [

]
"#,
            80,
            str![[r#"
x = []

"#]],
        );
    }

    #[test]
    fn long_string_width_at_boundary() {
        valid(
            r#"x = ["abcdefghij"]
"#,
            18,
            str![[r#"
x = ["abcdefghij"]

"#]],
        );
    }

    #[test]
    fn long_string_width_causes_reflow() {
        valid(
            r#"x = ["abcdefghij"]
"#,
            17,
            str![[r#"
x = [
    "abcdefghij",
]

"#]],
        );
    }

    #[test]
    fn string_with_special_chars() {
        valid(
            r#"x = ["a-b_c.d"]
"#,
            14,
            str![[r#"
x = [
    "a-b_c.d",
]

"#]],
        );
    }

    #[test]
    fn array_with_only_whitespace_preserved() {
        valid(
            r#"x = [ ]
"#,
            20,
            str![[r#"
x = [ ]

"#]],
        );
    }
}