// makepad_widget/rusteditor.rs

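//! Rust-aware source editor for Makepad: produces typed token chunks for the
//! generic `TextEditor` to draw, and provides a minimal built-in auto-formatter.
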
use makepad_render::*;

use crate::textbuffer::*;
use crate::texteditor::*;

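/// Rust code editor: wraps the generic `TextEditor` and layers Rust tokenization
/// and a simple auto-formatter on top of it.
///
/// Illustrative usage sketch (hypothetical host widget; `cx`, `event` and
/// `text_buffer` are assumed to come from the surrounding application):
///
/// ```ignore
/// let mut rust_editor = RustEditor::proto(cx);
/// // event flow: forward events; auto-format requests are handled internally
/// rust_editor.handle_rust_editor(cx, event, &mut text_buffer);
/// // draw flow: tokenize on demand and draw the highlighted buffer
/// rust_editor.draw_rust_editor(cx, &mut text_buffer);
/// ```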
#[derive(Clone)]
pub struct RustEditor {
    pub text_editor: TextEditor,
}

impl RustEditor {
    pub fn proto(cx: &mut Cx) -> Self {
        let editor = Self {
            text_editor: TextEditor::proto(cx),
        };
        //tab.animator.default = tab.anim_default(cx);
        editor
    }
    
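    /// Forwards `event` to the wrapped `TextEditor`; when it reports
    /// `TextEditorEvent::AutoFormat`, runs `RustTokenizer::auto_format` over the
    /// buffer, replaces the formatted lines and redraws the view.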
    pub fn handle_rust_editor(&mut self, cx: &mut Cx, event: &mut Event, text_buffer: &mut TextBuffer) -> TextEditorEvent {
        let ce = self.text_editor.handle_text_editor(cx, event, text_buffer);
        match ce {
            TextEditorEvent::AutoFormat => {
                let formatted = RustTokenizer::auto_format(text_buffer, false).out_lines;
                self.text_editor.cursors.replace_lines_formatted(formatted, text_buffer);
                self.text_editor.view.redraw_view_area(cx);
            },
            _ => ()
        }
        ce
    }
    
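    /// Re-tokenizes the buffer when its token chunks are out of date, then draws
    /// every token chunk through the wrapped `TextEditor`.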
    pub fn draw_rust_editor(&mut self, cx: &mut Cx, text_buffer: &mut TextBuffer) {
        if text_buffer.needs_token_chunks() && text_buffer.lines.len() >0 {
            let mut state = TokenizerState::new(&text_buffer.lines);
            let mut tokenizer = RustTokenizer::new();
            let mut pair_stack = Vec::new();
            loop {
                let offset = text_buffer.flat_text.len();
                let token_type = tokenizer.next_token(&mut state, &mut text_buffer.flat_text, &text_buffer.token_chunks);
                TokenChunk::push_with_pairing(&mut text_buffer.token_chunks, &mut pair_stack, state.next, offset, text_buffer.flat_text.len(), token_type);
                if token_type == TokenType::Eof {
                    break
                }
            }
        }
        
        if self.text_editor.begin_text_editor(cx, text_buffer).is_err() {return}
        
        for (index, token_chunk) in text_buffer.token_chunks.iter_mut().enumerate() {
            self.text_editor.draw_chunk(cx, index, &text_buffer.flat_text, token_chunk, &text_buffer.messages.cursors);
        }
        
        self.text_editor.end_text_editor(cx, text_buffer);
    }
}

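/// Streaming Rust tokenizer. The fields carry state that spans lines (single vs.
/// block comments, comment nesting depth, unterminated strings) so `next_token`
/// can resume a multi-line construct on the following call.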
pub struct RustTokenizer {
    pub comment_single: bool,
    pub comment_depth: usize,
    pub in_string: bool
}

impl RustTokenizer {
    pub fn new() -> RustTokenizer {
        RustTokenizer {
            comment_single: false,
            comment_depth: 0,
            in_string: false
        }
    }
    
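    /// Produces the next token: appends its characters to `chunk` (the flat text)
    /// and returns its `TokenType`. Continues multi-line strings and nested block
    /// comments based on the state stored in `self`.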
    pub fn next_token<'a>(&mut self, state: &mut TokenizerState<'a>, chunk: &mut Vec<char>, token_chunks: &Vec<TokenChunk>) -> TokenType {
        let start = chunk.len();
        //chunk.truncate(0);
        if self.in_string{
            if state.next == ' ' || state.next == '\t'{
                while state.next == ' ' || state.next == '\t'{
                    chunk.push(state.next);
                    state.advance_with_cur();
                }
                return TokenType::Whitespace;
            }
            loop {
                if state.next == '\0' {
                    self.in_string = false;
                    return TokenType::StringChunk
                }
                else if state.next == '\n' {
                    if (chunk.len() - start)>0 {
                        return TokenType::StringChunk
                    }
                    chunk.push(state.next);
                    state.advance_with_cur();
                    return TokenType::Newline
                }
                else if state.next == '"' && state.cur != '\\' {
                    if (chunk.len() - start)>0 {
                        return TokenType::StringChunk
                    }
                    chunk.push(state.next);
                    state.advance_with_cur();
                    self.in_string = false;
                    return TokenType::StringMultiEnd
                }
                else {
                    chunk.push(state.next);
                    state.advance_with_cur();
                }
            }
            
        }
        else if self.comment_depth >0 { // parse comments
            loop {
                if state.next == '\0' {
                    self.comment_depth = 0;
                    return TokenType::CommentChunk
                }
                if state.next == '/' {
                    chunk.push(state.next);
                    state.advance();
                    if state.next == '*' {
                        chunk.push(state.next);
                        state.advance();
                        self.comment_depth += 1;
                    }
                }
                else if state.next == '*' {
                    chunk.push(state.next);
                    state.advance();
                    if state.next == '/' {
                        self.comment_depth -= 1;
                        chunk.push(state.next);
                        state.advance();
                        if self.comment_depth == 0 {
                            return TokenType::CommentMultiEnd
                        }
                    }
                }
                else if state.next == '\n' {
                    if self.comment_single {
                        self.comment_depth = 0;
                    }
                    // output current line
                    if (chunk.len() - start)>0 {
                        return TokenType::CommentChunk
                    }
                    
                    chunk.push(state.next);
                    state.advance();
                    return TokenType::Newline
                }
                else if state.next == ' ' {
                    if (chunk.len() - start)>0 {
                        return TokenType::CommentChunk
                    }
                    while state.next == ' ' {
                        chunk.push(state.next);
                        state.advance();
                    }
                    return TokenType::Whitespace
                }
                else {
                    chunk.push(state.next);
                    state.advance();
                }
            }
        }
        else {
            state.advance_with_cur();
            match state.cur {
                '\0' => { // eof insert a terminating space and end
                    chunk.push(' ');
                    return TokenType::Eof
                },
                '\n' => {
                    chunk.push('\n');
                    return TokenType::Newline
                },
                ' ' | '\t' => { // eat as many spaces as possible
                    chunk.push(state.cur);
                    while state.next == ' ' || state.next == '\t'{
                        chunk.push(state.next);
                        state.advance();
                    }
                    return TokenType::Whitespace;
                },
                '/' => { // parse comment
                    chunk.push(state.cur);
                    if state.next == '/' {
                        chunk.push(state.next);
                        state.advance();
                        self.comment_depth = 1;
                        self.comment_single = true;
                        return TokenType::CommentLine;
                    }
                    if state.next == '*' { // start parsing a multiline comment
                        //let mut comment_depth = 1;
                        chunk.push(state.next);
                        state.advance();
                        self.comment_single = false;
                        self.comment_depth = 1;
                        return TokenType::CommentMultiBegin;
                    }
                    if state.next == '=' {
                        chunk.push(state.next);
                        state.advance();
                    }
                    return TokenType::Operator;
                },
                '\'' => { // parse char literal or lifetime annotation
                    chunk.push(state.cur);
                    
                    if Self::parse_rust_escape_char(state, chunk) { // escape char or unicode
                        if state.next == '\'' { // parsed to closing '
                            chunk.push(state.next);
                            state.advance();
                            return TokenType::String;
                        }
                        return TokenType::TypeName;
                    }
                    else { // parse a single char or lifetime
                        let offset = state.offset;
                        let (is_ident, _) = Self::parse_rust_ident_tail(state, chunk);
                        if is_ident && ((state.offset - offset) >1 || state.next != '\'') {
                            return TokenType::TypeName;
                        }
                        if state.next != '\n' {
                            if (state.offset - offset) == 0 { // not an identifier char
                                chunk.push(state.next);
                                state.advance();
                            }
                            if state.next == '\'' { // lifetime identifier
                                chunk.push(state.next);
                                state.advance();
                            }
                            return TokenType::String;
                        }
                        return TokenType::String;
                    }
                },
                '"' => { // parse string
                    chunk.push(state.cur);
                    state.prev = '\0';
                    while state.next != '\0' && state.next != '\n' {
                        if state.next != '"' || state.prev != '\\' && state.cur == '\\' && state.next == '"' {
                            chunk.push(state.next);
                            state.advance_with_prev();
                        }
                        else {
                            chunk.push(state.next);
                            state.advance();
                            return TokenType::String;
                        }
                    };
                    if state.next == '\n'{
                        self.in_string = true;
                        return TokenType::StringMultiBegin;
                    }
                    return TokenType::String;
                },
                '0'..='9' => { // try to parse numbers
                    chunk.push(state.cur);
                    Self::parse_rust_number_tail(state, chunk);
                    return TokenType::Number;
                },
                ':' => {
                    chunk.push(state.cur);
                    if state.next == ':' {
                        chunk.push(state.next);
                        state.advance();
                        return TokenType::Namespace;
                    }
                    return TokenType::Colon;
                },
                '*' => {
                    chunk.push(state.cur);
                    if state.next == '=' {
                        chunk.push(state.next);
                        state.advance();
                        return TokenType::Operator;
                    }
                    if state.next == '/' {
                        chunk.push(state.next);
                        state.advance();
                        return TokenType::Unexpected;
                    }
                    return TokenType::Operator;
                },
                '^' => {
                    chunk.push(state.cur);
                    if state.next == '=' {
                        chunk.push(state.next);
                        state.advance();
                    }
                    return TokenType::Operator;
                },
                '+' => {
                    chunk.push(state.cur);
                    if state.next == '=' {
                        chunk.push(state.next);
                        state.advance();
                    }
                    return TokenType::Operator;
                },
                '-' => {
                    chunk.push(state.cur);
                    if state.next == '>' || state.next == '=' {
                        chunk.push(state.next);
                        state.advance();
                    }
                    return TokenType::Operator;
                },
                '=' => {
                    chunk.push(state.cur);
                    if state.next == '>' || state.next == '=' {
                        chunk.push(state.next);
                        state.advance();
                    }
                    
                    return TokenType::Operator;
                },
                '.' => {
                    chunk.push(state.cur);
                    if state.next == '.' {
                        chunk.push(state.next);
                        state.advance();
                        if state.next == '=' {
                            chunk.push(state.next);
                            state.advance();
                            return TokenType::Splat;
                        }
                        return TokenType::Splat;
                    }
                    return TokenType::Operator;
                },
                ';' => {
                    chunk.push(state.cur);
                    if state.next == '.' {
                        chunk.push(state.next);
                        state.advance();
                    }
                    return TokenType::Delimiter;
                },
                '&' => {
                    chunk.push(state.cur);
                    if state.next == '&' || state.next == '=' {
                        chunk.push(state.next);
                        state.advance();
                    }
                    return TokenType::Operator;
                },
                '|' => {
                    chunk.push(state.cur);
                    if state.next == '|' || state.next == '=' {
                        chunk.push(state.next);
                        state.advance();
                    }
                    return TokenType::Operator;
                },
                '!' => {
                    chunk.push(state.cur);
                    if state.next == '=' {
                        chunk.push(state.next);
                        state.advance();
                    }
                    return TokenType::Operator;
                },
                '<' => {
                    chunk.push(state.cur);
                    if state.next == '=' || state.next == '<' {
                        chunk.push(state.next);
                        state.advance();
                    }
                    return TokenType::Operator;
                },
                '>' => {
                    chunk.push(state.cur);
                    if state.next == '=' {
                        chunk.push(state.next);
                        state.advance();
                    }
                    return TokenType::Operator;
                },
                ',' => {
                    chunk.push(state.cur);
                    return TokenType::Delimiter;
                },
                '(' | '{' | '[' => {
                    chunk.push(state.cur);
                    return TokenType::ParenOpen;
                },
                ')' | '}' | ']' => {
                    chunk.push(state.cur);
                    return TokenType::ParenClose;
                },
                '#' => {
                    chunk.push(state.cur);
                    return TokenType::Hash;
                },
                '_' => {
                    chunk.push(state.cur);
                    Self::parse_rust_ident_tail(state, chunk);
                    if state.next == '(' {
                        return TokenType::Call;
                    }
                    return TokenType::Identifier;
                },
                'a'..='z' => { // try to parse keywords or identifiers
                    chunk.push(state.cur);
                    
                    let keyword_type = Self::parse_rust_lc_keyword(state, chunk, token_chunks);
                    let (is_ident, _) = Self::parse_rust_ident_tail(state, chunk);
                    if is_ident {
                        if state.next == '(' {
                            return TokenType::Call;
                        }
                        return TokenType::Identifier;
                    }
                    else {
                        return keyword_type
                    }
                },
                'A'..='Z' => {
                    chunk.push(state.cur);
                    let mut is_keyword = false;
                    if state.cur == 'S' {
                        if state.keyword(chunk, "elf") {
                            is_keyword = true;
                        }
                    }
                    let (is_ident, has_underscores) = Self::parse_rust_ident_tail(state, chunk);
                    if is_ident {
                        is_keyword = false;
                    }
                    if has_underscores {
                        return TokenType::ThemeName;
                    }
                    if is_keyword {
                        return TokenType::Keyword;
                    }
                    return TokenType::TypeName;
                },
                _ => {
                    chunk.push(state.cur);
                    return TokenType::Operator;
                }
            }
        }
    }
    
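    /// Consumes an identifier tail (letters, digits, `_` or `$`) and reports
    /// `(consumed_any, saw_underscore)`.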
    fn parse_rust_ident_tail<'a>(state: &mut TokenizerState<'a>, chunk: &mut Vec<char>) -> (bool, bool) {
        let mut ret = false;
        let mut has_underscores = false;
        while state.next_is_digit() || state.next_is_letter() || state.next == '_' || state.next == '$' {
            if state.next == '_'{
                has_underscores = true;
            }
            ret = true;
            chunk.push(state.next);
            state.advance();
        }
        (ret, has_underscores)
    }
    
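    /// Consumes a `\`-escape if present, including `\u{...}` unicode escapes, and
    /// returns whether one was found.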
    fn parse_rust_escape_char<'a>(state: &mut TokenizerState<'a>, chunk: &mut Vec<char>) -> bool {
        if state.next == '\\' {
            chunk.push(state.next);
            state.advance();
            if state.next == 'u' {
                chunk.push(state.next);
                state.advance();
                if state.next == '{' {
                    chunk.push(state.next);
                    state.advance();
                    while state.next_is_hex() {
                        chunk.push(state.next);
                        state.advance();
                    }
                    if state.next == '}' {
                        chunk.push(state.next);
                        state.advance();
                    }
                }
            }
            else if state.next != '\n' && state.next != '\0' {
                // it's a single-char escape; TODO: limit this to valid escape chars
                chunk.push(state.next);
                state.advance();
            }
            return true
        }
        return false
    }
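    /// Consumes the rest of a numeric literal after its leading digit: hex, binary
    /// and octal forms, `_` separators, integer width suffixes, and float
    /// fractions, exponents and `f32`/`f64` suffixes.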
    fn parse_rust_number_tail<'a>(state: &mut TokenizerState<'a>, chunk: &mut Vec<char>) {
        if state.next == 'x' { // parse a hex number
            chunk.push(state.next);
            state.advance();
            while state.next_is_hex() || state.next == '_' {
                chunk.push(state.next);
                state.advance();
            }
        }
        else if state.next == 'b' { // parse a binary number
            chunk.push(state.next);
            state.advance();
            while state.next == '0' || state.next == '1' || state.next == '_' {
                chunk.push(state.next);
                state.advance();
            }
        }
        else if state.next == 'o' { // parse an octal number
            chunk.push(state.next);
            state.advance();
            while state.next == '0' || state.next == '1' || state.next == '2'
                || state.next == '3' || state.next == '4' || state.next == '5'
                || state.next == '6' || state.next == '7' || state.next == '_' {
                chunk.push(state.next);
                state.advance();
            }
        }
        else {
            while state.next_is_digit() || state.next == '_' {
                chunk.push(state.next);
                state.advance();
            }
            if state.next == 'u' || state.next == 'i' {
                chunk.push(state.next);
                state.advance();
                // consume the integer width suffix (8/16/32/64) if present
                if state.keyword(chunk, "8") {
                }
                else if state.keyword(chunk, "16") {
                }
                else if state.keyword(chunk, "32") {
                }
                else if state.keyword(chunk, "64") {
                }
            }
            else if state.next == '.' || state.next == 'f' || state.next == 'e' || state.next == 'E' {
                if state.next == '.' || state.next == 'f' {
                    chunk.push(state.next);
                    state.advance();
                    while state.next_is_digit() || state.next == '_' {
                        chunk.push(state.next);
                        state.advance();
                    }
                }
                if state.next == 'E' || state.next == 'e' {
                    chunk.push(state.next);
                    state.advance();
                    if state.next == '+' || state.next == '-'{
                        chunk.push(state.next);
                        state.advance();
                        while state.next_is_digit() || state.next == '_' {
                            chunk.push(state.next);
                            state.advance();
                        }
                    }
                    else {
                        return
                    }
                }
                if state.next == 'f' { // the f32, f64 postfix
                    chunk.push(state.next);
                    state.advance();
                    if state.keyword(chunk, "32") {
                    }
                    else if state.keyword(chunk, "64") {
                    }
                }
            }
        }
    }
    
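    /// Classifies a lowercase word starting at `state.cur` as a keyword, flow or
    /// looping construct, builtin type, boolean or type definition; anything else
    /// becomes `Call` when followed by `(`, otherwise `Identifier`.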
    fn parse_rust_lc_keyword<'a>(state: &mut TokenizerState<'a>, chunk: &mut Vec<char>, token_chunks: &Vec<TokenChunk>) -> TokenType {
        match state.cur {
            'a' => {
                if state.keyword(chunk, "s") {
                    return TokenType::Keyword
                }
            },
            'b' => {
                if state.keyword(chunk, "reak") {
                    return TokenType::Flow
                }
                if state.keyword(chunk, "ool") {
                    return TokenType::BuiltinType
                }
            },
            'c' => {
                if state.keyword(chunk, "on") {
                    if state.keyword(chunk, "st") {
                        return TokenType::Keyword
                    }
                    if state.keyword(chunk, "tinue") {
                        return TokenType::Flow
                    }
                }
                if state.keyword(chunk, "rate") {
                    return TokenType::Keyword
                }
                if state.keyword(chunk, "har") {
                    return TokenType::BuiltinType
                }
            },
            'd' => {
                if state.keyword(chunk, "yn") {
                    return TokenType::Keyword
                }
            },
            'e' => {
                if state.keyword(chunk, "lse") {
                    return TokenType::Flow
                }
                if state.keyword(chunk, "num") {
                    return TokenType::TypeDef
                }
                if state.keyword(chunk, "xtern") {
                    return TokenType::Keyword
                }
            },
            'f' => {
                if state.keyword(chunk, "alse") {
                    return TokenType::Bool
                }
                if state.keyword(chunk, "n") {
                    return TokenType::Fn
                }
                if state.keyword(chunk, "or") {
                    // check if we are first on a line
                    if token_chunks.len() <2
                        || token_chunks[token_chunks.len() - 1].token_type == TokenType::Newline
                        || token_chunks[token_chunks.len() - 2].token_type == TokenType::Newline
                        && token_chunks[token_chunks.len() - 1].token_type == TokenType::Whitespace {
                        return TokenType::Looping;
                        //self.code_editor.set_indent_color(self.code_editor.colors.indent_line_looping);
                    }
                    
                    return TokenType::Keyword;
                    // self.code_editor.set_indent_color(self.code_editor.colors.indent_line_def);
                }
                
                if state.keyword(chunk, "32") {
                    return TokenType::BuiltinType
                }
                if state.keyword(chunk, "64") {
                    return TokenType::BuiltinType
                }
            },
            'i' => {
                if state.keyword(chunk, "f") {
                    return TokenType::Flow
                }
                if state.keyword(chunk, "mpl") {
                    return TokenType::TypeDef
                }
                if state.keyword(chunk, "size") {
                    return TokenType::BuiltinType
                }
                if state.keyword(chunk, "n") {
                    return TokenType::Keyword
                }
                if state.keyword(chunk, "8") {
                    return TokenType::BuiltinType
                }
                if state.keyword(chunk, "16") {
                    return TokenType::BuiltinType
                }
                if state.keyword(chunk, "32") {
                    return TokenType::BuiltinType
                }
                if state.keyword(chunk, "64") {
                    return TokenType::BuiltinType
                }
            },
            'l' => {
                if state.keyword(chunk, "et") {
                    return TokenType::Keyword
                }
                if state.keyword(chunk, "oop") {
                    return TokenType::Looping
                }
            },
            'm' => {
                if state.keyword(chunk, "atch") {
                    return TokenType::Flow
                }
                if state.keyword(chunk, "ut") {
                    return TokenType::Keyword
                }
                if state.keyword(chunk, "o") {
                    if state.keyword(chunk, "d") {
                        return TokenType::Keyword
                    }
                    if state.keyword(chunk, "ve") {
                        return TokenType::Keyword
                    }
                }
            },
            'p' => { // pub
                if state.keyword(chunk, "ub") {
                    return TokenType::Keyword
                }
            },
            'r' => {
                if state.keyword(chunk, "e") {
                    if state.keyword(chunk, "f") {
                        return TokenType::Keyword
                    }
                    if state.keyword(chunk, "turn") {
                        return TokenType::Flow
                    }
                }
            },
            's' => {
                if state.keyword(chunk, "elf") {
                    return TokenType::Keyword
                }
                if state.keyword(chunk, "uper") {
                    return TokenType::Keyword
                }
                if state.keyword(chunk, "t") {
                    if state.keyword(chunk, "atic") {
                        return TokenType::Keyword
                    }
                    if state.keyword(chunk, "r") {
                        if state.keyword(chunk, "uct"){
                            return TokenType::TypeDef
                        }
                        return TokenType::BuiltinType
                    }
                }
            },
            't' => {
                if state.keyword(chunk, "ype") {
                    return TokenType::Keyword
                }
                if state.keyword(chunk, "r") {
                    if state.keyword(chunk, "ait") {
                        return TokenType::TypeDef
                    }
                    if state.keyword(chunk, "ue") {
                        return TokenType::Bool
                    }
                }
            },
            'u' => { // use
                
                if state.keyword(chunk, "nsafe") {
                    return TokenType::Keyword
                }
                if state.keyword(chunk, "8") {
                    return TokenType::BuiltinType
                }
                if state.keyword(chunk, "16") {
                    return TokenType::BuiltinType
                }
                if state.keyword(chunk, "32") {
                    return TokenType::BuiltinType
                }
                if state.keyword(chunk, "64") {
                    return TokenType::BuiltinType
                }
                if state.keyword(chunk, "s") {
                    if state.keyword(chunk, "ize") {
                        return TokenType::BuiltinType
                    }
                    if state.keyword(chunk, "e") {
                        return TokenType::Keyword
                    }
                }
            },
            'w' => { // where, while
                if state.keyword(chunk, "h") {
                    if state.keyword(chunk, "ere") {
                        return TokenType::Keyword
                    }
                    if state.keyword(chunk, "ile") {
                        return TokenType::Looping
                    }
                }
            },
            
            _ => {}
        }
        if state.next == '(' {
            return TokenType::Call;
        }
        else {
            return TokenType::Identifier;
        }
    }
    
    // rustfmt is too heavy to compile or embed as a library here, so this is a deliberately simple substitute.
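    /// Re-emits the token stream of `text_buffer` with normalized spacing and
    /// 4-space indentation, returning the result as a `FormatOutput`. With
    /// `force_newlines`, every bracketed block is laid out across multiple lines;
    /// otherwise only blocks that already start with a newline are.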
    pub fn auto_format(text_buffer: &mut TextBuffer, force_newlines: bool) -> FormatOutput {
        
        // extra spacey setting that rustfmt seems to do, but I don't like
        let extra_spacey = false;
        let pre_spacey = true;
        
        let mut out = FormatOutput::new();
        let mut tp = TokenParser::new(&text_buffer.flat_text, &text_buffer.token_chunks);
        
        // per-bracket formatting state: whether the block is laid out over multiple
        // lines, which indent to restore on close, and the current angle-bracket depth
        struct ParenStack {
            expecting_newlines: bool,
            expected_indent: usize,
            angle_counter: usize
        }
        
        let mut paren_stack: Vec<ParenStack> = Vec::new();
        
        paren_stack.push(ParenStack {
            expecting_newlines: true,
            expected_indent: 0,
            angle_counter: 0
        });
        out.new_line();
        
        let mut first_on_line = true;
        let mut first_after_open = false;
        let mut expected_indent = 0;
        let mut is_unary_operator = true;
        let mut in_multline_comment = false;
        let mut in_singleline_comment = false;
        let mut in_multiline_string = false;
        while tp.advance() {
            
            match tp.cur_type() {
                TokenType::Whitespace => {
                    if in_singleline_comment || in_multline_comment{
                        out.extend(tp.cur_chunk());
                    }
                    else if !first_on_line && tp.next_type() != TokenType::Newline
                        && tp.prev_type() != TokenType::ParenOpen
                        && tp.prev_type() != TokenType::Namespace
                        && tp.prev_type() != TokenType::Delimiter
                        && (tp.prev_type() != TokenType::Operator || (tp.prev_char() == '>' || tp.prev_char() == '<')) {
                        out.add_space();
                    }
                },
                TokenType::Newline => {
                    in_singleline_comment = false;
                    //paren_stack.last_mut().unwrap().angle_counter = 0;
                    if in_singleline_comment || in_multline_comment || in_multiline_string{
                        out.new_line();
                        first_on_line = true;
                    }
                    else{
                        if first_on_line {
                            out.indent(expected_indent);
                        }
                        else {
                            out.strip_space();
                        }
                        if first_after_open {
                            paren_stack.last_mut().unwrap().expecting_newlines = true;
                            expected_indent += 4;
                        }
                        if paren_stack.last_mut().unwrap().expecting_newlines { // only insert when expecting newlines
                            first_after_open = false;
                            out.new_line();
                            first_on_line = true;
                        }
                    }
                },
                TokenType::Eof => {break},
                TokenType::ParenOpen => {
                    if first_on_line {
                        out.indent(expected_indent);
                    }
                    
                    paren_stack.push(ParenStack {
                        expecting_newlines: force_newlines,
                        expected_indent: expected_indent,
                        angle_counter: 0
                    });
                    first_after_open = true;
                    is_unary_operator = true;
                    
                    let is_curly = tp.cur_char() == '{';
                    if tp.cur_char() == '(' && (
                        tp.prev_type() == TokenType::Flow || tp.prev_type() == TokenType::Looping || tp.prev_type() == TokenType::Keyword
                    ) {
                        out.add_space();
                    }
                    if pre_spacey && is_curly && !first_on_line && tp.prev_type() != TokenType::Namespace {
                        if tp.prev_char() != ' ' && tp.prev_char() != '{'
                            && tp.prev_char() != '[' && tp.prev_char() != '(' && tp.prev_char() != ':' {
                            out.add_space();
                        }
                    }
                    else if !pre_spacey {
                        out.strip_space();
                    }
                    
                    out.extend(tp.cur_chunk());
                    
                    if extra_spacey && is_curly && tp.next_type() != TokenType::Newline {
                        out.add_space();
                    }
                    first_on_line = false;
                },
                TokenType::ParenClose => {
                    
                    out.strip_space();
                    
                    let expecting_newlines = paren_stack.last().unwrap().expecting_newlines;
                    
                    if extra_spacey && tp.cur_char() == '}' && !expecting_newlines {
                        out.add_space();
                    }
                    
                    first_after_open = false;
                    if !first_on_line && expecting_newlines { // we are expecting newlines!
                        out.new_line();
                        first_on_line = true;
                    }
                    
                    expected_indent = if paren_stack.len()>1 {
                        paren_stack.pop().unwrap().expected_indent
                    }
                    else {
                        0
                    };
                    if first_on_line {
                        first_on_line = false;
                        out.indent(expected_indent);
                    }
                    if tp.cur_char() == '}' {
                        is_unary_operator = true;
                    }
                    else {
                        is_unary_operator = false;
                    }
                    out.extend(tp.cur_chunk());
                },
                TokenType::CommentLine => {
                    in_singleline_comment = true;
                    if first_on_line {
                        first_on_line = false;
                        out.indent(expected_indent);
                    }
                    else {
                        out.add_space();
                    }
                    out.extend(tp.cur_chunk());
                },
                TokenType::CommentMultiBegin => {
                    in_multline_comment = true;
                    if first_on_line {
                        first_on_line = false;
                        out.indent(expected_indent);
                    }
                    out.extend(tp.cur_chunk());
                },
                TokenType::CommentChunk => {
                    if first_on_line {
                        first_on_line = false;
                    }
                    out.extend(tp.cur_chunk());
                },
                TokenType::CommentMultiEnd => {
                    in_multline_comment = false;
                    if first_on_line {
                        first_on_line = false;
                    }
                    out.extend(tp.cur_chunk());
                },
                TokenType::StringMultiBegin => {
                    in_multiline_string = true;
                    if first_on_line {
                        first_on_line = false;
                        out.indent(expected_indent);
                    }
                    expected_indent += 4;
                    out.extend(tp.cur_chunk());
                },
                TokenType::StringChunk => {
                    if first_on_line {
                        first_on_line = false;
                        out.indent(expected_indent);
                    }
                    out.extend(tp.cur_chunk());
                },
                TokenType::StringMultiEnd => {
                    expected_indent -= 4;
                    in_multiline_string = false;
                    if first_on_line {
                        first_on_line = false;
                        out.indent(expected_indent);
                    }
                    out.extend(tp.cur_chunk());
                },
                TokenType::Colon => {
                    is_unary_operator = true;
                    out.strip_space();
                    out.extend(tp.cur_chunk());
                    if tp.next_type() != TokenType::Whitespace && tp.next_type() != TokenType::Newline {
                        out.add_space();
                    }
                },
                TokenType::Delimiter => {
                    if first_on_line {
                        first_on_line = false;
                        out.indent(expected_indent);
                    }
                    else {
                        out.strip_space();
                    }
                    out.extend(tp.cur_chunk());
                    if paren_stack.last_mut().unwrap().angle_counter == 0 // otherwise generics would be split across lines
                        && paren_stack.last().unwrap().expecting_newlines == true
                        && tp.next_type() != TokenType::Newline { // we are expecting newlines!
                        // scan forward to see if we really need a newline.
                        for next in (tp.index + 1)..tp.tokens.len() {
                            if tp.tokens[next].token_type == TokenType::Newline {
                                break;
                            }
                            if !tp.tokens[next].token_type.should_ignore() {
                                out.new_line();
                                first_on_line = true;
                                break;
                            }
                        }
                    }
                    else if tp.next_type() != TokenType::Newline {
                        out.add_space();
                    }
                    is_unary_operator = true;
                },
                TokenType::Operator => {
                    
                    // track angle-bracket depth so generics are not spaced like comparison operators
                    
                    let mut is_closing_angle = false;
                    if tp.cur_char() == '<' {
                        paren_stack.last_mut().unwrap().angle_counter += 1;
                    }
                    else if tp.cur_char() == '>' {
                        let last = paren_stack.last_mut().unwrap();
                        last.angle_counter = last.angle_counter.max(1) - 1;
                        is_closing_angle = true;
                    }
                    else if tp.cur_char() != '&' && tp.cur_char() != '*' { // anything else resets the angle counter
                        paren_stack.last_mut().unwrap().angle_counter = 0
                    }
                    else {
                        paren_stack.last_mut().unwrap().angle_counter = 0
                    }
                    
                    if first_on_line {
                        first_on_line = false;
                        let extra_indent = if is_closing_angle || is_unary_operator {0}else {4};
                        out.indent(expected_indent + extra_indent);
                    }
                    
                    if (is_unary_operator && (tp.cur_char() == '-' || tp.cur_char() == '*' || tp.cur_char() == '&'))
                        || tp.cur_char() == '!' || tp.cur_char() == '.' || tp.cur_char() == '<' || tp.cur_char() == '>' {
                        out.extend(tp.cur_chunk());
                    }
                    else {
                        out.add_space();
                        out.extend(tp.cur_chunk());
                        if tp.next_type() != TokenType::Newline {
                            out.add_space();
                        }
                    }
                    
                    is_unary_operator = true;
                },
                TokenType::Identifier | TokenType::BuiltinType | TokenType::TypeName | TokenType::ThemeName => { // these don't reset the angle counter
                    is_unary_operator = false;
                    
                    first_after_open = false;
                    if first_on_line {
                        first_on_line = false;
                        let extra_indent = if paren_stack.last_mut().unwrap().angle_counter >0 {4}else {0};
                        out.indent(expected_indent + extra_indent);
                    }
                    out.extend(tp.cur_chunk());
                },
                TokenType::Namespace => {
                    is_unary_operator = true;
                    
                    first_after_open = false;
                    if first_on_line {
                        first_on_line = false;
                        out.indent(expected_indent);
                    }
                    out.extend(tp.cur_chunk());
                },
                // these may be followed by a unary operator
                TokenType::TypeDef | TokenType::Fn | TokenType::Hash | TokenType::Splat |
                TokenType::Keyword | TokenType::Flow | TokenType::Looping => {
                    is_unary_operator = true;
                    paren_stack.last_mut().unwrap().angle_counter = 0;
                    
                    first_after_open = false;
                    if first_on_line {
                        first_on_line = false;
                        out.indent(expected_indent);
                    }
                    out.extend(tp.cur_chunk());
                },
                // these can be followed by a non-unary operator
                TokenType::Call | TokenType::String | TokenType::Regex | TokenType::Number |
                TokenType::Bool | TokenType::Unexpected | TokenType::Error | TokenType::Warning | TokenType::Defocus => {
                    is_unary_operator = false;
                    paren_stack.last_mut().unwrap().angle_counter = 0;
                    
                    first_after_open = false;
                    if first_on_line {
                        first_on_line = false;
                        out.indent(expected_indent);
                    }
                    out.extend(tp.cur_chunk());
                    
                },
            }
        };
        out
    }
}