use crate::errwarn::{Error, ErrorGenerator};
use crate::lexer;

use crate::token::tokentypes::LexerTokenType as LexTokenType;
use crate::token::LexerToken as LexToken;

use crate::token::tokentypes::ParserTokenType as TokenType;
use crate::token::ParserToken as Token;
use crate::util::{get_lang, SupportedLanguage};
use std::fmt;

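/// A block opener together with the index of its token in the stream.
/// `parse` keeps a stack of these and backpatches each opener's jump target
/// once the matching `yoksa`/`son` is reached.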
#[derive(Clone)]
enum BlockToken {
    İse(usize),
    İken(usize),
    İkiNoktaNokta(usize),
    İşlev(usize),
    Blok(usize),
}

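// `Debug` output feeds the "unclosed block" error messages below, so each
// variant prints as the keyword that opens it in source.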
impl fmt::Debug for BlockToken {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::İse(_) => write!(f, "ise"),
            Self::İken(_) => write!(f, "iken"),
            Self::İkiNoktaNokta(_) => write!(f, ":."),
            Self::İşlev(_) => write!(f, "işlev"),
            Self::Blok(_) => write!(f, "blok"),
        }
    }
}

impl BlockToken {
    /// Index of the token that opened this block.
    fn unwrap_inner(&self) -> usize {
        match self {
            Self::İse(u)
            | Self::İken(u)
            | Self::İkiNoktaNokta(u)
            | Self::İşlev(u)
            | Self::Blok(u) => *u,
        }
    }
}

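/// Parser over the lexer's token stream. Construction runs the tokens
/// through [`Self::preproc`], so `parse` sees expressions already in
/// postfix order.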
#[derive(Clone)]
pub struct Parser {
    tokens: Vec<LexToken>,
}

impl Parser {
    pub fn new(tokens: Vec<LexToken>) -> Self {
        Self {
            tokens: Self::preproc(tokens),
        }
    }

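    /// Convenience constructor: tokenizes with `lexer` and preprocesses the
    /// result, propagating any lexer error.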
    pub fn from_lexer(lexer: &mut lexer::Lexer, file: String) -> Result<Self, Error> {
        Ok(Self {
            tokens: Self::preproc(lexer.tokenize(&mut vec![], file)?),
        })
    }

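    /// Reorders the token stream with a shunting-yard pass so infix
    /// expressions come out in postfix order (e.g. `(1 + 2) * 3` becomes
    /// `1 2 + 3 *`). Operands are emitted as-is; an operator first pops every
    /// stacked operator of strictly higher precedence, then waits on the
    /// stack itself. Reserved words flush the whole stack, and matched
    /// parentheses are dropped after flushing their contents.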
    fn preproc(prog: Vec<LexToken>) -> Vec<LexToken> {
        use crate::token::Precedence;

        let mut stack: Vec<LexToken> = vec![];
        let mut current = 0;
        let mut tokens = vec![];

        while current < prog.len() {
            let i = prog.get(current).unwrap();
            match i.precedence {
                Precedence::None => {
                    // Operands pass straight through to the output.
                    tokens.push(i.clone());
                    current += 1;
                }
                Precedence::Reserved => {
                    // Reserved words flush every pending operator first.
                    while !stack.is_empty() {
                        tokens.push(stack.pop().unwrap());
                    }
                    tokens.push(i.clone());
                    current += 1;
                }
                Precedence::Precedence(u) => {
                    // Pop operators of strictly higher precedence, then wait
                    // on the stack.
                    while !stack.is_empty()
                        && match stack.last().unwrap().precedence {
                            Precedence::Precedence(x) => x > u,
                            Precedence::ParenL => false,
                            _ => unreachable!(),
                        }
                    {
                        tokens.push(stack.pop().unwrap());
                    }
                    stack.push(i.clone());
                    current += 1;
                }
                Precedence::ParenL => {
                    stack.push(i.clone());
                    current += 1;
                }
                Precedence::ParenR => {
                    // Flush until the matching `(`, then discard the pair.
                    while !stack.is_empty()
                        && !matches!(stack.last().unwrap().precedence, Precedence::ParenL)
                    {
                        tokens.push(stack.pop().unwrap());
                    }
                    if !stack.is_empty()
                        && matches!(stack.last().unwrap().precedence, Precedence::ParenL)
                    {
                        stack.pop().unwrap();
                    }
                    current += 1;
                }
                Precedence::Comma => {
                    // A comma flushes operators up to the enclosing `(` but
                    // keeps the parenthesis on the stack.
                    while !stack.is_empty()
                        && !matches!(stack.last().unwrap().precedence, Precedence::ParenL)
                    {
                        tokens.push(stack.pop().unwrap());
                    }
                    current += 1;
                }
            }
        }
        while !stack.is_empty() {
            tokens.push(stack.pop().unwrap());
        }
        tokens
    }

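    /// Converts lexer tokens into parser tokens in a single pass, resolving
    /// control flow as it goes: every opened block (`ise`, `iken`, `:.`,
    /// `işlev`, `blok`) is pushed onto a stack and its jump target is
    /// backpatched once the matching `yoksa`/`son` arrives. `rets` tracks the
    /// `ver` tokens of each function body so they can later be pointed at
    /// the function's end.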
    pub fn parse(&mut self) -> Result<Vec<Token>, Error> {
        let mut parsed: Vec<Token> = vec![];
        // Stack of currently open blocks, innermost last.
        let mut blocktokens: Vec<BlockToken> = vec![];
        // One frame per open function, plus the top level: indices of `ver`
        // tokens waiting to learn where their function ends.
        let mut rets: Vec<Vec<usize>> = vec![vec![]];

        for (ip, ptoken) in self.tokens.iter().enumerate() {
            match ptoken.typ {
                LexTokenType::InScopeParentR => parsed.push(Token::new(
                    TokenType::InScopeParentR,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::InScopeParentL => parsed.push(Token::new(
                    TokenType::InScopeParentL,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::İkiNokta => parsed.push(Token::new(
                    TokenType::İkiNokta,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::Blok => {
                    parsed.push(Token::new(
                        TokenType::Blok,
                        ptoken.line,
                        ptoken.col,
                        ptoken.file.clone(),
                    ));
                    blocktokens.push(BlockToken::Blok(ip));
                }
                LexTokenType::Hiç => parsed.push(Token::new(
                    TokenType::Hiç,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::Ver => {
                    parsed.push(Token::new(
                        TokenType::Ver { tp: None },
                        ptoken.line,
                        ptoken.col,
                        ptoken.file.clone(),
                    ));
                    // `rets` always holds at least the top-level frame, so
                    // record this `ver` in the innermost one for backpatching.
                    rets.last_mut().unwrap().push(ip);
                }
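                // Imports and commas are consumed before this pass, and
                // matched parentheses are dropped by `preproc`; a parenthesis
                // that survives to this point is necessarily unmatched.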
                LexTokenType::Yükle | LexTokenType::Comma => unreachable!(),
                LexTokenType::ParenR | LexTokenType::ParenL => {
                    return Err(match get_lang() {
                        SupportedLanguage::Turkish => ErrorGenerator::error(
                            "SözdizimHatası",
                            "kapatılmamış parantez",
                            ptoken.line,
                            ptoken.col,
                            ptoken.file.clone(),
                            None,
                        ),
                        SupportedLanguage::English => ErrorGenerator::error(
                            "SyntaxError",
                            "unclosed parenthesis",
                            ptoken.line,
                            ptoken.col,
                            ptoken.file.clone(),
                            None,
                        ),
                    });
                }
                LexTokenType::İşlev => {
                    blocktokens.push(BlockToken::İşlev(ip));
                    parsed.push(Token::new(
                        TokenType::İşlev { sonloc: None },
                        ptoken.line,
                        ptoken.col,
                        ptoken.file.clone(),
                    ));
                    // Open a fresh frame to collect this function's `ver` sites.
                    rets.push(vec![]);
                }
                LexTokenType::At => parsed.push(Token::new(
                    TokenType::At,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::Sayı => parsed.push(Token::new(
                    TokenType::Sayı {
                        val: ptoken.lexeme.as_str().parse().unwrap(),
                    },
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::Yazı => parsed.push(Token::new(
                    TokenType::Yazı {
                        val: ptoken.lexeme.clone(),
                    },
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::Identifier => parsed.push(Token::new(
                    TokenType::Identifier {
                        id: ptoken.lexeme.clone(),
                    },
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::İkiNoktaNokta => {
                    blocktokens.push(BlockToken::İkiNoktaNokta(ip));
                    parsed.push(Token::new(
                        TokenType::İkiNoktaNokta,
                        ptoken.line,
                        ptoken.col,
                        ptoken.file.clone(),
                    ));
                }
                LexTokenType::İken => {
                    // `iken` is only valid directly inside a `:.` loop head;
                    // any other open block is reported as unclosed, and a bare
                    // `iken` with no open block at all is a syntax error
                    // rather than a panic.
                    match blocktokens.last() {
                        Some(BlockToken::İkiNoktaNokta(_)) => (),
                        Some(a) => {
                            let o = parsed.get(a.unwrap_inner()).unwrap().clone();
                            return Err(match get_lang() {
                                SupportedLanguage::Turkish => ErrorGenerator::error(
                                    "SözdizimHatası",
                                    &format!("kapatılmamış blok {:?}", a),
                                    o.line,
                                    o.col,
                                    o.file,
                                    None,
                                ),
                                SupportedLanguage::English => ErrorGenerator::error(
                                    "SyntaxError",
                                    &format!("unclosed block {:?}", a),
                                    o.line,
                                    o.col,
                                    o.file,
                                    None,
                                ),
                            });
                        }
                        None => {
                            return Err(match get_lang() {
                                SupportedLanguage::Turkish => ErrorGenerator::error(
                                    "SözdizimHatası",
                                    "beklenmedik `iken`",
                                    ptoken.line,
                                    ptoken.col,
                                    ptoken.file.clone(),
                                    None,
                                ),
                                SupportedLanguage::English => ErrorGenerator::error(
                                    "SyntaxError",
                                    "unexpected `iken`",
                                    ptoken.line,
                                    ptoken.col,
                                    ptoken.file.clone(),
                                    None,
                                ),
                            });
                        }
                    }
                    blocktokens.push(BlockToken::İken(ip));
                    parsed.push(Token::new(
                        TokenType::İken(None),
                        ptoken.line,
                        ptoken.col,
                        ptoken.file.clone(),
                    ));
                }
                LexTokenType::İse => {
                    blocktokens.push(BlockToken::İse(ip));
                    parsed.push(Token::new(
                        TokenType::İse(None),
                        ptoken.line,
                        ptoken.col,
                        ptoken.file.clone(),
                    ));
                }
                LexTokenType::Yoksa => {
                    // Patch the matching `ise` to jump just past this `yoksa`,
                    // then re-open the block as `ise` so the coming `son`
                    // patches the `yoksa` token itself.
                    let last_blocktoken = blocktokens.pop().unwrap();
                    match last_blocktoken {
                        BlockToken::İse(bip) => {
                            let ise = &mut parsed[bip];
                            match ise.typ {
                                TokenType::İse(ref mut yoksa) => {
                                    yoksa.replace(ip + 1);
                                }
                                _ => unreachable!(),
                            }
                        }
                        _ => unimplemented!(),
                    }
                    blocktokens.push(BlockToken::İse(ip));
                    parsed.push(Token::new(
                        TokenType::Yoksa(None),
                        ptoken.line,
                        ptoken.col,
                        ptoken.file.clone(),
                    ));
                }
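                // `son` closes the innermost open block; where its jump
                // target points depends on which kind of block it closes.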
                LexTokenType::Son => {
                    // A stray `son` with nothing open is a syntax error
                    // rather than a panic.
                    let last_blocktoken = match blocktokens.pop() {
                        Some(b) => b,
                        None => {
                            return Err(match get_lang() {
                                SupportedLanguage::Turkish => ErrorGenerator::error(
                                    "SözdizimHatası",
                                    "beklenmedik `son`",
                                    ptoken.line,
                                    ptoken.col,
                                    ptoken.file.clone(),
                                    None,
                                ),
                                SupportedLanguage::English => ErrorGenerator::error(
                                    "SyntaxError",
                                    "unexpected `son`",
                                    ptoken.line,
                                    ptoken.col,
                                    ptoken.file.clone(),
                                    None,
                                ),
                            });
                        }
                    };
                    match last_blocktoken {
                        BlockToken::İse(bip) => {
                            // The `ise` (or the `yoksa` re-opened as `ise`)
                            // jumps here when its branch is skipped; `son`
                            // itself just falls through.
                            let ise = &mut parsed[bip];
                            match ise.typ {
                                TokenType::İse(ref mut yoksa) => {
                                    yoksa.replace(ip);
                                }
                                TokenType::Yoksa(ref mut tp) => {
                                    tp.replace(ip);
                                }
                                _ => unreachable!(),
                            }
                            let tp = ip + 1;
                            parsed.push(Token::new(
                                TokenType::Son { tp },
                                ptoken.line,
                                ptoken.col,
                                ptoken.file.clone(),
                            ));
                        }
                        BlockToken::İken(bip) => {
                            // The `iken` jumps past this `son` when the loop
                            // condition fails; `son` jumps back to the
                            // matching `:.` at the head of the loop.
                            let iken = parsed.get_mut(bip).unwrap();
                            let tp: usize = match iken.typ {
                                TokenType::İken(ref mut tp) => {
                                    tp.replace(ip + 1);
                                    let blkiknk = blocktokens.pop().unwrap();
                                    match blkiknk {
                                        BlockToken::İkiNoktaNokta(iknkip) => {
                                            let iknk = parsed.get_mut(iknkip).unwrap();
                                            match iknk.typ {
                                                TokenType::İkiNoktaNokta => iknkip,
                                                _ => {
                                                    let o = iknk.clone();
                                                    return Err(match get_lang() {
                                                        SupportedLanguage::Turkish => ErrorGenerator::error(
                                                            "SözdizimHatası",
                                                            &format!("kapatılmamış blok {:?}", o.repr()),
                                                            o.line,
                                                            o.col,
                                                            o.file,
                                                            None,
                                                        ),
                                                        SupportedLanguage::English => ErrorGenerator::error(
                                                            "SyntaxError",
                                                            &format!("unclosed block {:?}", o.repr()),
                                                            o.line,
                                                            o.col,
                                                            o.file,
                                                            None,
                                                        ),
                                                    });
                                                }
                                            }
                                        }
                                        a => {
                                            let o = parsed.get(a.unwrap_inner()).unwrap().clone();
                                            return Err(match get_lang() {
                                                SupportedLanguage::Turkish => ErrorGenerator::error(
                                                    "SözdizimHatası",
                                                    &format!("kapatılmamış blok {:?}", a),
                                                    o.line,
                                                    o.col,
                                                    o.file,
                                                    None,
                                                ),
                                                SupportedLanguage::English => ErrorGenerator::error(
                                                    "SyntaxError",
                                                    &format!("unclosed block {:?}", a),
                                                    o.line,
                                                    o.col,
                                                    o.file,
                                                    None,
                                                ),
                                            });
                                        }
                                    }
                                }
                                _ => unreachable!(),
                            };
                            parsed.push(Token::new(
                                TokenType::Son { tp },
                                ptoken.line,
                                ptoken.col,
                                ptoken.file.clone(),
                            ));
                        }
                        BlockToken::İşlev(bip) => {
                            // Point the function header and every `ver` in the
                            // body at this terminator.
                            let işlev = parsed.get_mut(bip).unwrap();
                            match işlev.typ {
                                TokenType::İşlev { ref mut sonloc } => {
                                    sonloc.replace(ip);
                                }
                                _ => unreachable!(),
                            }
                            parsed.push(Token::new(
                                TokenType::İşlevSonlandır { tp: vec![] },
                                ptoken.line,
                                ptoken.col,
                                ptoken.file.clone(),
                            ));
                            let srets: Vec<usize> = rets.pop().unwrap();
                            for u in srets.iter() {
                                let sr = parsed.get_mut(*u).unwrap();
                                match sr.typ {
                                    TokenType::Ver { ref mut tp } => {
                                        *tp = Some(ip);
                                    }
                                    _ => unreachable!(),
                                }
                            }
                        }
                        BlockToken::Blok(_) => {
                            parsed.push(Token::new(
                                TokenType::BlokSonlandır,
                                ptoken.line,
                                ptoken.col,
                                ptoken.file.clone(),
                            ));
                        }
                        _ => unimplemented!(),
                    };
                }
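                // The remaining token types need no fixups: each maps
                // one-to-one onto its parser token.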
                LexTokenType::Doğru => parsed.push(Token::new(
                    TokenType::Bool { val: true },
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::Yanlış => parsed.push(Token::new(
                    TokenType::Bool { val: false },
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::Artı => parsed.push(Token::new(
                    TokenType::Artı,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::ArtıArtı => parsed.push(Token::new(
                    TokenType::ArtıArtı,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::Eksi => parsed.push(Token::new(
                    TokenType::Eksi,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::EksiEksi => parsed.push(Token::new(
                    TokenType::EksiEksi,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::Çarpı => parsed.push(Token::new(
                    TokenType::Çarpı,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::Bölü => parsed.push(Token::new(
                    TokenType::Bölü,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::Modulo => parsed.push(Token::new(
                    TokenType::Modulo,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::De => parsed.push(Token::new(
                    TokenType::De,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::Girdi => parsed.push(Token::new(
                    TokenType::Girdi,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::Kopya => parsed.push(Token::new(
                    TokenType::Kopya,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::Koy => parsed.push(Token::new(
                    TokenType::Koy,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::Büyüktür => parsed.push(Token::new(
                    TokenType::Büyüktür,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::BüyükEşittir => parsed.push(Token::new(
                    TokenType::BüyükEşittir,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::Küçüktür => parsed.push(Token::new(
                    TokenType::Küçüktür,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::KüçükEşittir => parsed.push(Token::new(
                    TokenType::KüçükEşittir,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::Eşittir => parsed.push(Token::new(
                    TokenType::Eşittir,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::EşitDeğildir => parsed.push(Token::new(
                    TokenType::EşitDeğildir,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::Değildir => parsed.push(Token::new(
                    TokenType::Değildir,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::Takas => parsed.push(Token::new(
                    TokenType::Takas,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::Döndür => parsed.push(Token::new(
                    TokenType::Döndür,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::Üst => parsed.push(Token::new(
                    TokenType::Üst,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::Ve => parsed.push(Token::new(
                    TokenType::Ve,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::Veya => parsed.push(Token::new(
                    TokenType::Veya,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::Tipinde => parsed.push(Token::new(
                    TokenType::Tipinde,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
                LexTokenType::EOF => parsed.push(Token::new(
                    TokenType::EOF,
                    ptoken.line,
                    ptoken.col,
                    ptoken.file.clone(),
                )),
            }
        }
        Ok(parsed)
    }
}
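
// A small sanity-check sketch. It exercises only items defined in this file
// and makes no assumptions about the lexer's API.
#[cfg(test)]
mod tests {
    use super::*;

    // `unwrap_inner` should return the opening token's index regardless of
    // which block variant wraps it.
    #[test]
    fn unwrap_inner_returns_the_opening_index() {
        assert_eq!(BlockToken::İse(3).unwrap_inner(), 3);
        assert_eq!(BlockToken::İken(7).unwrap_inner(), 7);
        assert_eq!(BlockToken::Blok(0).unwrap_inner(), 0);
    }

    // `Debug` output is what the "unclosed block" errors print, so it should
    // match the source keywords.
    #[test]
    fn debug_prints_source_keywords() {
        assert_eq!(format!("{:?}", BlockToken::İşlev(1)), "işlev");
        assert_eq!(format!("{:?}", BlockToken::İkiNoktaNokta(2)), ":.");
    }
}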