1use std::collections::{HashMap, HashSet};
2use std::fmt::{Display, Formatter, Write};
3use proc_macro2::{Spacing, TokenTree};
4use quote::{quote, ToTokens};
5use syn::{Attribute, Ident, ItemUse, Path, Signature, Type};
6use crate::ast::{PathHolder, TypeHolder, TypePathHolder};
7use crate::composable::{GenericBoundsModel, GenericConversion, TraitModelPart1, TraitDecompositionPart1, TraitTypeModel};
8use crate::context::{GlobalContext, ScopeChain, TypeChain};
9use crate::conversion::{MixinKind, ObjectKind};
10use crate::tree::{ScopeTreeExportID, ScopeTreeExportItem, ScopeTreeItem};
11
12#[allow(unused)]
13pub fn format_imported_set(dict: &HashSet<ItemUse>) -> String {
14 let debug_imports = dict.iter().map(|i| {
15 i.to_token_stream()
16 }).collect::<Vec<_>>();
17 let all = quote!(#(#debug_imports,)*);
18 all.to_string()
19}
20
21#[allow(unused)]
22pub fn format_scope_refinement(dict: &Vec<(ScopeChain, HashMap<TypeHolder, ObjectKind>)>) -> String {
23 let mut iter = dict.iter()
24 .map(|(scope, types)|
25 format!("\t{}: \n\t\t{}", scope.self_path_holder_ref(), types.iter().map(scope_type_conversion_pair).collect::<Vec<_>>()
26 .join("\n\t")))
27 .collect::<Vec<String>>();
28 iter.sort();
29 iter.join("\n")
30
31}
32
33#[allow(unused)]
34pub fn format_type_holders(dict: &HashSet<TypeHolder>) -> String {
35 dict.iter()
36 .map(|item| item.0.to_token_stream().to_string())
38 .collect::<Vec<_>>()
39 .join("\n\n")
40}
41#[allow(unused)]
42pub fn format_type_holders_vec(dict: &Vec<TypeHolder>) -> String {
43 dict.iter()
44 .map(|item| item.0.to_token_stream().to_string())
46 .collect::<Vec<_>>()
47 .join("\n\n")
48}
49#[allow(unused)]
50pub fn format_types(dict: &HashSet<Type>) -> String {
51 dict.iter()
52 .map(|item| item.to_token_stream().to_string())
54 .collect::<Vec<_>>()
55 .join("\n\n")
56}
57
58#[allow(unused)]
59pub fn format_generic_conversions(dict: &HashMap<GenericConversion, HashSet<Option<Attribute>>>) -> String {
60 dict.iter()
61 .map(|(item, attrs)| format!("{}: {}", format_unique_attrs(attrs), item.object.to_token_stream()))
62 .collect::<Vec<_>>()
63 .join("\n\t")
64}
65#[allow(unused)]
66pub fn format_mixin_kinds(dict: &HashMap<MixinKind, HashSet<Option<Attribute>>>) -> String {
67 dict.iter()
68 .map(|(item, attrs)| format!("{}:\t {}", item, format_unique_attrs(attrs)))
69 .collect::<Vec<_>>()
70 .join("\n\t")
71}
72#[allow(unused)]
73pub fn format_mixin_conversions(dict: &HashMap<GenericBoundsModel, HashSet<Option<Attribute>>>) -> String {
74 dict.iter()
75 .map(|(item, attrs)| format!("{}:\n\t {}", item, format_unique_attrs(attrs)))
76 .collect::<Vec<_>>()
77 .join("\n\t")
78}
79
80#[allow(unused)]
81pub fn format_unique_attrs(dict: &HashSet<Option<Attribute>>) -> String {
82 dict.iter()
83 .map(|item| item.as_ref().map_or("[None]".to_string(), |a| a.to_token_stream().to_string()))
84 .collect::<Vec<_>>()
85 .join("\n\t")
86}
87
88pub fn format_attrs(dict: &Vec<Attribute>) -> String {
89 dict.iter()
90 .map(|item| item.to_token_stream().to_string())
91 .collect::<Vec<_>>()
92 .join("\n\t")
93}
94
95#[allow(unused)]
96pub fn format_imports(dict: &HashMap<ScopeChain, HashMap<PathHolder, Path>>) -> String {
97 let vec = scope_imports_dict(dict);
98 let expanded = quote!(#(#vec),*);
99 expanded.to_string()
100}
101
102#[allow(unused)]
103pub fn format_tree_exported_dict(dict: &HashMap<ScopeTreeExportID, ScopeTreeExportItem>) -> String {
104 dict.iter()
105 .map(|(ident, tree_item)| format!("{}: {}", ident, tree_item))
106 .collect::<Vec<_>>()
107 .join("\n\n")
108}
109
110#[allow(unused)]
111pub fn format_tree_item_dict(dict: &HashMap<ScopeTreeExportID, ScopeTreeItem>) -> String {
112 dict.iter()
113 .map(|(ident, tree_item)| format!("\t{}: {:?}", ident, tree_item))
114 .collect::<Vec<_>>()
115 .join("\n\n")
116}
117
118#[allow(unused)]
119pub fn scope_type_conversion_pair(dict: (&TypeHolder, &ObjectKind)) -> String {
120 format!("\t{}: {}", dict.0.to_token_stream(), dict.1)
121 }
123
124#[allow(unused)]
125pub fn refinement_pair(dict: (&TypeHolder, &Vec<ObjectKind>)) -> String {
126 format!("\t{}: \n\t\t{}", dict.0.to_token_stream(), dict.1.iter().map(|i| i.to_string()).collect::<Vec<_>>()
127 .join("\n\t"))
128 }
130#[allow(unused)]
138pub fn ident_type_conversion_pair(dict: (&Ident, &Type)) -> String {
139 format!("\t{}: {}", format_token_stream(dict.0), format_token_stream(dict.1))
140}
141
142#[allow(unused)]
143pub fn ident_signature_conversion_pair(dict: (&Ident, &Signature)) -> String {
144 format!("\t{}: {}", format_token_stream(dict.0), format_token_stream(dict.1))
145}
146
147#[allow(unused)]
148pub fn ident_trait_type_decomposition_conversion_pair(dict: (&Ident, &TraitTypeModel)) -> String {
149 format!("\t{}: {}", format_token_stream(dict.0), {
150 let TraitTypeModel { ident, trait_bounds, generics } = dict.1;
151 quote!(#ident: [bounds: #(#trait_bounds)*, generics: #generics])
152 })
153}
154fn format_ident_path_pair(pair: (&PathHolder, &Path)) -> String {
155 format!("\t{}: {}", format_token_stream(pair.0), format_token_stream(pair.1))
156}
157
158pub fn format_path_vec(vec: &Vec<Path>) -> String {
159 vec.iter().map(|p| p.to_token_stream().to_string()).collect::<Vec<_>>().join(",")
160}
161pub fn format_obj_vec(vec: &Vec<ObjectKind>) -> String {
162 vec.iter().map(|p| p.to_token_stream().to_string()).collect::<Vec<_>>().join(",")
163}
164
165#[allow(unused)]
166pub fn type_vec_path_conversion_pair(pair: (&Type, &Vec<Path>)) -> String {
167 format!("\t{}: [{}]", format_token_stream(pair.0), format_path_vec(pair.1))
168}
169#[allow(unused)]
170pub fn type_vec_obj_conversion_pair(pair: (&Type, &Vec<ObjectKind>)) -> String {
171 format!("\t{}: [{}]", format_token_stream(pair.0), format_obj_vec(pair.1))
172}
173#[allow(unused)]
174pub fn format_predicates_dict(vec: &HashMap<Type, Vec<Path>>) -> String {
175 vec.iter()
176 .map(type_vec_path_conversion_pair)
177 .collect::<Vec<_>>()
178 .join(",")
179}
180#[allow(unused)]
181pub fn format_predicates_obj_dict(vec: &HashMap<Type, Vec<ObjectKind>>) -> String {
182 vec.iter()
183 .map(type_vec_obj_conversion_pair)
184 .collect::<Vec<_>>()
185 .join(",")
186}
187
188#[allow(unused)]
189fn format_generic_bounds_pair(pair: (&TypePathHolder, &Vec<Path>)) -> String {
190 format!("\t{}: [{}]", format_token_stream(pair.0), format_path_vec(pair.1))
191}
192
193fn format_ident_trait_pair(pair: (&Ident, &TraitModelPart1)) -> String {
194 let implementors = &pair.1.implementors;
195 format!("\t{}: {}: [{}]", format_token_stream(pair.0), "...", quote!(#(#implementors),*))
196}
197
198#[allow(unused)]
199pub fn format_types_dict(dict: &HashMap<TypeHolder, ObjectKind>) -> String {
200 types_dict(dict)
201 .join("\n")
202}
203#[allow(unused)]
204pub fn format_types_to_refine(dict: &HashMap<TypeHolder, Vec<ObjectKind>>) -> String {
205 let mut iter = dict.iter()
206 .map(refinement_pair)
207 .collect::<Vec<String>>();
208 iter.sort();
209 iter.join("\n")
210}
211
212#[allow(unused)]
213pub fn format_ident_types_dict(dict: &HashMap<Ident, Type>) -> String {
214 ident_types_dict(dict)
215 .join("\n")
216}
217
218#[allow(unused)]
219pub fn format_scope_types_dict(dict: &HashMap<ScopeChain, TypeChain>) -> String {
220 dict.iter().map(|(scope, tc)| {
221 format!("{}: \n\t{}", scope.fmt_short(), format_types_dict(&tc.inner))
222 }).collect::<Vec<_>>()
223 .join("\n")
224}
225#[allow(unused)]
227pub fn format_used_traits(dict: &HashMap<ScopeChain, HashMap<Ident, TraitModelPart1>>) -> String {
228 scope_traits_dict(dict).join("\n")
229}
230
/// Pretty-prints any `ToTokens` value into a single string using hand-tuned
/// spacing rules (tighter than the default `TokenStream` rendering): no space
/// after `:`, `(`, `<` or `&`; a space after `,`, `)`, `>` and `;`;
/// space-separated identifiers and literals.
pub fn format_token_stream<TT: ToTokens>(token_stream: TT) -> String {
    let token_stream = token_stream.into_token_stream();
    let mut formatted_string = String::new();
    // True when the next printed token should be preceded by one space.
    let mut space_needed = false;
    // Nesting depth of `<...>` / `(...)` punctuation seen so far
    // (used only by the Group-spacing decision below).
    let mut inside_angle_brackets = 0;
    let mut inside_round_brackets = 0;
    // Set right after printing '&' so `&mut` / lifetime idents get special handling.
    let mut last_token_was_ampersand = false;
    // Set right after printing ',' so the following token gets a separating space.
    let mut last_token_was_comma = false;
    for token in token_stream {
        if last_token_was_comma {
            formatted_string.push(' ');
        }
        last_token_was_comma = false;
        match token {
            TokenTree::Ident(ident) => {
                // NOTE(review): when the previous token was '&' and this ident is
                // `mut` or a lifetime-like ident, the pop() removes the '&' that
                // was just pushed, so `&mut T` renders as `mut T` — looks
                // unintentional; confirm intended output before changing.
                if last_token_was_ampersand && (ident == "mut" || ident.to_string().starts_with('\'')) {
                    formatted_string.pop(); } else if space_needed {
                    formatted_string.push(' ');
                }
                formatted_string.push_str(&ident.to_string());
                space_needed = true;
                last_token_was_ampersand = false;
            }
            TokenTree::Punct(punct) => {
                match punct.as_char() {
                    ';' => {
                        formatted_string.push(';');
                        space_needed = true;
                    }
                    ':' => {
                        // No space after ':' so paths print as `a::b` and fields as `name:`.
                        formatted_string.push(':');
                        space_needed = false;
                    }
                    '(' => {
                        inside_round_brackets += 1;
                        formatted_string.push('(');
                        space_needed = false;
                    }
                    ')' => {
                        inside_round_brackets -= 1;
                        formatted_string.push(')');
                        space_needed = true;
                    }
                    '<' => {
                        inside_angle_brackets += 1;
                        formatted_string.push('<');
                        space_needed = false;
                    }
                    '>' => {
                        inside_angle_brackets -= 1;
                        formatted_string.push('>');
                        space_needed = true;
                    }
                    ',' => {
                        formatted_string.push(',');
                        last_token_was_comma = true;
                        space_needed = true; }
                    '&' => {
                        formatted_string.push('&');
                        last_token_was_ampersand = true;
                        space_needed = false;
                    }
                    _ => {
                        if space_needed {
                            formatted_string.push(' ');
                        }
                        formatted_string.push(punct.as_char());
                        // Joint puncts (e.g. the first ':' of '::') glue to the next char.
                        space_needed = punct.spacing() == Spacing::Alone;
                    }
                }
            }
            TokenTree::Literal(literal) => {
                if space_needed {
                    formatted_string.push(' ');
                }
                formatted_string.push_str(&literal.to_string());
                space_needed = true;
                last_token_was_ampersand = false;
            }
            TokenTree::Group(group) => {
                // NOTE(review): `||` makes this condition true unless nested inside
                // BOTH bracket kinds at once; `&&` (i.e. "outside either") may have
                // been intended — confirm before changing.
                if space_needed && (inside_angle_brackets == 0 || inside_round_brackets == 0) {
                    formatted_string.push(' ');
                }
                // Recurse into the group's contents; the group's own delimiters
                // ({}, [], ()) are not printed here.
                formatted_string.push_str(&format_token_stream(group.stream()));
                space_needed = true;
                last_token_was_ampersand = false;
            }
        }
    }

    formatted_string
}
344
345
346
347pub fn imports_dict(dict: &HashMap<PathHolder, Path>) -> Vec<String> {
350 dict.iter()
351 .map(format_ident_path_pair)
352 .collect()
353}
354
355#[allow(unused)]
356pub fn generic_bounds_dict(dict: &HashMap<TypePathHolder, Vec<Path>>) -> Vec<String> {
357 dict.iter()
358 .map(format_generic_bounds_pair)
359 .collect()
360}
361
362pub fn types_dict(dict: &HashMap<TypeHolder, ObjectKind>) -> Vec<String> {
363 let mut iter = dict.iter()
364 .map(scope_type_conversion_pair)
365 .collect::<Vec<String>>();
366 iter.sort();
367 iter
368}
369fn ident_signatures_dict(dict: &HashMap<Ident, Signature>) -> Vec<String> {
370 let mut iter = dict.iter()
371 .map(ident_signature_conversion_pair)
372 .collect::<Vec<String>>();
373 iter.sort();
374 iter
375}
376
377
378fn ident_trait_type_decomposition_dict(dict: &HashMap<Ident, TraitTypeModel>) -> Vec<String> {
379 let mut iter = dict.iter()
380 .map(ident_trait_type_decomposition_conversion_pair)
381 .collect::<Vec<String>>();
382 iter.sort();
383 iter
384}
385
386fn ident_types_dict(dict: &HashMap<Ident, Type>) -> Vec<String> {
387 let mut iter = dict.iter()
388 .map(ident_type_conversion_pair)
389 .collect::<Vec<String>>();
390 iter.sort();
391 iter
392}
393
394fn traits_dict(dict: &HashMap<Ident, TraitModelPart1>) -> Vec<String> {
395 let mut iter = dict.iter()
396 .map(format_ident_trait_pair)
397 .collect::<Vec<String>>();
398 iter.sort();
399 iter
400}
401
402
/// Maps each `(key, sub-map)` pair through `mapper` and sorts the resulting lines.
fn nested_scope_dict<K, K2, V2, F: Fn(&K, &HashMap<K2, V2>) -> String>(dict: &HashMap<K, HashMap<K2, V2>>, mapper: F) -> Vec<String> {
    let mut lines: Vec<String> = dict.iter().map(|(key, value)| mapper(key, value)).collect();
    lines.sort();
    lines
}
410
411fn format_scope_dict<K2, V2, F: Fn(&HashMap<K2, V2>) -> Vec<String>>(dict: &HashMap<ScopeChain, HashMap<K2, V2>>, mapper: F) -> Vec<String> {
412 nested_scope_dict(dict, |scope, sub_dict|
413 format!("\t{}:\n\t\t{}", scope.fmt_short(), mapper(sub_dict).join("\n\t\t")))
414}
415
416pub fn scope_imports_dict(dict: &HashMap<ScopeChain, HashMap<PathHolder, Path>>) -> Vec<String> {
417 format_scope_dict(dict, imports_dict)
418}
419
420#[allow(unused)]
421pub fn scope_generics_dict(dict: &HashMap<ScopeChain, HashMap<TypePathHolder, Vec<Path>>>) -> Vec<String> {
422 format_scope_dict(dict, generic_bounds_dict)
423}
424
425
426fn scope_traits_dict(dict: &HashMap<ScopeChain, HashMap<Ident, TraitModelPart1>>) -> Vec<String> {
427 format_scope_dict(dict, traits_dict)
428}
429
430
431
432fn traits_impl_dict(dict: &HashMap<ScopeChain, Vec<PathHolder>>) -> Vec<String> {
433 let mut iter = dict.iter()
436 .filter_map(|(key, value)| {
437 let scopes = quote!(#(#value),*);
438 if value.is_empty() {
439 None
440 } else {
441 Some(format!("\t{}:\n\t\t{}", format_token_stream(key), format_token_stream(&scopes)))
442 }
443 })
444 .collect::<Vec<String>>();
445 iter.sort();
446 iter
447}
448
/// Flattens grouped report sections into one tab-indented, newline-separated string.
fn format_complex_obj(vec: Vec<Vec<String>>) -> String {
    let flat: Vec<String> = vec.into_iter().flatten().collect();
    flat.join("\n\t")
}
455
456pub fn format_global_context(context: &GlobalContext) -> String {
457 format_complex_obj(vec![
458 vec!["-- types:".to_string(), context.scope_register.to_string()],
459 vec!["-- traits:".to_string()], scope_traits_dict(&context.traits.inner),
460 vec!["-- traits_impl:".to_string()], traits_impl_dict(&context.traits.used_traits_dictionary),
461 vec!["-- custom:".to_string(), context.custom.to_string()],
462 vec!["-- imports:".to_string()], scope_imports_dict(&context.imports.inner),
463 vec!["-- generics:".to_string()], scope_generics_dict(&context.generics.inner),
464 ])
465}
466
467#[allow(unused)]
468pub fn format_trait_decomposition_part1(dict: &TraitDecompositionPart1) -> String {
469 format_complex_obj(vec![
470 vec!["\n-- ident:".to_string()], vec![format_token_stream(&dict.ident)],
471 vec!["-- consts:".to_string()], ident_types_dict(&dict.consts),
472 vec!["-- methods:".to_string()], ident_signatures_dict(&dict.methods),
473 vec!["-- types:".to_string()], ident_trait_type_decomposition_dict(&dict.types),
474 ])
475}
476
/// Console status glyphs used by the debug/tracing printers.
#[allow(dead_code)]
pub enum Emoji {
    Branch,
    Question,
    Local,
    Nothing,
    Ok,
    Error,
    Plus,
    Node,
    Folder,
    File
}

impl Display for Emoji {
    /// Writes the single unicode glyph associated with each status icon.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let glyph = match self {
            Emoji::Branch => '\u{1D30E}',
            Emoji::Question => '\u{2753}',
            Emoji::Local => '\u{1F501}',
            Emoji::Nothing => '\u{1F502}',
            Emoji::Ok => '\u{2705}',
            Emoji::Error => '\u{274C}',
            Emoji::Plus => '\u{271A}',
            Emoji::Node => '\u{1F491}',
            Emoji::Folder => '\u{1f4c1}',
            Emoji::File => '\u{1f4c4}'
        };
        f.write_char(glyph)
    }
}
523
#[macro_export]
macro_rules! nprint {
    // Intentionally expands to nothing: the nested tree-walk logging this fed
    // is compiled out. Call sites keep the `nprint!(counter, emoji, fmt, args…)`
    // shape so logging can be re-enabled here without touching callers.
    ($counter:expr, $emoji:expr, $($arg:tt)*) => {
    };
}
534
/// Prints a banner-delimited phase header: a `#`-rule, the label, another rule,
/// the formatted body (`format!`-style args), and a closing rule.
///
/// The expansion is wrapped in its own `{ … }` block so the macro produces a
/// single block expression. The original arm expanded to a bare statement
/// list, which only parsed when the macro was invoked in statement position;
/// the braced form is usable in any position and is backward-compatible for
/// all existing call sites.
#[macro_export]
macro_rules! print_phase {
    ($label:expr, $($arg:tt)*) => {{
        println!("\n########################################################################################################################");
        println!("# {}", $label);
        println!("########################################################################################################################");
        println!("{}", format!($($arg)*));
        println!("########################################################################################################################\n");
    }}
}
545