1use std::collections::{HashMap, HashSet};
2use std::fmt::{Display, Formatter, Write};
3use proc_macro2::{Spacing, TokenTree};
4use quote::{quote, ToTokens};
5use syn::{Attribute, Ident, ItemUse, Path, Signature, Type};
6use crate::ast::{PathHolder, TypeHolder, TypePathHolder};
7use crate::composable::{GenericBoundsModel, TraitModelPart1, TraitDecompositionPart1, TraitTypeModel};
8use crate::context::{GlobalContext, ScopeChain, TypeChain};
9use crate::kind::{MixinKind, ObjectKind};
10use crate::tree::{ScopeTreeID, ScopeTreeExportItem, ScopeTreeItem};
11
12#[allow(unused)]
13pub fn format_imported_set(dict: &HashSet<ItemUse>) -> String {
14 let debug_imports = dict.iter().map(|i| {
15 i.to_token_stream()
16 }).collect::<Vec<_>>();
17 let all = quote!(#(#debug_imports,)*);
18 all.to_string()
19}
20
21#[allow(unused)]
22pub fn format_scope_refinement(dict: &Vec<(ScopeChain, HashMap<TypeHolder, ObjectKind>)>) -> String {
23 let mut iter = dict.iter()
24 .map(|(scope, types)|
25 format!("\t{}: \n\t\t{}", scope.self_path_holder_ref(), types.iter().map(scope_type_conversion_pair).collect::<Vec<_>>()
26 .join("\n\t")))
27 .collect::<Vec<String>>();
28 iter.sort();
29 iter.join("\n")
30
31}
32
33#[allow(unused)]
34pub fn format_type_holders(dict: &HashSet<TypeHolder>) -> String {
35 dict.iter()
36 .map(|item| item.0.to_token_stream().to_string())
38 .collect::<Vec<_>>()
39 .join("\n\n")
40}
41#[allow(unused)]
42pub fn format_type_holders_vec(dict: &Vec<TypeHolder>) -> String {
43 dict.iter()
44 .map(|item| item.0.to_token_stream().to_string())
46 .collect::<Vec<_>>()
47 .join("\n\n")
48}
49#[allow(unused)]
50pub fn format_types(dict: &HashSet<Type>) -> String {
51 dict.iter()
52 .map(|item| item.to_token_stream().to_string())
54 .collect::<Vec<_>>()
55 .join("\n\n")
56}
57
58#[allow(unused)]
59pub fn format_mixin_kinds(dict: &HashMap<MixinKind, HashSet<Option<Attribute>>>) -> String {
60 dict.iter()
61 .map(|(item, attrs)| format!("{}:\t {}", item, format_unique_attrs(attrs)))
62 .collect::<Vec<_>>()
63 .join("\n\t")
64}
65#[allow(unused)]
66pub fn format_mixin_conversions(dict: &HashMap<GenericBoundsModel, HashSet<Option<Attribute>>>) -> String {
67 dict.iter()
68 .map(|(item, attrs)| format!("{}:\n\t {}", item, format_unique_attrs(attrs)))
69 .collect::<Vec<_>>()
70 .join("\n\t")
71}
72
73#[allow(unused)]
74pub fn format_unique_attrs(dict: &HashSet<Option<Attribute>>) -> String {
75 dict.iter()
76 .map(|item| item.as_ref().map_or("[None]".to_string(), |a| a.to_token_stream().to_string()))
77 .collect::<Vec<_>>()
78 .join("\n\t")
79}
80
81pub fn format_attrs(dict: &Vec<Attribute>) -> String {
82 dict.iter()
83 .map(|item| item.to_token_stream().to_string())
84 .collect::<Vec<_>>()
85 .join("\n\t")
86}
87
88#[allow(unused)]
89pub fn format_imports(dict: &HashMap<ScopeChain, HashMap<PathHolder, Path>>) -> String {
90 let vec = scope_imports_dict(dict);
91 let expanded = quote!(#(#vec),*);
92 expanded.to_string()
93}
94
95#[allow(unused)]
96pub fn format_tree_exported_dict(dict: &HashMap<ScopeTreeID, ScopeTreeExportItem>) -> String {
97 dict.iter()
98 .map(|(ident, tree_item)| format!("{}: {}", ident, tree_item))
99 .collect::<Vec<_>>()
100 .join("\n\n")
101}
102
103#[allow(unused)]
104pub fn format_tree_item_dict(dict: &HashMap<ScopeTreeID, ScopeTreeItem>) -> String {
105 dict.iter()
106 .map(|(ident, tree_item)| format!("\t{}: {:?}", ident, tree_item))
107 .collect::<Vec<_>>()
108 .join("\n\n")
109}
110
111#[allow(unused)]
112pub fn scope_type_conversion_pair(dict: (&TypeHolder, &ObjectKind)) -> String {
113 format!("\t{}: {}", dict.0.to_token_stream(), dict.1)
114 }
116
117#[allow(unused)]
118pub fn refinement_pair(dict: (&TypeHolder, &Vec<ObjectKind>)) -> String {
119 format!("\t{}: \n\t\t{}", dict.0.to_token_stream(), dict.1.iter().map(|i| i.to_string()).collect::<Vec<_>>()
120 .join("\n\t"))
121 }
123#[allow(unused)]
131pub fn ident_type_conversion_pair(dict: (&Ident, &Type)) -> String {
132 format!("\t{}: {}", format_token_stream(dict.0), format_token_stream(dict.1))
133}
134
135#[allow(unused)]
136pub fn ident_signature_conversion_pair(dict: (&Ident, &Signature)) -> String {
137 format!("\t{}: {}", format_token_stream(dict.0), format_token_stream(dict.1))
138}
139
140#[allow(unused)]
141pub fn ident_trait_type_decomposition_conversion_pair(dict: (&Ident, &TraitTypeModel)) -> String {
142 format!("\t{}: {}", format_token_stream(dict.0), {
143 let TraitTypeModel { ident, trait_bounds } = dict.1;
144 quote!(#ident: [bounds: #(#trait_bounds)*])
145 })
146}
147fn format_ident_path_pair(pair: (&PathHolder, &Path)) -> String {
148 format!("\t{}: {}", format_token_stream(pair.0), format_token_stream(pair.1))
149}
150
151pub fn format_path_vec(vec: &Vec<Path>) -> String {
152 vec.iter().map(|p| p.to_token_stream().to_string()).collect::<Vec<_>>().join(",")
153}
154pub fn format_obj_vec(vec: &Vec<ObjectKind>) -> String {
155 vec.iter().map(|p| p.to_token_stream().to_string()).collect::<Vec<_>>().join(",")
156}
157
158#[allow(unused)]
159pub fn type_vec_path_conversion_pair(pair: (&Type, &Vec<Path>)) -> String {
160 format!("\t{}: [{}]", format_token_stream(pair.0), format_path_vec(pair.1))
161}
162#[allow(unused)]
163pub fn type_vec_obj_conversion_pair(pair: (&Type, &Vec<ObjectKind>)) -> String {
164 format!("\t{}: [{}]", format_token_stream(pair.0), format_obj_vec(pair.1))
165}
166#[allow(unused)]
167pub fn format_predicates_dict(vec: &HashMap<Type, Vec<Path>>) -> String {
168 vec.iter()
169 .map(type_vec_path_conversion_pair)
170 .collect::<Vec<_>>()
171 .join(",")
172}
173#[allow(unused)]
174pub fn format_predicates_obj_dict(vec: &HashMap<Type, Vec<ObjectKind>>) -> String {
175 vec.iter()
176 .map(type_vec_obj_conversion_pair)
177 .collect::<Vec<_>>()
178 .join(",")
179}
180
181#[allow(unused)]
182fn format_generic_bounds_pair(pair: (&TypePathHolder, &Vec<Path>)) -> String {
183 format!("\t{}: [{}]", format_token_stream(pair.0), format_path_vec(pair.1))
184}
185
186fn format_ident_trait_pair(pair: (&Ident, &TraitModelPart1)) -> String {
187 let implementors = &pair.1.implementors;
188 format!("\t{}: {}: [{}]", format_token_stream(pair.0), "...", quote!(#(#implementors),*))
189}
190
191#[allow(unused)]
192pub fn format_types_dict(dict: &HashMap<TypeHolder, ObjectKind>) -> String {
193 types_dict(dict)
194 .join("\n")
195}
196#[allow(unused)]
197pub fn format_types_to_refine(dict: &HashMap<TypeHolder, Vec<ObjectKind>>) -> String {
198 let mut iter = dict.iter()
199 .map(refinement_pair)
200 .collect::<Vec<String>>();
201 iter.sort();
202 iter.join("\n")
203}
204
205#[allow(unused)]
206pub fn format_ident_types_dict(dict: &HashMap<Ident, Type>) -> String {
207 ident_types_dict(dict)
208 .join("\n")
209}
210
211#[allow(unused)]
212pub fn format_scope_types_dict(dict: &HashMap<ScopeChain, TypeChain>) -> String {
213 dict.iter().map(|(scope, tc)| {
214 format!("{}: \n\t{}", scope.fmt_short(), format_types_dict(&tc.inner))
215 }).collect::<Vec<_>>()
216 .join("\n")
217}
218#[allow(unused)]
220pub fn format_used_traits(dict: &HashMap<ScopeChain, HashMap<Ident, TraitModelPart1>>) -> String {
221 scope_traits_dict(dict).join("\n")
222}
223
/// Pretty-prints any `ToTokens` value on one line, inserting spaces between
/// tokens only where the default `TokenStream` rendering reads badly: a space
/// follows `;`, `)`, `>`, `,` and idents/literals, but not `(`, `<`, `:` or `&`.
///
/// State carried across the token loop:
/// * `space_needed` — emit a space before the next token that honors it.
/// * `inside_angle_brackets` / `inside_round_brackets` — `<...>` / `(...)`
///   nesting depth, consulted only when spacing before a `Group`.
/// * `last_token_was_ampersand` / `last_token_was_comma` — one-token lookbehind.
pub fn format_token_stream<TT: ToTokens>(token_stream: TT) -> String {
    let token_stream = token_stream.into_token_stream();
    let mut formatted_string = String::new();
    let mut space_needed = false;
    let mut inside_angle_brackets = 0;
    let mut inside_round_brackets = 0;
    let mut last_token_was_ampersand = false;
    let mut last_token_was_comma = false;
    for token in token_stream {
        // A comma is always followed by exactly one space, regardless of what
        // the next token is.
        if last_token_was_comma {
            formatted_string.push(' ');
        }
        last_token_was_comma = false;
        match token {
            TokenTree::Ident(ident) => {
                // NOTE(review): when the previous token was `&` and this ident
                // is `mut`, the `pop()` removes the just-written `&` (so `&mut`
                // renders as `mut`) — confirm this is intended. Also, the
                // `starts_with('\'')` arm looks unreachable: lifetimes arrive
                // as a `'` Punct followed by a plain Ident, never as an Ident
                // starting with `'` — verify.
                if last_token_was_ampersand && (ident == "mut" || ident.to_string().starts_with('\'')) {
                    formatted_string.pop();
                } else if space_needed {
                    formatted_string.push(' ');
                }
                formatted_string.push_str(&ident.to_string());
                space_needed = true;
                last_token_was_ampersand = false;
            }
            TokenTree::Punct(punct) => {
                match punct.as_char() {
                    ';' => {
                        formatted_string.push(';');
                        space_needed = true;
                    }
                    ':' => {
                        // No space on either side: covers both `::` and `ident: Type`.
                        formatted_string.push(':');
                        space_needed = false;
                    }
                    '(' => {
                        inside_round_brackets += 1;
                        formatted_string.push('(');
                        space_needed = false;
                    }
                    ')' => {
                        inside_round_brackets -= 1;
                        formatted_string.push(')');
                        space_needed = true;
                    }
                    '<' => {
                        inside_angle_brackets += 1;
                        formatted_string.push('<');
                        space_needed = false;
                    }
                    '>' => {
                        inside_angle_brackets -= 1;
                        formatted_string.push('>');
                        space_needed = true;
                    }
                    ',' => {
                        formatted_string.push(',');
                        last_token_was_comma = true;
                        space_needed = true;
                    }
                    '&' => {
                        // `&` binds tightly to what follows (`&T`, `&mut T`).
                        formatted_string.push('&');
                        last_token_was_ampersand = true;
                        space_needed = false;
                    }
                    _ => {
                        if space_needed {
                            formatted_string.push(' ');
                        }
                        formatted_string.push(punct.as_char());
                        // Joint puncts (e.g. the first char of `::`, `->`) glue
                        // to the next token; alone puncts get a trailing space.
                        space_needed = punct.spacing() == Spacing::Alone;
                    }
                }
            }
            TokenTree::Literal(literal) => {
                if space_needed {
                    formatted_string.push(' ');
                }
                formatted_string.push_str(&literal.to_string());
                space_needed = true;
                last_token_was_ampersand = false;
            }
            TokenTree::Group(group) => {
                // NOTE(review): `==0 || ==0` is true unless BOTH depths are
                // nonzero — possibly `&&` was intended; confirm. Also the
                // recursion formats only the group's inner stream, dropping the
                // group's own delimiters (`()`, `[]`, `{}`) from the output —
                // verify that is the desired rendering.
                if space_needed && (inside_angle_brackets == 0 || inside_round_brackets == 0) {
                    formatted_string.push(' ');
                }
                formatted_string.push_str(&format_token_stream(group.stream()));
                space_needed = true;
                last_token_was_ampersand = false;
            }
        }
    }

    formatted_string
}
327
328
329
330pub fn imports_dict(dict: &HashMap<PathHolder, Path>) -> Vec<String> {
333 dict.iter()
334 .map(format_ident_path_pair)
335 .collect()
336}
337
338#[allow(unused)]
339pub fn generic_bounds_dict(dict: &HashMap<TypePathHolder, Vec<Path>>) -> Vec<String> {
340 dict.iter()
341 .map(format_generic_bounds_pair)
342 .collect()
343}
344
345pub fn types_dict(dict: &HashMap<TypeHolder, ObjectKind>) -> Vec<String> {
346 let mut iter = dict.iter()
347 .map(scope_type_conversion_pair)
348 .collect::<Vec<String>>();
349 iter.sort();
350 iter
351}
352fn ident_signatures_dict(dict: &HashMap<Ident, Signature>) -> Vec<String> {
353 let mut iter = dict.iter()
354 .map(ident_signature_conversion_pair)
355 .collect::<Vec<String>>();
356 iter.sort();
357 iter
358}
359
360
361fn ident_trait_type_decomposition_dict(dict: &HashMap<Ident, TraitTypeModel>) -> Vec<String> {
362 let mut iter = dict.iter()
363 .map(ident_trait_type_decomposition_conversion_pair)
364 .collect::<Vec<String>>();
365 iter.sort();
366 iter
367}
368
369fn ident_types_dict(dict: &HashMap<Ident, Type>) -> Vec<String> {
370 let mut iter = dict.iter()
371 .map(ident_type_conversion_pair)
372 .collect::<Vec<String>>();
373 iter.sort();
374 iter
375}
376
377fn traits_dict(dict: &HashMap<Ident, TraitModelPart1>) -> Vec<String> {
378 let mut iter = dict.iter()
379 .map(format_ident_trait_pair)
380 .collect::<Vec<String>>();
381 iter.sort();
382 iter
383}
384
385
/// Maps each `(key, inner-map)` pair through `mapper` and returns the results
/// sorted, so nested `HashMap` output is deterministic.
fn nested_scope_dict<K, K2, V2, F: Fn(&K, &HashMap<K2, V2>) -> String>(dict: &HashMap<K, HashMap<K2, V2>>, mapper: F) -> Vec<String> {
    let mut lines: Vec<String> = dict.iter()
        .map(|(outer_key, inner_map)| mapper(outer_key, inner_map))
        .collect();
    lines.sort();
    lines
}
393
394fn format_scope_dict<K2, V2, F: Fn(&HashMap<K2, V2>) -> Vec<String>>(dict: &HashMap<ScopeChain, HashMap<K2, V2>>, mapper: F) -> Vec<String> {
395 nested_scope_dict(dict, |scope, sub_dict|
396 format!("\t{}:\n\t\t{}", scope.fmt_short(), mapper(sub_dict).join("\n\t\t")))
397}
398
399pub fn scope_imports_dict(dict: &HashMap<ScopeChain, HashMap<PathHolder, Path>>) -> Vec<String> {
400 format_scope_dict(dict, imports_dict)
401}
402
403#[allow(unused)]
404pub fn scope_generics_dict(dict: &HashMap<ScopeChain, HashMap<TypePathHolder, Vec<Path>>>) -> Vec<String> {
405 format_scope_dict(dict, generic_bounds_dict)
406}
407
408
409fn scope_traits_dict(dict: &HashMap<ScopeChain, HashMap<Ident, TraitModelPart1>>) -> Vec<String> {
410 format_scope_dict(dict, traits_dict)
411}
412
413
414
415fn traits_impl_dict(dict: &HashMap<ScopeChain, Vec<PathHolder>>) -> Vec<String> {
416 let mut iter = dict.iter()
417 .filter_map(|(key, value)| {
418 let scopes = quote!(#(#value),*);
419 if value.is_empty() {
420 None
421 } else {
422 Some(format!("\t{}:\n\t\t{}", format_token_stream(key), format_token_stream(&scopes)))
423 }
424 })
425 .collect::<Vec<String>>();
426 iter.sort();
427 iter
428}
429
/// Flattens the section lists into one sequence and joins the lines with
/// newline + tab, producing a single report string.
fn format_complex_obj(vec: Vec<Vec<String>>) -> String {
    let mut flat: Vec<String> = Vec::new();
    for section in vec {
        flat.extend(section);
    }
    flat.join("\n\t")
}
436
437pub fn format_global_context(context: &GlobalContext) -> String {
438 format_complex_obj(vec![
439 vec!["-- types:".to_string(), context.scope_register.to_string()],
440 vec!["-- traits:".to_string()], scope_traits_dict(&context.traits.inner),
441 vec!["-- traits_impl:".to_string()], traits_impl_dict(&context.traits.used_traits_dictionary),
442 vec!["-- custom:".to_string(), context.custom.to_string()],
443 vec!["-- imports:".to_string()], scope_imports_dict(&context.imports.inner),
444 vec!["-- generics:".to_string()], scope_generics_dict(&context.generics.inner),
445 ])
446}
447
448#[allow(unused)]
449pub fn format_trait_decomposition_part1(dict: &TraitDecompositionPart1) -> String {
450 format_complex_obj(vec![
451 vec!["\n-- ident:".to_string()], vec![format_token_stream(&dict.ident)],
452 vec!["-- consts:".to_string()], ident_types_dict(&dict.consts),
453 vec!["-- methods:".to_string()], ident_signatures_dict(&dict.methods),
454 vec!["-- types:".to_string()], ident_trait_type_decomposition_dict(&dict.types),
455 ])
456}
457
/// Marker glyphs used to tag lines in debug/trace output.
#[allow(dead_code)]
pub enum Emoji {
    Branch,
    Question,
    Local,
    Nothing,
    Ok,
    Error,
    Plus,
    Node,
    Folder,
    File
}

impl Display for Emoji {
    /// Writes the single Unicode glyph associated with the variant.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let glyph = match self {
            Emoji::Branch => '\u{1D30E}',
            Emoji::Question => '\u{2753}',
            Emoji::Local => '\u{1F501}',
            Emoji::Nothing => '\u{1F502}',
            Emoji::Ok => '\u{2705}',
            Emoji::Error => '\u{274C}',
            Emoji::Plus => '\u{271A}',
            Emoji::Node => '\u{1F491}',
            Emoji::Folder => '\u{1f4c1}',
            Emoji::File => '\u{1f4c4}'
        };
        f.write_char(glyph)
    }
}
489
/// Indentation-aware trace print: matches a nesting counter, an emoji tag and
/// `format!`-style args, but expands to nothing, so every `nprint!` call site
/// compiles to a no-op.
/// NOTE(review): presumably a debug-logging macro whose printing body was
/// stripped — confirm whether a feature-gated printing variant should exist.
#[macro_export]
macro_rules! nprint {
    ($counter:expr, $emoji:expr, $($arg:tt)*) => {
    };
}
500
/// Prints a banner-framed phase header (`$label`) followed by a
/// `format!`-style message, used to delimit compilation phases in the console.
/// Expands to multiple statements, so it is usable only in statement position.
#[macro_export]
macro_rules! print_phase {
    ($label:expr, $($arg:tt)*) => {
        println!("\n########################################################################################################################");
        println!("# {}", $label);
        println!("########################################################################################################################");
        println!("{}", format!($($arg)*));
        println!("########################################################################################################################\n");
    }
}
511