1use std::{
5 env, fmt,
6 ops::AddAssign,
7 panic::{AssertUnwindSafe, catch_unwind},
8 time::{SystemTime, UNIX_EPOCH},
9};
10
11use cfg::{CfgAtom, CfgDiff};
12use hir::{
13 Adt, AssocItem, Crate, DefWithBody, FindPathConfig, HasCrate, HasSource, HirDisplay, ModuleDef,
14 Name, crate_lang_items,
15 db::{DefDatabase, ExpandDatabase, HirDatabase},
16 next_solver::{DbInterner, GenericArgs},
17};
18use hir_def::{
19 SyntheticSyntax,
20 expr_store::BodySourceMap,
21 hir::{ExprId, PatId},
22};
23use hir_ty::InferenceResult;
24use ide::{
25 Analysis, AnalysisHost, AnnotationConfig, DiagnosticsConfig, Edition, InlayFieldsToResolve,
26 InlayHintsConfig, LineCol, RootDatabase,
27};
28use ide_db::{
29 EditionedFileId, LineIndexDatabase, MiniCore, SnippetCap,
30 base_db::{SourceDatabase, salsa::Database},
31};
32use itertools::Itertools;
33use load_cargo::{LoadCargoConfig, ProcMacroServerChoice, load_workspace};
34use oorandom::Rand32;
35use profile::StopWatch;
36use project_model::{CargoConfig, CfgOverrides, ProjectManifest, ProjectWorkspace, RustLibSource};
37use rayon::prelude::*;
38use rustc_hash::{FxHashMap, FxHashSet};
39use rustc_type_ir::inherent::Ty as _;
40use syntax::AstNode;
41use vfs::{AbsPathBuf, Vfs, VfsPath};
42
43use crate::cli::{
44 Verbosity,
45 flags::{self, OutputFormat},
46 full_name_of_item, print_memory_usage,
47 progress_report::ProgressReport,
48 report_metric,
49};
50
51impl flags::AnalysisStats {
    /// Entry point of `analysis-stats`: loads the workspace rooted at
    /// `self.path`, collects item trees, def maps and bodies, then runs the
    /// selected analysis phases (lang items, body lowering, inference, MIR,
    /// data layout, const eval, optional IDE passes and term search),
    /// timing each phase and emitting metrics via `report_metric`.
    pub fn run(self, verbosity: Verbosity) -> anyhow::Result<()> {
        // RNG used only when `--randomize` is set, to shuffle processing
        // order and shake out order-dependent bugs. Seeded from wall-clock time.
        let mut rng = {
            let seed = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_millis() as u64;
            Rand32::new(seed)
        };

        let cargo_config = CargoConfig {
            sysroot: match self.no_sysroot {
                true => None,
                false => Some(RustLibSource::Discover),
            },
            all_targets: true,
            set_test: !self.no_test,
            cfg_overrides: CfgOverrides {
                // Globally enable `cfg(miri)` so miri-gated code is analyzed too.
                global: CfgDiff::new(vec![CfgAtom::Flag(hir::sym::miri)], vec![]),
                selective: Default::default(),
            },
            ..Default::default()
        };
        let no_progress = &|_| ();

        let mut db_load_sw = self.stop_watch();

        let path = AbsPathBuf::assert_utf8(env::current_dir()?.join(&self.path));
        let manifest = ProjectManifest::discover_single(&path)?;

        let mut workspace = ProjectWorkspace::load(manifest, &cargo_config, no_progress)?;
        let metadata_time = db_load_sw.elapsed();
        let load_cargo_config = LoadCargoConfig {
            load_out_dirs_from_check: !self.disable_build_scripts,
            with_proc_macro_server: if self.disable_proc_macros {
                ProcMacroServerChoice::None
            } else {
                match self.proc_macro_srv {
                    Some(ref path) => {
                        let path = vfs::AbsPathBuf::assert_utf8(path.to_owned());
                        ProcMacroServerChoice::Explicit(path)
                    }
                    None => ProcMacroServerChoice::Sysroot,
                }
            },
            prefill_caches: false,
            proc_macro_processes: 1,
        };

        // Run build scripts up front (unless disabled) so OUT_DIR contents and
        // generated code are available to the analysis; timed separately.
        let build_scripts_time = if self.disable_build_scripts {
            None
        } else {
            let mut build_scripts_sw = self.stop_watch();
            let bs = workspace.run_build_scripts(&cargo_config, no_progress)?;
            workspace.set_build_scripts(bs);
            Some(build_scripts_sw.elapsed())
        };

        let (db, vfs, _proc_macro) =
            load_workspace(workspace.clone(), &cargo_config.extra_env, &load_cargo_config)?;
        eprint!("{:<20} {}", "Database loaded:", db_load_sw.elapsed());
        eprint!(" (metadata {metadata_time}");
        if let Some(build_scripts_time) = build_scripts_time {
            eprint!("; build {build_scripts_time}");
        }
        eprintln!(")");

        let mut host = AnalysisHost::with_database(db);
        let db = host.raw_database();

        let mut analysis_sw = self.stop_watch();

        let mut krates = Crate::all(db);
        if self.randomize {
            shuffle(&mut rng, &mut krates);
        }

        // Phase 1: build the item tree of every `.rs` file, gathering line
        // counts and item statistics, split workspace vs. dependencies.
        let mut item_tree_sw = self.stop_watch();
        let source_roots = krates
            .iter()
            .cloned()
            .map(|krate| db.file_source_root(krate.root_file(db)).source_root_id(db))
            .unique();

        let mut dep_loc = 0;
        let mut workspace_loc = 0;
        let mut dep_item_trees = 0;
        let mut workspace_item_trees = 0;

        let mut workspace_item_stats = PrettyItemStats::default();
        let mut dep_item_stats = PrettyItemStats::default();

        for source_root_id in source_roots {
            let source_root = db.source_root(source_root_id).source_root(db);
            for file_id in source_root.iter() {
                if let Some(p) = source_root.path_for_file(&file_id)
                    && let Some((_, Some("rs"))) = p.name_and_extension()
                {
                    // `--with-deps` folds library (dependency) files into the
                    // workspace bucket; otherwise they are tallied separately.
                    if !source_root.is_library || self.with_deps {
                        let length = db.file_text(file_id).text(db).lines().count();
                        let item_stats = db
                            .file_item_tree(
                                EditionedFileId::current_edition_guess_origin(db, file_id).into(),
                            )
                            .item_tree_stats()
                            .into();

                        workspace_loc += length;
                        workspace_item_trees += 1;
                        workspace_item_stats += item_stats;
                    } else {
                        let length = db.file_text(file_id).text(db).lines().count();
                        let item_stats = db
                            .file_item_tree(
                                EditionedFileId::current_edition_guess_origin(db, file_id).into(),
                            )
                            .item_tree_stats()
                            .into();

                        dep_loc += length;
                        dep_item_trees += 1;
                        dep_item_stats += item_stats;
                    }
                }
            }
        }
        eprintln!(" item trees: {workspace_item_trees}");
        let item_tree_time = item_tree_sw.elapsed();

        eprintln!(
            " dependency lines of code: {}, item trees: {}",
            UsizeWithUnderscore(dep_loc),
            UsizeWithUnderscore(dep_item_trees),
        );
        eprintln!(" dependency item stats: {dep_item_stats}");

        eprintln!("{:<20} {}", "Item Tree Collection:", item_tree_time);
        report_metric("item tree time", item_tree_time.time.as_millis() as u64, "ms");
        eprintln!(" Total Statistics:");

        // Phase 2: compute every crate's def map, breadth-first over the
        // module tree, collecting bodies and ADTs for the later phases.
        let mut crate_def_map_sw = self.stop_watch();
        let mut num_crates = 0;
        let mut visited_modules = FxHashSet::default();
        let mut visit_queue = Vec::new();
        for &krate in &krates {
            let module = krate.root_module(db);
            let file_id = module.definition_source_file_id(db);
            let file_id = file_id.original_file(db);

            let source_root = db.file_source_root(file_id.file_id(db)).source_root_id(db);
            let source_root = db.source_root(source_root).source_root(db);
            if !source_root.is_library || self.with_deps {
                num_crates += 1;
                visit_queue.push(module);
            }
        }

        if self.randomize {
            shuffle(&mut rng, &mut visit_queue);
        }

        eprint!(" crates: {num_crates}");
        let mut num_decls = 0;
        let mut bodies = Vec::new();
        let mut adts = Vec::new();
        let mut file_ids = Vec::new();

        let mut num_traits = 0;
        let mut num_macro_rules_macros = 0;
        let mut num_proc_macros = 0;

        while let Some(module) = visit_queue.pop() {
            if visited_modules.insert(module) {
                file_ids.extend(module.as_source_file_id(db));
                visit_queue.extend(module.children(db));

                for decl in module.declarations(db) {
                    num_decls += 1;
                    match decl {
                        ModuleDef::Function(f) => bodies.push(DefWithBody::from(f)),
                        ModuleDef::Adt(a) => {
                            // Enum variants carry bodies too (discriminant
                            // expressions), so collect each one.
                            if let Adt::Enum(e) = a {
                                for v in e.variants(db) {
                                    bodies.push(DefWithBody::from(v));
                                }
                            }
                            adts.push(a)
                        }
                        ModuleDef::Const(c) => {
                            bodies.push(DefWithBody::from(c));
                        }
                        ModuleDef::Static(s) => bodies.push(DefWithBody::from(s)),
                        ModuleDef::Trait(_) => num_traits += 1,
                        ModuleDef::Macro(m) => match m.kind(db) {
                            hir::MacroKind::Declarative => num_macro_rules_macros += 1,
                            hir::MacroKind::Derive
                            | hir::MacroKind::Attr
                            | hir::MacroKind::ProcMacro => num_proc_macros += 1,
                            _ => (),
                        },
                        _ => (),
                    };
                }

                // Associated items in impls also contribute bodies.
                for impl_def in module.impl_defs(db) {
                    for item in impl_def.items(db) {
                        num_decls += 1;
                        match item {
                            AssocItem::Function(f) => bodies.push(DefWithBody::from(f)),
                            AssocItem::Const(c) => {
                                bodies.push(DefWithBody::from(c));
                            }
                            _ => (),
                        }
                    }
                }
            }
        }
        eprintln!(
            ", mods: {}, decls: {num_decls}, bodies: {}, adts: {}, consts: {}",
            visited_modules.len(),
            bodies.len(),
            adts.len(),
            bodies
                .iter()
                .filter(|it| matches!(it, DefWithBody::Const(_) | DefWithBody::Static(_)))
                .count(),
        );

        eprintln!(" Workspace:");
        eprintln!(
            " traits: {num_traits}, macro_rules macros: {num_macro_rules_macros}, proc_macros: {num_proc_macros}"
        );
        eprintln!(
            " lines of code: {}, item trees: {}",
            UsizeWithUnderscore(workspace_loc),
            UsizeWithUnderscore(workspace_item_trees),
        );
        eprintln!(" usages: {workspace_item_stats}");

        eprintln!(" Dependencies:");
        eprintln!(
            " lines of code: {}, item trees: {}",
            UsizeWithUnderscore(dep_loc),
            UsizeWithUnderscore(dep_item_trees),
        );
        eprintln!(" declarations: {dep_item_stats}");

        let crate_def_map_time = crate_def_map_sw.elapsed();
        eprintln!("{:<20} {}", "Item Collection:", crate_def_map_time);
        report_metric("crate def map time", crate_def_map_time.time.as_millis() as u64, "ms");

        if self.randomize {
            shuffle(&mut rng, &mut bodies);
        }

        // Phase 3: the analysis passes proper. `attach_db` makes the database
        // available to hir for the duration of the closure.
        hir::attach_db(db, || {
            if !self.skip_lang_items {
                self.run_lang_items(db, &krates, verbosity);
            }

            if !self.skip_lowering {
                self.run_body_lowering(db, &vfs, &bodies, verbosity);
            }

            if !self.skip_inference {
                self.run_inference(db, &vfs, &bodies, verbosity);
            }

            if !self.skip_mir_stats {
                self.run_mir_lowering(db, &bodies, verbosity);
            }

            if !self.skip_data_layout {
                self.run_data_layout(db, &adts, verbosity);
            }

            if !self.skip_const_eval {
                self.run_const_eval(db, &bodies, verbosity);
            }
        });

        file_ids.sort();
        file_ids.dedup();

        if self.run_all_ide_things {
            self.run_ide_things(host.analysis(), &file_ids, db, &vfs, verbosity);
        }

        if self.run_term_search {
            self.run_term_search(&workspace, db, &vfs, &file_ids, verbosity);
        }

        // Evict caches before taking the final memory measurement so it
        // reflects retained state rather than transient caches.
        let db = host.raw_database_mut();
        db.trigger_lru_eviction();
        hir::clear_tls_solver_cache();
        // SAFETY: depends on `hir::collect_ty_garbage`'s contract — assumed
        // upheld here since no interned Ty values are used afterwards.
        // NOTE(review): confirm against that function's documented invariants.
        unsafe { hir::collect_ty_garbage() };

        let total_span = analysis_sw.elapsed();
        eprintln!("{:<20} {total_span}", "Total:");
        report_metric("total time", total_span.time.as_millis() as u64, "ms");
        if let Some(instructions) = total_span.instructions {
            report_metric("total instructions", instructions, "#instr");
        }
        report_metric("total memory", total_span.memory.allocated.megabytes() as u64, "MB");

        if verbosity.is_verbose() {
            print_memory_usage(host, vfs);
        }

        Ok(())
    }
377
378 fn run_data_layout(&self, db: &RootDatabase, adts: &[hir::Adt], verbosity: Verbosity) {
379 let mut sw = self.stop_watch();
380 let mut all = 0;
381 let mut fail = 0;
382 for &a in adts {
383 let interner = DbInterner::new_no_crate(db);
384 let generic_params = db.generic_params(a.into());
385 if generic_params.iter_type_or_consts().next().is_some()
386 || generic_params.iter_lt().next().is_some()
387 {
388 continue;
390 }
391 all += 1;
392 let Err(e) = db.layout_of_adt(
393 hir_def::AdtId::from(a),
394 GenericArgs::empty(interner).store(),
395 hir_ty::ParamEnvAndCrate {
396 param_env: db.trait_environment(a.into()),
397 krate: a.krate(db).into(),
398 }
399 .store(),
400 ) else {
401 continue;
402 };
403 if verbosity.is_spammy() {
404 let full_name = full_name_of_item(db, a.module(db), a.name(db));
405 println!("Data layout for {full_name} failed due {e:?}");
406 }
407 fail += 1;
408 }
409 let data_layout_time = sw.elapsed();
410 eprintln!("{:<20} {}", "Data layouts:", data_layout_time);
411 eprintln!("Failed data layouts: {fail} ({}%)", percentage(fail, all));
412 report_metric("failed data layouts", fail, "#");
413 report_metric("data layout time", data_layout_time.time.as_millis() as u64, "ms");
414 }
415
416 fn run_const_eval(&self, db: &RootDatabase, bodies: &[DefWithBody], verbosity: Verbosity) {
417 let len = bodies
418 .iter()
419 .filter(|body| matches!(body, DefWithBody::Const(_) | DefWithBody::Static(_)))
420 .count();
421 let mut bar = match verbosity {
422 Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
423 _ if self.parallel || self.output.is_some() => ProgressReport::hidden(),
424 _ => ProgressReport::new(len),
425 };
426
427 let mut sw = self.stop_watch();
428 let mut all = 0;
429 let mut fail = 0;
430 for &b in bodies {
431 bar.set_message(move || format!("const eval: {}", full_name(db, b, b.module(db))));
432 let res = match b {
433 DefWithBody::Const(c) => c.eval(db),
434 DefWithBody::Static(s) => s.eval(db),
435 _ => continue,
436 };
437 bar.inc(1);
438 all += 1;
439 let Err(error) = res else {
440 continue;
441 };
442 if verbosity.is_spammy() {
443 let full_name =
444 full_name_of_item(db, b.module(db), b.name(db).unwrap_or(Name::missing()));
445 bar.println(format!("Const eval for {full_name} failed due {error:?}"));
446 }
447 fail += 1;
448 }
449 bar.finish_and_clear();
450 let const_eval_time = sw.elapsed();
451 eprintln!("{:<20} {}", "Const evaluation:", const_eval_time);
452 eprintln!("Failed const evals: {fail} ({}%)", percentage(fail, all));
453 report_metric("failed const evals", fail, "#");
454 report_metric("const eval time", const_eval_time.time.as_millis() as u64, "ms");
455 }
456
457 fn run_term_search(
459 &self,
460 ws: &ProjectWorkspace,
461 db: &RootDatabase,
462 vfs: &Vfs,
463 file_ids: &[EditionedFileId],
464 verbosity: Verbosity,
465 ) {
466 let cargo_config = CargoConfig {
467 sysroot: match self.no_sysroot {
468 true => None,
469 false => Some(RustLibSource::Discover),
470 },
471 all_targets: true,
472 ..Default::default()
473 };
474
475 let mut bar = match verbosity {
476 Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
477 _ if self.parallel || self.output.is_some() => ProgressReport::hidden(),
478 _ => ProgressReport::new(file_ids.len()),
479 };
480
481 #[derive(Debug, Default)]
482 struct Acc {
483 tail_expr_syntax_hits: u64,
484 tail_expr_no_term: u64,
485 total_tail_exprs: u64,
486 error_codes: FxHashMap<String, u32>,
487 syntax_errors: u32,
488 }
489
490 let mut acc: Acc = Default::default();
491 bar.tick();
492 let mut sw = self.stop_watch();
493
494 for &file_id in file_ids {
495 let file_id = file_id.editioned_file_id(db);
496 let sema = hir::Semantics::new(db);
497 let display_target = match sema.first_crate(file_id.file_id()) {
498 Some(krate) => krate.to_display_target(sema.db),
499 None => continue,
500 };
501
502 let parse = sema.parse_guess_edition(file_id.into());
503 let file_txt = db.file_text(file_id.into());
504 let path = vfs.file_path(file_id.into()).as_path().unwrap();
505
506 for node in parse.syntax().descendants() {
507 let expr = match syntax::ast::Expr::cast(node.clone()) {
508 Some(it) => it,
509 None => continue,
510 };
511 let block = match syntax::ast::BlockExpr::cast(expr.syntax().clone()) {
512 Some(it) => it,
513 None => continue,
514 };
515 let target_ty = match sema.type_of_expr(&expr) {
516 Some(it) => it.adjusted(),
517 None => continue, };
519
520 let expected_tail = match block.tail_expr() {
521 Some(it) => it,
522 None => continue,
523 };
524
525 if expected_tail.is_block_like() {
526 continue;
527 }
528
529 let range = sema.original_range(expected_tail.syntax()).range;
530 let original_text: String = db
531 .file_text(file_id.into())
532 .text(db)
533 .chars()
534 .skip(usize::from(range.start()))
535 .take(usize::from(range.end()) - usize::from(range.start()))
536 .collect();
537
538 let scope = match sema.scope(expected_tail.syntax()) {
539 Some(it) => it,
540 None => continue,
541 };
542
543 let ctx = hir::term_search::TermSearchCtx {
544 sema: &sema,
545 scope: &scope,
546 goal: target_ty,
547 config: hir::term_search::TermSearchConfig {
548 enable_borrowcheck: true,
549 ..Default::default()
550 },
551 };
552 let found_terms = hir::term_search::term_search(&ctx);
553
554 if found_terms.is_empty() {
555 acc.tail_expr_no_term += 1;
556 acc.total_tail_exprs += 1;
557 continue;
559 };
560
561 fn trim(s: &str) -> String {
562 s.chars().filter(|c| !c.is_whitespace()).collect()
563 }
564
565 let todo = syntax::ast::make::ext::expr_todo().to_string();
566 let mut formatter = |_: &hir::Type<'_>| todo.clone();
567 let mut syntax_hit_found = false;
568 for term in found_terms {
569 let generated = term
570 .gen_source_code(
571 &scope,
572 &mut formatter,
573 FindPathConfig {
574 prefer_no_std: false,
575 prefer_prelude: true,
576 prefer_absolute: false,
577 allow_unstable: true,
578 },
579 display_target,
580 )
581 .unwrap();
582 syntax_hit_found |= trim(&original_text) == trim(&generated);
583
584 let mut txt = file_txt.text(db).to_string();
586
587 let edit = ide::TextEdit::replace(range, generated.clone());
588 edit.apply(&mut txt);
589
590 if self.validate_term_search {
591 std::fs::write(path, txt).unwrap();
592
593 let res = ws.run_build_scripts(&cargo_config, &|_| ()).unwrap();
594 if let Some(err) = res.error()
595 && err.contains("error: could not compile")
596 {
597 if let Some(mut err_idx) = err.find("error[E") {
598 err_idx += 7;
599 let err_code = &err[err_idx..err_idx + 4];
600 match err_code {
601 "0282" | "0283" => continue, "0277" | "0308" if generated.contains(&todo) => continue, "0599"
606 if err.contains(
607 "the following trait is implemented but not in scope",
608 ) =>
609 {
610 continue;
611 }
612 _ => (),
613 }
614 bar.println(err);
615 bar.println(generated);
616 acc.error_codes
617 .entry(err_code.to_owned())
618 .and_modify(|n| *n += 1)
619 .or_insert(1);
620 } else {
621 acc.syntax_errors += 1;
622 bar.println(format!("Syntax error: \n{err}"));
623 }
624 }
625 }
626 }
627
628 if syntax_hit_found {
629 acc.tail_expr_syntax_hits += 1;
630 }
631 acc.total_tail_exprs += 1;
632
633 let msg = move || {
634 format!(
635 "processing: {:<50}",
636 trim(&original_text).chars().take(50).collect::<String>()
637 )
638 };
639 if verbosity.is_spammy() {
640 bar.println(msg());
641 }
642 bar.set_message(msg);
643 }
644 if self.validate_term_search {
646 std::fs::write(path, file_txt.text(db).to_string()).unwrap();
647 }
648
649 bar.inc(1);
650 }
651 let term_search_time = sw.elapsed();
652
653 bar.println(format!(
654 "Tail Expr syntactic hits: {}/{} ({}%)",
655 acc.tail_expr_syntax_hits,
656 acc.total_tail_exprs,
657 percentage(acc.tail_expr_syntax_hits, acc.total_tail_exprs)
658 ));
659 bar.println(format!(
660 "Tail Exprs found: {}/{} ({}%)",
661 acc.total_tail_exprs - acc.tail_expr_no_term,
662 acc.total_tail_exprs,
663 percentage(acc.total_tail_exprs - acc.tail_expr_no_term, acc.total_tail_exprs)
664 ));
665 if self.validate_term_search {
666 bar.println(format!(
667 "Tail Exprs total errors: {}, syntax errors: {}, error codes:",
668 acc.error_codes.values().sum::<u32>() + acc.syntax_errors,
669 acc.syntax_errors,
670 ));
671 for (err, count) in acc.error_codes {
672 bar.println(format!(
673 " E{err}: {count:>5} (https://doc.rust-lang.org/error_codes/E{err}.html)"
674 ));
675 }
676 }
677 bar.println(format!(
678 "Term search avg time: {}ms",
679 term_search_time.time.as_millis() as u64 / acc.total_tail_exprs
680 ));
681 bar.println(format!("{:<20} {}", "Term search:", term_search_time));
682 report_metric("term search time", term_search_time.time.as_millis() as u64, "ms");
683
684 bar.finish_and_clear();
685 }
686
687 fn run_mir_lowering(&self, db: &RootDatabase, bodies: &[DefWithBody], verbosity: Verbosity) {
688 let mut bar = match verbosity {
689 Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
690 _ if self.parallel || self.output.is_some() => ProgressReport::hidden(),
691 _ => ProgressReport::new(bodies.len()),
692 };
693 let mut sw = self.stop_watch();
694 let mut all = 0;
695 let mut fail = 0;
696 for &body in bodies {
697 bar.set_message(move || {
698 format!("mir lowering: {}", full_name(db, body, body.module(db)))
699 });
700 bar.inc(1);
701 if matches!(body, DefWithBody::Variant(_)) {
702 continue;
703 }
704 let module = body.module(db);
705 if !self.should_process(db, body, module) {
706 continue;
707 }
708
709 all += 1;
710 let Ok(body_id) = body.try_into() else {
711 continue;
712 };
713 let Err(e) = db.mir_body(body_id) else {
714 continue;
715 };
716 if verbosity.is_spammy() {
717 let full_name = module
718 .path_to_root(db)
719 .into_iter()
720 .rev()
721 .filter_map(|it| it.name(db))
722 .chain(Some(body.name(db).unwrap_or_else(Name::missing)))
723 .map(|it| it.display(db, Edition::LATEST).to_string())
724 .join("::");
725 bar.println(format!("Mir body for {full_name} failed due {e:?}"));
726 }
727 fail += 1;
728 bar.tick();
729 }
730 let mir_lowering_time = sw.elapsed();
731 bar.finish_and_clear();
732 eprintln!("{:<20} {}", "MIR lowering:", mir_lowering_time);
733 eprintln!("Mir failed bodies: {fail} ({}%)", percentage(fail, all));
734 report_metric("mir failed bodies", fail, "#");
735 report_metric("mir lowering time", mir_lowering_time.time.as_millis() as u64, "ms");
736 }
737
    /// Runs type inference on every body and gathers statistics: counts of
    /// expressions/patterns whose inferred type is fully or partially
    /// unknown, type mismatches, and inference panics. With `--parallel`,
    /// a rayon pre-pass first computes inference on all bodies; with
    /// `--output csv`, per-location rows are printed to stdout.
    fn run_inference(
        &self,
        db: &RootDatabase,
        vfs: &Vfs,
        bodies: &[DefWithBody],
        verbosity: Verbosity,
    ) {
        let mut bar = match verbosity {
            Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
            _ if self.parallel || self.output.is_some() => ProgressReport::hidden(),
            _ => ProgressReport::new(bodies.len()),
        };

        if self.parallel {
            // Parallel pre-pass: run inference for each body on rayon
            // workers, each with its own database clone; timed separately.
            let mut inference_sw = self.stop_watch();
            let bodies = bodies.iter().filter_map(|&body| body.try_into().ok()).collect::<Vec<_>>();
            bodies
                .par_iter()
                .map_with(db.clone(), |snap, &body| {
                    snap.body(body);
                    InferenceResult::for_body(snap, body);
                })
                .count();
            eprintln!("{:<20} {}", "Parallel Inference:", inference_sw.elapsed());
        }

        let mut inference_sw = self.stop_watch();
        bar.tick();
        let mut num_exprs = 0;
        let mut num_exprs_unknown = 0;
        let mut num_exprs_partially_unknown = 0;
        let mut num_expr_type_mismatches = 0;
        let mut num_pats = 0;
        let mut num_pats_unknown = 0;
        let mut num_pats_partially_unknown = 0;
        let mut num_pat_type_mismatches = 0;
        let mut panics = 0;
        for &body_id in bodies {
            let Ok(body_def_id) = body_id.try_into() else { continue };
            let name = body_id.name(db).unwrap_or_else(Name::missing);
            let module = body_id.module(db);
            let display_target = module.krate(db).to_display_target(db);
            // `--only NAME` restricts the pass to bodies matching either the
            // bare name or the fully qualified name.
            if let Some(only_name) = self.only.as_deref()
                && name.display(db, Edition::LATEST).to_string() != only_name
                && full_name(db, body_id, module) != only_name
            {
                continue;
            }
            // Progress message; verbose mode includes file path and range.
            let msg = move || {
                if verbosity.is_verbose() {
                    let source = match body_id {
                        DefWithBody::Function(it) => it.source(db).map(|it| it.syntax().cloned()),
                        DefWithBody::Static(it) => it.source(db).map(|it| it.syntax().cloned()),
                        DefWithBody::Const(it) => it.source(db).map(|it| it.syntax().cloned()),
                        DefWithBody::Variant(it) => it.source(db).map(|it| it.syntax().cloned()),
                    };
                    if let Some(src) = source {
                        let original_file = src.file_id.original_file(db);
                        let path = vfs.file_path(original_file.file_id(db));
                        let syntax_range = src.text_range();
                        format!(
                            "processing: {} ({} {:?})",
                            full_name(db, body_id, module),
                            path,
                            syntax_range
                        )
                    } else {
                        format!("processing: {}", full_name(db, body_id, module))
                    }
                } else {
                    format!("processing: {}", full_name(db, body_id, module))
                }
            };
            if verbosity.is_spammy() {
                bar.println(msg());
            }
            bar.set_message(msg);
            let body = db.body(body_def_id);
            // Inference may panic on pathological bodies; catch it so a
            // single bad body doesn't abort the entire run.
            let inference_result =
                catch_unwind(AssertUnwindSafe(|| InferenceResult::for_body(db, body_def_id)));
            let inference_result = match inference_result {
                Ok(inference_result) => inference_result,
                Err(p) => {
                    // Recover the panic message when it is a string payload.
                    if let Some(s) = p.downcast_ref::<&str>() {
                        eprintln!("infer panicked for {}: {}", full_name(db, body_id, module), s);
                    } else if let Some(s) = p.downcast_ref::<String>() {
                        eprintln!("infer panicked for {}: {}", full_name(db, body_id, module), s);
                    } else {
                        eprintln!("infer panicked for {}", full_name(db, body_id, module));
                    }
                    panics += 1;
                    bar.inc(1);
                    continue;
                }
            };
            // The source map is only needed when printing locations, so
            // fetch it lazily via this closure.
            let sm = || db.body_with_source_map(body_def_id).1;

            // --- Expressions: count unknown / partially-unknown types and
            // mismatches; remember previous totals for per-body deltas.
            let (previous_exprs, previous_unknown, previous_partially_unknown) =
                (num_exprs, num_exprs_unknown, num_exprs_partially_unknown);
            for (expr_id, _) in body.exprs() {
                let ty = inference_result.expr_ty(expr_id);
                num_exprs += 1;
                let unknown_or_partial = if ty.is_ty_error() {
                    num_exprs_unknown += 1;
                    if verbosity.is_spammy() {
                        if let Some((path, start, end)) = expr_syntax_range(db, vfs, &sm(), expr_id)
                        {
                            bar.println(format!(
                                "{} {}:{}-{}:{}: Unknown type",
                                path,
                                start.line + 1,
                                start.col,
                                end.line + 1,
                                end.col,
                            ));
                        } else {
                            bar.println(format!(
                                "{}: Unknown type",
                                name.display(db, Edition::LATEST)
                            ));
                        }
                    }
                    true
                } else {
                    let is_partially_unknown = ty.references_non_lt_error();
                    if is_partially_unknown {
                        num_exprs_partially_unknown += 1;
                    }
                    is_partially_unknown
                };
                if self.only.is_some() && verbosity.is_spammy() {
                    // In `--only` + spammy mode, print every expression type.
                    if let Some((_, start, end)) = expr_syntax_range(db, vfs, &sm(), expr_id) {
                        bar.println(format!(
                            "{}:{}-{}:{}: {}",
                            start.line + 1,
                            start.col,
                            end.line + 1,
                            end.col,
                            ty.display(db, display_target)
                        ));
                    } else {
                        bar.println(format!(
                            "unknown location: {}",
                            ty.display(db, display_target)
                        ));
                    }
                }
                if unknown_or_partial && self.output == Some(OutputFormat::Csv) {
                    println!(
                        r#"{},type,"{}""#,
                        location_csv_expr(db, vfs, &sm(), expr_id),
                        ty.display(db, display_target)
                    );
                }
                if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr_id) {
                    num_expr_type_mismatches += 1;
                    if verbosity.is_verbose() {
                        if let Some((path, start, end)) = expr_syntax_range(db, vfs, &sm(), expr_id)
                        {
                            bar.println(format!(
                                "{} {}:{}-{}:{}: Expected {}, got {}",
                                path,
                                start.line + 1,
                                start.col,
                                end.line + 1,
                                end.col,
                                mismatch.expected.as_ref().display(db, display_target),
                                mismatch.actual.as_ref().display(db, display_target)
                            ));
                        } else {
                            bar.println(format!(
                                "{}: Expected {}, got {}",
                                name.display(db, Edition::LATEST),
                                mismatch.expected.as_ref().display(db, display_target),
                                mismatch.actual.as_ref().display(db, display_target)
                            ));
                        }
                    }
                    if self.output == Some(OutputFormat::Csv) {
                        println!(
                            r#"{},mismatch,"{}","{}""#,
                            location_csv_expr(db, vfs, &sm(), expr_id),
                            mismatch.expected.as_ref().display(db, display_target),
                            mismatch.actual.as_ref().display(db, display_target)
                        );
                    }
                }
            }
            if verbosity.is_spammy() {
                bar.println(format!(
                    "In {}: {} exprs, {} unknown, {} partial",
                    full_name(db, body_id, module),
                    num_exprs - previous_exprs,
                    num_exprs_unknown - previous_unknown,
                    num_exprs_partially_unknown - previous_partially_unknown
                ));
            }
            // --- Patterns: same accounting as expressions above.
            let (previous_pats, previous_unknown, previous_partially_unknown) =
                (num_pats, num_pats_unknown, num_pats_partially_unknown);
            for (pat_id, _) in body.pats() {
                let ty = inference_result.pat_ty(pat_id);
                num_pats += 1;
                let unknown_or_partial = if ty.is_ty_error() {
                    num_pats_unknown += 1;
                    if verbosity.is_spammy() {
                        if let Some((path, start, end)) = pat_syntax_range(db, vfs, &sm(), pat_id) {
                            bar.println(format!(
                                "{} {}:{}-{}:{}: Unknown type",
                                path,
                                start.line + 1,
                                start.col,
                                end.line + 1,
                                end.col,
                            ));
                        } else {
                            bar.println(format!(
                                "{}: Unknown type",
                                name.display(db, Edition::LATEST)
                            ));
                        }
                    }
                    true
                } else {
                    let is_partially_unknown = ty.references_non_lt_error();
                    if is_partially_unknown {
                        num_pats_partially_unknown += 1;
                    }
                    is_partially_unknown
                };
                if self.only.is_some() && verbosity.is_spammy() {
                    if let Some((_, start, end)) = pat_syntax_range(db, vfs, &sm(), pat_id) {
                        bar.println(format!(
                            "{}:{}-{}:{}: {}",
                            start.line + 1,
                            start.col,
                            end.line + 1,
                            end.col,
                            ty.display(db, display_target)
                        ));
                    } else {
                        bar.println(format!(
                            "unknown location: {}",
                            ty.display(db, display_target)
                        ));
                    }
                }
                if unknown_or_partial && self.output == Some(OutputFormat::Csv) {
                    println!(
                        r#"{},type,"{}""#,
                        location_csv_pat(db, vfs, &sm(), pat_id),
                        ty.display(db, display_target)
                    );
                }
                if let Some(mismatch) = inference_result.type_mismatch_for_pat(pat_id) {
                    num_pat_type_mismatches += 1;
                    if verbosity.is_verbose() {
                        if let Some((path, start, end)) = pat_syntax_range(db, vfs, &sm(), pat_id) {
                            bar.println(format!(
                                "{} {}:{}-{}:{}: Expected {}, got {}",
                                path,
                                start.line + 1,
                                start.col,
                                end.line + 1,
                                end.col,
                                mismatch.expected.as_ref().display(db, display_target),
                                mismatch.actual.as_ref().display(db, display_target)
                            ));
                        } else {
                            bar.println(format!(
                                "{}: Expected {}, got {}",
                                name.display(db, Edition::LATEST),
                                mismatch.expected.as_ref().display(db, display_target),
                                mismatch.actual.as_ref().display(db, display_target)
                            ));
                        }
                    }
                    if self.output == Some(OutputFormat::Csv) {
                        println!(
                            r#"{},mismatch,"{}","{}""#,
                            location_csv_pat(db, vfs, &sm(), pat_id),
                            mismatch.expected.as_ref().display(db, display_target),
                            mismatch.actual.as_ref().display(db, display_target)
                        );
                    }
                }
            }
            if verbosity.is_spammy() {
                bar.println(format!(
                    "In {}: {} pats, {} unknown, {} partial",
                    full_name(db, body_id, module),
                    num_pats - previous_pats,
                    num_pats_unknown - previous_unknown,
                    num_pats_partially_unknown - previous_partially_unknown
                ));
            }
            bar.inc(1);
        }

        bar.finish_and_clear();
        let inference_time = inference_sw.elapsed();
        // Summary: ??ty = fully unknown, ?ty = partially unknown,
        // !ty = type mismatches.
        eprintln!(
            " exprs: {}, ??ty: {} ({}%), ?ty: {} ({}%), !ty: {}",
            num_exprs,
            num_exprs_unknown,
            percentage(num_exprs_unknown, num_exprs),
            num_exprs_partially_unknown,
            percentage(num_exprs_partially_unknown, num_exprs),
            num_expr_type_mismatches
        );
        eprintln!(
            " pats: {}, ??ty: {} ({}%), ?ty: {} ({}%), !ty: {}",
            num_pats,
            num_pats_unknown,
            percentage(num_pats_unknown, num_pats),
            num_pats_partially_unknown,
            percentage(num_pats_partially_unknown, num_pats),
            num_pat_type_mismatches
        );
        eprintln!(" panics: {panics}");
        eprintln!("{:<20} {}", "Inference:", inference_time);
        report_metric("unknown type", num_exprs_unknown, "#");
        report_metric("type mismatches", num_expr_type_mismatches, "#");
        report_metric("pattern unknown type", num_pats_unknown, "#");
        report_metric("pattern type mismatches", num_pat_type_mismatches, "#");
        report_metric("inference time", inference_time.time.as_millis() as u64, "ms");
    }
1072
    /// Lowers every body in `bodies` (forcing `db.body(..)`) and reports how
    /// long the whole pass took, both to stderr and via `report_metric`.
    ///
    /// Bodies filtered out by `--only` (see `should_process`) are skipped.
    fn run_body_lowering(
        &self,
        db: &RootDatabase,
        vfs: &Vfs,
        bodies: &[DefWithBody],
        verbosity: Verbosity,
    ) {
        // Hide the progress bar when output is quiet/spammy or redirected to a file.
        let mut bar = match verbosity {
            Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
            _ if self.output.is_some() => ProgressReport::hidden(),
            _ => ProgressReport::new(bodies.len()),
        };

        let mut sw = self.stop_watch();
        bar.tick();
        for &body_id in bodies {
            // Skip defs that don't convert to a lowerable body id.
            let Ok(body_def_id) = body_id.try_into() else { continue };
            let module = body_id.module(db);
            if !self.should_process(db, body_id, module) {
                continue;
            }
            // Lazily-built progress message; in verbose mode it also resolves
            // the item back to its original file path and syntax range.
            let msg = move || {
                if verbosity.is_verbose() {
                    let source = match body_id {
                        DefWithBody::Function(it) => it.source(db).map(|it| it.syntax().cloned()),
                        DefWithBody::Static(it) => it.source(db).map(|it| it.syntax().cloned()),
                        DefWithBody::Const(it) => it.source(db).map(|it| it.syntax().cloned()),
                        DefWithBody::Variant(it) => it.source(db).map(|it| it.syntax().cloned()),
                    };
                    if let Some(src) = source {
                        let original_file = src.file_id.original_file(db);
                        let path = vfs.file_path(original_file.file_id(db));
                        let syntax_range = src.text_range();
                        format!(
                            "processing: {} ({} {:?})",
                            full_name(db, body_id, module),
                            path,
                            syntax_range
                        )
                    } else {
                        format!("processing: {}", full_name(db, body_id, module))
                    }
                } else {
                    format!("processing: {}", full_name(db, body_id, module))
                }
            };
            if verbosity.is_spammy() {
                bar.println(msg());
            }
            bar.set_message(msg);
            // The actual work being timed: lower the body.
            db.body(body_def_id);
            bar.inc(1);
        }

        bar.finish_and_clear();
        let body_lowering_time = sw.elapsed();
        eprintln!("{:<20} {}", "Body lowering:", body_lowering_time);
        report_metric("body lowering time", body_lowering_time.time.as_millis() as u64, "ms");
    }
1132
1133 fn run_lang_items(&self, db: &RootDatabase, crates: &[Crate], verbosity: Verbosity) {
1134 let mut bar = match verbosity {
1135 Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
1136 _ if self.output.is_some() => ProgressReport::hidden(),
1137 _ => ProgressReport::new(crates.len()),
1138 };
1139
1140 let mut sw = self.stop_watch();
1141 bar.tick();
1142 for &krate in crates {
1143 crate_lang_items(db, krate.into());
1144 bar.inc(1);
1145 }
1146
1147 bar.finish_and_clear();
1148 let time = sw.elapsed();
1149 eprintln!("{:<20} {}", "Crate lang items:", time);
1150 report_metric("crate lang items time", time.time.as_millis() as u64, "ms");
1151 }
1152
    /// Exercises the main IDE entry points (diagnostics, inlay hints,
    /// annotations) over every file, timing the combined pass.
    ///
    /// Each pass gets its own progress bar; results of the IDE queries are
    /// deliberately discarded — only the work itself is of interest here.
    fn run_ide_things(
        &self,
        analysis: Analysis,
        file_ids: &[EditionedFileId],
        db: &RootDatabase,
        vfs: &Vfs,
        verbosity: Verbosity,
    ) {
        let len = file_ids.len();
        // One bar per pass; hidden in quiet/spammy mode, when running in
        // parallel, or when output goes to a file.
        let create_bar = || match verbosity {
            Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
            _ if self.parallel || self.output.is_some() => ProgressReport::hidden(),
            _ => ProgressReport::new(len),
        };

        let mut sw = self.stop_watch();

        // Pass 1: full diagnostics, with (almost) everything switched on so
        // the maximum amount of analysis work is triggered.
        let mut bar = create_bar();
        for &file_id in file_ids {
            let msg = format!("diagnostics: {}", vfs.file_path(file_id.file_id(db)));
            bar.set_message(move || msg.clone());
            _ = analysis.full_diagnostics(
                &DiagnosticsConfig {
                    enabled: true,
                    proc_macros_enabled: true,
                    proc_attr_macros_enabled: true,
                    disable_experimental: false,
                    disabled: Default::default(),
                    expr_fill_default: Default::default(),
                    snippet_cap: SnippetCap::new(true),
                    insert_use: ide_db::imports::insert_use::InsertUseConfig {
                        granularity: ide_db::imports::insert_use::ImportGranularity::Crate,
                        enforce_granularity: true,
                        prefix_kind: hir::PrefixKind::ByCrate,
                        group: true,
                        skip_glob_imports: true,
                    },
                    prefer_no_std: false,
                    prefer_prelude: true,
                    prefer_absolute: false,
                    style_lints: false,
                    term_search_fuel: 400,
                    term_search_borrowck: true,
                    show_rename_conflicts: true,
                },
                ide::AssistResolveStrategy::All,
                analysis.editioned_file_id_to_vfs(file_id),
            );
            bar.inc(1);
        }
        bar.finish_and_clear();

        // Pass 2: inlay hints, again with every hint kind enabled.
        let mut bar = create_bar();
        for &file_id in file_ids {
            let msg = format!("inlay hints: {}", vfs.file_path(file_id.file_id(db)));
            bar.set_message(move || msg.clone());
            _ = analysis.inlay_hints(
                &InlayHintsConfig {
                    render_colons: false,
                    type_hints: true,
                    sized_bound: false,
                    discriminant_hints: ide::DiscriminantHints::Always,
                    parameter_hints: true,
                    parameter_hints_for_missing_arguments: false,
                    generic_parameter_hints: ide::GenericParameterHints {
                        type_hints: true,
                        lifetime_hints: true,
                        const_hints: true,
                    },
                    chaining_hints: true,
                    adjustment_hints: ide::AdjustmentHints::Always,
                    adjustment_hints_disable_reborrows: true,
                    adjustment_hints_mode: ide::AdjustmentHintsMode::Postfix,
                    adjustment_hints_hide_outside_unsafe: false,
                    closure_return_type_hints: ide::ClosureReturnTypeHints::Always,
                    closure_capture_hints: true,
                    binding_mode_hints: true,
                    implicit_drop_hints: true,
                    implied_dyn_trait_hints: true,
                    lifetime_elision_hints: ide::LifetimeElisionHints::Always,
                    param_names_for_lifetime_elision_hints: true,
                    hide_inferred_type_hints: false,
                    hide_named_constructor_hints: false,
                    hide_closure_initialization_hints: false,
                    hide_closure_parameter_hints: false,
                    closure_style: hir::ClosureStyle::ImplFn,
                    max_length: Some(25),
                    closing_brace_hints_min_lines: Some(20),
                    fields_to_resolve: InlayFieldsToResolve::empty(),
                    range_exclusive_hints: true,
                    minicore: MiniCore::default(),
                },
                analysis.editioned_file_id_to_vfs(file_id),
                None,
            );
            bar.inc(1);
        }
        bar.finish_and_clear();

        // Pass 3: annotations, each one also resolved to force the follow-up work.
        let mut bar = create_bar();
        let annotation_config = AnnotationConfig {
            binary_target: true,
            annotate_runnables: true,
            annotate_impls: true,
            annotate_references: false,
            annotate_method_references: false,
            annotate_enum_variant_references: false,
            location: ide::AnnotationLocation::AboveName,
            filter_adjacent_derive_implementations: false,
            minicore: MiniCore::default(),
        };
        for &file_id in file_ids {
            let msg = format!("annotations: {}", vfs.file_path(file_id.file_id(db)));
            bar.set_message(move || msg.clone());
            analysis
                .annotations(&annotation_config, analysis.editioned_file_id_to_vfs(file_id))
                .unwrap()
                .into_iter()
                .for_each(|annotation| {
                    _ = analysis.resolve_annotation(&annotation_config, annotation);
                });
            bar.inc(1);
        }
        bar.finish_and_clear();

        let ide_time = sw.elapsed();
        eprintln!("{:<20} {} ({} files)", "IDE:", ide_time, file_ids.len());
    }
1282
1283 fn should_process(&self, db: &RootDatabase, body_id: DefWithBody, module: hir::Module) -> bool {
1284 if let Some(only_name) = self.only.as_deref() {
1285 let name = body_id.name(db).unwrap_or_else(Name::missing);
1286
1287 if name.display(db, Edition::LATEST).to_string() != only_name
1288 && full_name(db, body_id, module) != only_name
1289 {
1290 return false;
1291 }
1292 }
1293 true
1294 }
1295
    /// Starts a new stopwatch used to time a single analysis phase.
    fn stop_watch(&self) -> StopWatch {
        StopWatch::start()
    }
1299}
1300
1301fn full_name(db: &RootDatabase, body_id: DefWithBody, module: hir::Module) -> String {
1302 module
1303 .krate(db)
1304 .display_name(db)
1305 .map(|it| it.canonical_name().as_str().to_owned())
1306 .into_iter()
1307 .chain(
1308 module
1309 .path_to_root(db)
1310 .into_iter()
1311 .filter_map(|it| it.name(db))
1312 .rev()
1313 .chain(Some(body_id.name(db).unwrap_or_else(Name::missing)))
1314 .map(|it| it.display(db, Edition::LATEST).to_string()),
1315 )
1316 .join("::")
1317}
1318
1319fn location_csv_expr(db: &RootDatabase, vfs: &Vfs, sm: &BodySourceMap, expr_id: ExprId) -> String {
1320 let src = match sm.expr_syntax(expr_id) {
1321 Ok(s) => s,
1322 Err(SyntheticSyntax) => return "synthetic,,".to_owned(),
1323 };
1324 let root = db.parse_or_expand(src.file_id);
1325 let node = src.map(|e| e.to_node(&root).syntax().clone());
1326 let original_range = node.as_ref().original_file_range_rooted(db);
1327 let path = vfs.file_path(original_range.file_id.file_id(db));
1328 let line_index = db.line_index(original_range.file_id.file_id(db));
1329 let text_range = original_range.range;
1330 let (start, end) =
1331 (line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
1332 format!("{path},{}:{},{}:{}", start.line + 1, start.col, end.line + 1, end.col)
1333}
1334
1335fn location_csv_pat(db: &RootDatabase, vfs: &Vfs, sm: &BodySourceMap, pat_id: PatId) -> String {
1336 let src = match sm.pat_syntax(pat_id) {
1337 Ok(s) => s,
1338 Err(SyntheticSyntax) => return "synthetic,,".to_owned(),
1339 };
1340 let root = db.parse_or_expand(src.file_id);
1341 let node = src.map(|e| e.to_node(&root).syntax().clone());
1342 let original_range = node.as_ref().original_file_range_rooted(db);
1343 let path = vfs.file_path(original_range.file_id.file_id(db));
1344 let line_index = db.line_index(original_range.file_id.file_id(db));
1345 let text_range = original_range.range;
1346 let (start, end) =
1347 (line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
1348 format!("{path},{}:{},{}:{}", start.line + 1, start.col, end.line + 1, end.col)
1349}
1350
1351fn expr_syntax_range<'a>(
1352 db: &RootDatabase,
1353 vfs: &'a Vfs,
1354 sm: &BodySourceMap,
1355 expr_id: ExprId,
1356) -> Option<(&'a VfsPath, LineCol, LineCol)> {
1357 let src = sm.expr_syntax(expr_id);
1358 if let Ok(src) = src {
1359 let root = db.parse_or_expand(src.file_id);
1360 let node = src.map(|e| e.to_node(&root).syntax().clone());
1361 let original_range = node.as_ref().original_file_range_rooted(db);
1362 let path = vfs.file_path(original_range.file_id.file_id(db));
1363 let line_index = db.line_index(original_range.file_id.file_id(db));
1364 let text_range = original_range.range;
1365 let (start, end) =
1366 (line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
1367 Some((path, start, end))
1368 } else {
1369 None
1370 }
1371}
1372fn pat_syntax_range<'a>(
1373 db: &RootDatabase,
1374 vfs: &'a Vfs,
1375 sm: &BodySourceMap,
1376 pat_id: PatId,
1377) -> Option<(&'a VfsPath, LineCol, LineCol)> {
1378 let src = sm.pat_syntax(pat_id);
1379 if let Ok(src) = src {
1380 let root = db.parse_or_expand(src.file_id);
1381 let node = src.map(|e| e.to_node(&root).syntax().clone());
1382 let original_range = node.as_ref().original_file_range_rooted(db);
1383 let path = vfs.file_path(original_range.file_id.file_id(db));
1384 let line_index = db.line_index(original_range.file_id.file_id(db));
1385 let text_range = original_range.range;
1386 let (start, end) =
1387 (line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
1388 Some((path, start, end))
1389 } else {
1390 None
1391 }
1392}
1393
1394fn shuffle<T>(rng: &mut Rand32, slice: &mut [T]) {
1395 for i in 0..slice.len() {
1396 randomize_first(rng, &mut slice[i..]);
1397 }
1398
1399 fn randomize_first<T>(rng: &mut Rand32, slice: &mut [T]) {
1400 assert!(!slice.is_empty());
1401 let idx = rng.rand_range(0..slice.len() as u32) as usize;
1402 slice.swap(0, idx);
1403 }
1404}
1405
/// Integer percentage of `n` out of `total`; a zero `total` reports 100%
/// (treating "0 of 0" as complete rather than dividing by zero).
fn percentage(n: u64, total: u64) -> u64 {
    if total == 0 { 100 } else { n * 100 / total }
}
1409
/// A `usize` whose `Display` output groups digits with `_` separators
/// (e.g. `1234567` renders as `1_234_567`) for readable stat dumps.
#[derive(Default, Debug, Eq, PartialEq)]
struct UsizeWithUnderscore(usize);

impl fmt::Display for UsizeWithUnderscore {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let digits = self.0.to_string();

        // Up to three digits need no separators at all.
        if digits.len() <= 3 {
            return f.write_str(&digits);
        }

        let mut grouped = String::with_capacity(digits.len() + digits.len() / 3);
        for (idx, ch) in digits.chars().enumerate() {
            // Insert a separator whenever the number of digits still to come
            // is a multiple of three (never before the first digit).
            if idx > 0 && (digits.len() - idx) % 3 == 0 {
                grouped.push('_');
            }
            grouped.push(ch);
        }
        f.write_str(&grouped)
    }
}

impl std::ops::AddAssign for UsizeWithUnderscore {
    fn add_assign(&mut self, rhs: UsizeWithUnderscore) {
        self.0 += rhs.0;
    }
}
1440
1441#[derive(Default, Debug, Eq, PartialEq)]
1442struct PrettyItemStats {
1443 traits: UsizeWithUnderscore,
1444 impls: UsizeWithUnderscore,
1445 mods: UsizeWithUnderscore,
1446 macro_calls: UsizeWithUnderscore,
1447 macro_rules: UsizeWithUnderscore,
1448}
1449
1450impl From<hir_def::item_tree::ItemTreeDataStats> for PrettyItemStats {
1451 fn from(value: hir_def::item_tree::ItemTreeDataStats) -> Self {
1452 Self {
1453 traits: UsizeWithUnderscore(value.traits),
1454 impls: UsizeWithUnderscore(value.impls),
1455 mods: UsizeWithUnderscore(value.mods),
1456 macro_calls: UsizeWithUnderscore(value.macro_calls),
1457 macro_rules: UsizeWithUnderscore(value.macro_rules),
1458 }
1459 }
1460}
1461
1462impl AddAssign for PrettyItemStats {
1463 fn add_assign(&mut self, rhs: Self) {
1464 self.traits += rhs.traits;
1465 self.impls += rhs.impls;
1466 self.mods += rhs.mods;
1467 self.macro_calls += rhs.macro_calls;
1468 self.macro_rules += rhs.macro_rules;
1469 }
1470}
1471
1472impl fmt::Display for PrettyItemStats {
1473 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1474 write!(
1475 f,
1476 "traits: {}, impl: {}, mods: {}, macro calls: {}, macro rules: {}",
1477 self.traits, self.impls, self.mods, self.macro_calls, self.macro_rules
1478 )
1479 }
1480}
1481
1482