pub mod cli;
pub mod cross_crate;
pub mod examples;
pub mod model;
pub mod remote;
pub mod render;
pub mod resolve;
pub mod rustdoc_json;
pub mod search;
pub mod summary;
/// Deletes cached data for the given remote crate spec.
///
/// Thin public wrapper that delegates directly to `remote::clean_cache`.
pub fn clean_cache(spec: &str) -> anyhow::Result<()> {
    remote::clean_cache(spec)
}
use std::collections::{HashMap, HashSet};
use std::path::{Path, PathBuf};
use anyhow::{Context, Result};
use rustdoc_types::{Id, ItemEnum, Visibility};
use cli::{ApiArgs, ExamplesArgs, FilterArgs, SearchArgs, SummaryArgs};
use model::{CrateModel, compute_reachable_set};
/// Result of analyzing `pub use source::*;` glob re-exports, keyed by the
/// glob's source path.
struct GlobExpansionResult {
    // Source path -> sorted, deduplicated public item names behind the glob.
    item_names: HashMap<String, Vec<String>>,
    // Source path -> parsed crate models (the direct source first, then any
    // crates reached through nested globs), used for full inline rendering.
    source_models: HashMap<String, Vec<CrateModel>>,
}
/// Shared inputs for the api/search/summary pipelines, covering both local
/// workspace targets and temporary workspaces built for remote (`--crates`)
/// specs.
struct PipelineContext {
    // Cargo.toml to operate on; None means the ambient manifest.
    manifest_path: Option<String>,
    // Cargo target directory where rustdoc JSON is generated.
    target_dir: PathBuf,
    // Package whose documentation is generated.
    package_name: String,
    // Optional module path inside the package to narrow rendering to.
    module_path: Option<String>,
    // Package "observing" the target; used to decide same-crate rendering.
    observer_package: Option<String>,
    // Toolchain passed through to cargo rustdoc.
    toolchain: String,
    verbose: bool,
    // Whether previously generated rustdoc JSON may be reused.
    use_cache: bool,
    // Names of workspace member packages; glob expansion regenerates (rather
    // than caches) JSON for these.
    workspace_members: HashSet<String>,
    // Pre-built "// crate name[version]" header line for remote crates.
    crate_header: Option<String>,
    // Keeps the temporary remote workspace directory alive while in use.
    _workspace: Option<remote::WorkspaceDir>,
}
/// Generates rustdoc JSON for the context's package, parses it into a
/// `CrateModel`, and precomputes reachability.
///
/// Returns `(model, same_crate, reachable)`: `same_crate` is true when the
/// observer package is the target crate itself (hyphen/underscore spellings
/// normalized), in which case no reachability set is needed and `reachable`
/// is `None`.
fn generate_and_parse_model(
    ctx: &PipelineContext,
) -> Result<(CrateModel, bool, Option<HashSet<Id>>)> {
    if ctx.verbose {
        eprintln!(
            "[cargo-brief] Running cargo rustdoc for '{}'...",
            ctx.package_name
        );
    }
    let json_path = rustdoc_json::generate_rustdoc_json(
        &ctx.package_name,
        &ctx.toolchain,
        ctx.manifest_path.as_deref(),
        true,
        &ctx.target_dir,
        ctx.verbose,
        ctx.use_cache,
    )
    .with_context(|| format!("Failed to generate rustdoc JSON for '{}'", ctx.package_name))?;
    if ctx.verbose {
        eprintln!("[cargo-brief] Parsing rustdoc JSON...");
    }
    let krate = rustdoc_json::parse_rustdoc_json_cached(&json_path)
        .with_context(|| format!("Failed to parse rustdoc JSON at '{}'", json_path.display()))?;
    let model = CrateModel::from_crate(krate);

    // The observer is "inside" the target crate when the names match, either
    // exactly or after hyphen -> underscore normalization.
    let same_crate = ctx.observer_package.as_ref().is_some_and(|obs| {
        obs == &ctx.package_name || obs.replace('-', "_") == model.crate_name()
    });
    // Reachability filtering is only meaningful from outside the crate.
    let reachable = (!same_crate).then(|| compute_reachable_set(&model));
    Ok((model, same_crate, reachable))
}
/// Entry point for the `api` command: builds a remote or local pipeline
/// context and runs the shared rendering pipeline.
pub fn run_api_pipeline(args: &ApiArgs) -> Result<String> {
    let ctx = match &args.remote.crates {
        Some(spec) => build_remote_context_api(args, spec)?,
        None => build_local_context_api(args)?,
    };
    run_shared_api_pipeline(&ctx, args)
}
/// Builds a pipeline context for a package in the local workspace.
///
/// # Errors
/// Fails when cargo metadata cannot be loaded or the requested target cannot
/// be resolved to a workspace package.
fn build_local_context_api(args: &ApiArgs) -> Result<PipelineContext> {
    if args.global.verbose {
        eprintln!(
            "[cargo-brief] Resolving target '{}'...",
            args.target.crate_name
        );
    }
    let metadata = resolve::load_cargo_metadata(args.target.manifest_path.as_deref())
        .context("Failed to load cargo metadata")?;
    let resolved = resolve::resolve_target(
        &args.target.crate_name,
        args.target.module_path.as_deref(),
        &metadata,
    )
    .context("Failed to resolve target")?;
    // Observer defaults to the current-directory package. `or_else` (instead
    // of `or`) avoids cloning `current_package` when `--at-package` is set.
    let observer_package = args
        .target
        .at_package
        .clone()
        .or_else(|| metadata.current_package.clone());
    Ok(PipelineContext {
        manifest_path: args.target.manifest_path.clone(),
        target_dir: metadata.target_dir,
        package_name: resolved.package_name,
        module_path: resolved.module_path,
        observer_package,
        toolchain: args.global.toolchain.clone(),
        verbose: args.global.verbose,
        use_cache: false,
        workspace_members: metadata.workspace_packages.into_iter().collect(),
        crate_header: None,
        _workspace: None,
    })
}
/// Builds a pipeline context for rendering a remote crate spec's API.
///
/// When no explicit module path was given, a positional `crate::path::to::mod`
/// (or bare `path`) argument is reinterpreted as a module path inside the
/// remote crate.
fn build_remote_context_api(args: &ApiArgs, spec: &str) -> Result<PipelineContext> {
    let module_path = if args.target.crate_name == "self" || args.target.module_path.is_some() {
        args.target.module_path.clone()
    } else {
        match args.target.crate_name.split_once("::") {
            // Drop the leading crate segment; an empty remainder means the root.
            Some((_, rest)) => (!rest.is_empty()).then(|| rest.to_string()),
            None => Some(args.target.crate_name.clone()),
        }
    };
    let (name, _) = remote::parse_crate_spec(spec);
    if args.global.verbose {
        eprintln!("[cargo-brief] Resolving workspace for '{name}'...");
    }
    // Materialize a throwaway workspace that depends on the requested crate.
    let (workspace, resolved_version) =
        remote::resolve_workspace(spec, args.remote.features.as_deref(), args.remote.no_cache)
            .with_context(|| format!("Failed to create workspace for '{name}'"))?;
    let manifest_path = workspace
        .path()
        .join("Cargo.toml")
        .to_string_lossy()
        .into_owned();
    let metadata = resolve::load_cargo_metadata(Some(&manifest_path))
        .context("Failed to load cargo metadata for remote crate")?;
    let crate_header = build_remote_crate_header(
        &name,
        resolved_version.as_deref(),
        workspace.path(),
        args.remote.features.as_deref(),
    );
    Ok(PipelineContext {
        manifest_path: Some(manifest_path),
        target_dir: metadata.target_dir,
        package_name: name,
        module_path,
        observer_package: None,
        toolchain: args.global.toolchain.clone(),
        verbose: args.global.verbose,
        use_cache: true,
        workspace_members: HashSet::new(),
        crate_header,
        _workspace: Some(workspace),
    })
}
/// Runs the shared `api` pipeline: renders the requested module (resolving
/// cross-crate re-exports when the module is not found locally), optionally
/// appends re-exported sub-crates in recursive mode, then splices in glob
/// expansions and, for remote crates, the versioned crate header.
fn run_shared_api_pipeline(ctx: &PipelineContext, args: &ApiArgs) -> Result<String> {
    let (model, same_crate, reachable) = generate_and_parse_model(ctx)?;
    let has_cross_crate = cross_crate::root_has_cross_crate_reexports(&model);
    let mut output = if let Some(ref module_path) = ctx.module_path {
        if model.find_module(module_path).is_some() {
            // The module exists in this crate: render it directly.
            render_and_expand_globs(
                &model,
                Some(module_path),
                args,
                ctx,
                same_crate,
                reachable.as_ref(),
            )?
        } else {
            if ctx.verbose {
                eprintln!(
                    "[cargo-brief] Module '{module_path}' not found locally, trying cross-crate resolution..."
                );
            }
            // The module may live in a crate this one re-exports.
            if let Some(resolution) = cross_crate::resolve_cross_crate_module(
                &model,
                module_path,
                &ctx.toolchain,
                ctx.manifest_path.as_deref(),
                &ctx.target_dir,
                ctx.verbose,
            ) {
                // Render from the sub-crate's model with its own reachability.
                let sub_reachable = Some(compute_reachable_set(&resolution.model));
                let mut output = render::render_module_api(
                    &resolution.model,
                    resolution.inner_module_path.as_deref(),
                    args,
                    None,
                    false,
                    sub_reachable.as_ref(),
                );
                let result = expand_glob_reexports(
                    &resolution.model,
                    resolution.inner_module_path.as_deref(),
                    &ctx.toolchain,
                    ctx.manifest_path.as_deref(),
                    &ctx.target_dir,
                    ctx.verbose,
                    &ctx.workspace_members,
                );
                apply_glob_expansions(&mut output, &result, args.expand_glob, &args.filter);
                output
            } else {
                // Cross-crate resolution failed: fall back to rendering the
                // (missing) module path against the local model.
                render_and_expand_globs(
                    &model,
                    Some(module_path),
                    args,
                    ctx,
                    same_crate,
                    reachable.as_ref(),
                )?
            }
        }
    } else if args.recursive && has_cross_crate {
        // Whole-crate recursive mode: append each re-exported sub-crate's API
        // after the root crate's own output.
        let mut output =
            render_and_expand_globs(&model, None, args, ctx, same_crate, reachable.as_ref())?;
        if ctx.verbose {
            eprintln!("[cargo-brief] Discovering cross-crate re-exports...");
        }
        let sub_crates = cross_crate::discover_all_reexported_crates(
            &model,
            &ctx.toolchain,
            ctx.manifest_path.as_deref(),
            &ctx.target_dir,
            ctx.verbose,
        );
        for sub in &sub_crates {
            let sub_reachable = Some(compute_reachable_set(&sub.model));
            let sub_output = render::render_module_api(
                &sub.model,
                None,
                args,
                None,
                false,
                sub_reachable.as_ref(),
            );
            output.push_str(&format!(
                "\n// --- module {} (from sub-crate {}) ---\n",
                sub.display_name,
                sub.model.crate_name()
            ));
            output.push_str(&sub_output);
        }
        output
    } else {
        render_and_expand_globs(
            &model,
            ctx.module_path.as_deref(),
            args,
            ctx,
            same_crate,
            reachable.as_ref(),
        )?
    };
    // For remote crates, replace the first "// crate ..." line with the
    // pre-built header carrying the resolved version and feature list.
    if let Some(header) = &ctx.crate_header
        && let Some(first_newline) = output.find('\n')
    {
        let first_line = &output[..first_newline];
        if first_line.starts_with("// crate ") {
            output.replace_range(..first_newline, header);
        }
    }
    Ok(output)
}
/// Renders a module's API and then rewrites any `pub use ...::*;` glob lines
/// in the output according to the chosen expansion mode.
fn render_and_expand_globs(
    model: &CrateModel,
    module_path: Option<&str>,
    args: &ApiArgs,
    ctx: &PipelineContext,
    same_crate: bool,
    reachable: Option<&HashSet<Id>>,
) -> Result<String> {
    // `--at-mod` only applies when the observer lives inside the target crate.
    let observer_mod = if same_crate {
        args.target.at_mod.as_deref()
    } else {
        None
    };
    let mut output =
        render::render_module_api(model, module_path, args, observer_mod, same_crate, reachable);
    let globs = expand_glob_reexports(
        model,
        module_path,
        &ctx.toolchain,
        ctx.manifest_path.as_deref(),
        &ctx.target_dir,
        ctx.verbose,
        &ctx.workspace_members,
    );
    apply_glob_expansions(&mut output, &globs, args.expand_glob, &args.filter);
    Ok(output)
}
/// Entry point for the `search` command.
///
/// Performs two argument rewrites before dispatch:
/// - remote mode with only a positional crate name: that name is really the
///   search pattern, and the target becomes the remote crate root;
/// - `--methods-of <TYPE>`: seeds the pattern from the type name when absent
///   and suppresses every non-function item kind.
///
/// Bug fix: the original implementation handled `--methods-of` by recursing
/// into `run_search_pipeline` without clearing `methods_of`, so the recursive
/// call took the same branch again — unconditional infinite recursion (stack
/// overflow) for every `--methods-of` invocation. The adjustments are now
/// applied in place on a `Cow` and control falls through to the pipeline.
pub fn run_search_pipeline(args: &SearchArgs) -> Result<String> {
    let mut args = std::borrow::Cow::Borrowed(args);
    // Remote mode: a lone positional argument is the pattern, not a package.
    if args.remote.crates.is_some() && args.patterns.is_empty() && args.crate_name != "self" {
        let owned = args.to_mut();
        owned.patterns = vec![std::mem::take(&mut owned.crate_name)];
        owned.crate_name = "self".to_string();
    }
    if args.patterns.is_empty() && args.methods_of.is_none() {
        anyhow::bail!("search requires a pattern or --methods-of <TYPE>");
    }
    if args.methods_of.is_some() {
        // --methods-of implies function-only output.
        let owned = args.to_mut();
        if owned.patterns.is_empty() {
            owned.patterns = vec![owned.methods_of.as_ref().unwrap().clone()];
        }
        owned.filter.no_structs = true;
        owned.filter.no_enums = true;
        owned.filter.no_traits = true;
        owned.filter.no_unions = true;
        owned.filter.no_constants = true;
        owned.filter.no_macros = true;
        owned.filter.no_aliases = true;
    }
    let args = args.as_ref();
    let ctx = if let Some(spec) = &args.remote.crates {
        build_remote_context_search(args, spec)?
    } else {
        build_local_context_search(args)?
    };
    run_shared_search_pipeline(&ctx, args)
}
/// Builds a pipeline context for searching a package in the local workspace.
///
/// # Errors
/// Fails when cargo metadata cannot be loaded or the requested target cannot
/// be resolved to a workspace package.
fn build_local_context_search(args: &SearchArgs) -> Result<PipelineContext> {
    if args.global.verbose {
        eprintln!("[cargo-brief] Resolving target '{}'...", args.crate_name);
    }
    let metadata = resolve::load_cargo_metadata(args.manifest_path.as_deref())
        .context("Failed to load cargo metadata")?;
    let resolved = resolve::resolve_target(&args.crate_name, None, &metadata)
        .context("Failed to resolve target")?;
    // `or_else` (instead of `or`) avoids cloning `current_package` when
    // `--at-package` already supplied an observer.
    let observer_package = args
        .at_package
        .clone()
        .or_else(|| metadata.current_package.clone());
    Ok(PipelineContext {
        manifest_path: args.manifest_path.clone(),
        target_dir: metadata.target_dir,
        package_name: resolved.package_name,
        module_path: None,
        observer_package,
        toolchain: args.global.toolchain.clone(),
        verbose: args.global.verbose,
        use_cache: false,
        workspace_members: metadata.workspace_packages.into_iter().collect(),
        crate_header: None,
        _workspace: None,
    })
}
/// Builds a pipeline context for searching a remote crate spec.
fn build_remote_context_search(args: &SearchArgs, spec: &str) -> Result<PipelineContext> {
    let (name, _) = remote::parse_crate_spec(spec);
    if args.global.verbose {
        eprintln!("[cargo-brief] Resolving workspace for '{name}'...");
    }
    // Materialize a throwaway workspace that depends on the requested crate.
    let (workspace, _resolved_version) =
        remote::resolve_workspace(spec, args.remote.features.as_deref(), args.remote.no_cache)
            .with_context(|| format!("Failed to create workspace for '{name}'"))?;
    let manifest_path = workspace
        .path()
        .join("Cargo.toml")
        .to_string_lossy()
        .into_owned();
    let metadata = resolve::load_cargo_metadata(Some(&manifest_path))
        .context("Failed to load cargo metadata for remote crate")?;
    Ok(PipelineContext {
        manifest_path: Some(manifest_path),
        target_dir: metadata.target_dir,
        package_name: name,
        module_path: None,
        observer_package: None,
        toolchain: args.global.toolchain.clone(),
        verbose: args.global.verbose,
        use_cache: true,
        workspace_members: HashSet::new(),
        crate_header: None,
        _workspace: Some(workspace),
    })
}
/// Runs search over the target crate and then over every crate it re-exports
/// at its root, concatenating the per-crate result sections.
fn run_shared_search_pipeline(ctx: &PipelineContext, args: &SearchArgs) -> Result<String> {
    let (model, same_crate, reachable) = generate_and_parse_model(ctx)?;
    let pattern = args.pattern();
    let methods_of = args.methods_of.as_deref();
    let search_kind = args.search_kind.as_deref();
    // One search invocation, parameterized over the model being searched so
    // the same call shape serves the root crate and each sub-crate.
    let search_fn = |model: &CrateModel,
                     observer: Option<&str>,
                     same_crate: bool,
                     reachable: Option<&HashSet<Id>>| {
        search::render_search_filtered(
            model,
            &pattern,
            &args.filter,
            args.limit.as_deref(),
            observer,
            same_crate,
            reachable,
            methods_of,
            search_kind,
        )
    };
    let mut output = search_fn(
        &model,
        // --at-mod only applies when the observer is inside the target crate.
        if same_crate {
            args.at_mod.as_deref()
        } else {
            None
        },
        same_crate,
        reachable.as_ref(),
    );
    if cross_crate::root_has_cross_crate_reexports(&model) {
        if ctx.verbose {
            eprintln!("[cargo-brief] Discovering cross-crate re-exports...");
        }
        let sub_crates = cross_crate::discover_all_reexported_crates(
            &model,
            &ctx.toolchain,
            ctx.manifest_path.as_deref(),
            &ctx.target_dir,
            ctx.verbose,
        );
        for sub in &sub_crates {
            let sub_reachable = Some(compute_reachable_set(&sub.model));
            let sub_output = search_fn(&sub.model, None, false, sub_reachable.as_ref());
            // Suppress empty sub-crate sections unless verbose output is on.
            if !ctx.verbose && sub_output.contains("(0 results)") {
                continue;
            }
            output.push_str(&sub_output);
        }
    }
    Ok(output)
}
/// Entry point for the `examples` command: locates the crate's source root
/// (from a remote spec's workspace or from the local workspace) and renders
/// its example programs.
pub fn run_examples_pipeline(args: &ExamplesArgs) -> Result<String> {
    // Remote mode: a lone positional crate name is really the filter pattern.
    let args =
        if args.remote.crates.is_some() && args.patterns.is_empty() && args.crate_name != "self" {
            let mut args = args.clone();
            args.patterns = vec![std::mem::take(&mut args.crate_name)];
            args.crate_name = "self".to_string();
            std::borrow::Cow::Owned(args)
        } else {
            std::borrow::Cow::Borrowed(args)
        };
    let args = args.as_ref();
    if let Some(spec) = &args.remote.crates {
        let (name, _) = remote::parse_crate_spec(spec);
        if args.global.verbose {
            eprintln!("[cargo-brief] Resolving workspace for '{name}'...");
        }
        let (workspace, resolved_version) =
            remote::resolve_workspace(spec, None, args.remote.no_cache)
                .with_context(|| format!("Failed to create workspace for '{name}'"))?;
        let manifest_path = workspace
            .path()
            .join("Cargo.toml")
            .to_string_lossy()
            .into_owned();
        if args.global.verbose {
            eprintln!("[cargo-brief] Finding source root for '{name}'...");
        }
        let source_root = resolve::find_dep_source_root(&manifest_path, &name)
            .with_context(|| format!("Failed to find source root for '{name}'"))?;
        // Prefer the resolver-reported version; fall back to a workspace lookup.
        let version =
            resolved_version.or_else(|| remote::resolve_crate_version(workspace.path(), &name));
        let crate_display = match version {
            Some(v) => format!("{name}[{v}]"),
            None => name.clone(),
        };
        Ok(examples::render_examples(
            &source_root,
            &crate_display,
            args,
        ))
    } else {
        let metadata = resolve::load_cargo_metadata(args.manifest_path.as_deref())
            .context("Failed to load cargo metadata")?;
        let (pkg_name, source_root) = if args.crate_name == "self" {
            // "self" resolves to the package that owns the current directory.
            let pkg = metadata.current_package.as_ref().ok_or_else(|| {
                anyhow::anyhow!(
                    "Cannot resolve 'self': no package found for the current directory."
                )
            })?;
            let dir = metadata
                .package_manifest_dirs
                .get(pkg)
                .cloned()
                .or(metadata.current_package_manifest_dir.clone())
                .ok_or_else(|| {
                    anyhow::anyhow!("Cannot find manifest directory for package '{pkg}'")
                })?;
            (pkg.clone(), dir)
        } else {
            // Match workspace packages with hyphen/underscore normalization.
            let normalized = args.crate_name.replace('-', "_");
            let found = metadata
                .package_manifest_dirs
                .iter()
                .find(|(k, _)| k.replace('-', "_") == normalized);
            match found {
                Some((name, dir)) => (name.clone(), dir.clone()),
                None => {
                    anyhow::bail!(
                        "Package '{}' not found in workspace. Available: {}",
                        args.crate_name,
                        metadata.workspace_packages.join(", ")
                    );
                }
            }
        };
        if args.global.verbose {
            eprintln!("[cargo-brief] Scanning examples for '{pkg_name}'...");
        }
        Ok(examples::render_examples(&source_root, &pkg_name, args))
    }
}
/// Entry point for the `summary` command: builds a remote or local pipeline
/// context and runs the shared summary pipeline.
pub fn run_summary_pipeline(args: &SummaryArgs) -> Result<String> {
    let ctx = match &args.remote.crates {
        Some(spec) => build_remote_context_summary(args, spec)?,
        None => build_local_context_summary(args)?,
    };
    run_shared_summary_pipeline(&ctx)
}
/// Builds a pipeline context for summarizing a package in the local workspace.
///
/// # Errors
/// Fails when cargo metadata cannot be loaded or the requested target cannot
/// be resolved to a workspace package.
fn build_local_context_summary(args: &SummaryArgs) -> Result<PipelineContext> {
    if args.global.verbose {
        eprintln!(
            "[cargo-brief] Resolving target '{}'...",
            args.target.crate_name
        );
    }
    let metadata = resolve::load_cargo_metadata(args.target.manifest_path.as_deref())
        .context("Failed to load cargo metadata")?;
    let resolved = resolve::resolve_target(
        &args.target.crate_name,
        args.target.module_path.as_deref(),
        &metadata,
    )
    .context("Failed to resolve target")?;
    // `or_else` (instead of `or`) avoids cloning `current_package` when
    // `--at-package` already supplied an observer.
    let observer_package = args
        .target
        .at_package
        .clone()
        .or_else(|| metadata.current_package.clone());
    Ok(PipelineContext {
        manifest_path: args.target.manifest_path.clone(),
        target_dir: metadata.target_dir,
        package_name: resolved.package_name,
        module_path: resolved.module_path,
        observer_package,
        toolchain: args.global.toolchain.clone(),
        verbose: args.global.verbose,
        use_cache: false,
        workspace_members: metadata.workspace_packages.into_iter().collect(),
        crate_header: None,
        _workspace: None,
    })
}
/// Builds a pipeline context for summarizing a remote crate spec.
///
/// When no explicit module path was given, a positional `crate::path::to::mod`
/// (or bare `path`) argument is reinterpreted as a module path inside the
/// remote crate.
fn build_remote_context_summary(args: &SummaryArgs, spec: &str) -> Result<PipelineContext> {
    let module_path = if args.target.crate_name == "self" || args.target.module_path.is_some() {
        args.target.module_path.clone()
    } else {
        match args.target.crate_name.split_once("::") {
            // Drop the leading crate segment; an empty remainder means the root.
            Some((_, rest)) => (!rest.is_empty()).then(|| rest.to_string()),
            None => Some(args.target.crate_name.clone()),
        }
    };
    let (name, _) = remote::parse_crate_spec(spec);
    if args.global.verbose {
        eprintln!("[cargo-brief] Resolving workspace for '{name}'...");
    }
    // Materialize a throwaway workspace that depends on the requested crate.
    let (workspace, resolved_version) =
        remote::resolve_workspace(spec, args.remote.features.as_deref(), args.remote.no_cache)
            .with_context(|| format!("Failed to create workspace for '{name}'"))?;
    let manifest_path = workspace
        .path()
        .join("Cargo.toml")
        .to_string_lossy()
        .into_owned();
    let metadata = resolve::load_cargo_metadata(Some(&manifest_path))
        .context("Failed to load cargo metadata for remote crate")?;
    let crate_header = build_remote_crate_header(
        &name,
        resolved_version.as_deref(),
        workspace.path(),
        args.remote.features.as_deref(),
    );
    Ok(PipelineContext {
        manifest_path: Some(manifest_path),
        target_dir: metadata.target_dir,
        package_name: name,
        module_path,
        observer_package: None,
        toolchain: args.global.toolchain.clone(),
        verbose: args.global.verbose,
        use_cache: true,
        workspace_members: HashSet::new(),
        crate_header,
        _workspace: Some(workspace),
    })
}
/// Renders the summary for the context's package, merging in root-level
/// re-exported sub-crates and (for remote crates) the versioned header line.
fn run_shared_summary_pipeline(ctx: &PipelineContext) -> Result<String> {
    let (model, same_crate, reachable) = generate_and_parse_model(ctx)?;
    let mut output = summary::render_summary(
        &model,
        ctx.module_path.as_deref(),
        same_crate,
        reachable.as_ref(),
    );
    // Only the crate-root view aggregates re-exported sub-crates.
    if ctx.module_path.is_none() && cross_crate::root_has_cross_crate_reexports(&model) {
        if ctx.verbose {
            eprintln!("[cargo-brief] Discovering cross-crate re-exports...");
        }
        let sub_crates = cross_crate::discover_all_reexported_crates(
            &model,
            &ctx.toolchain,
            ctx.manifest_path.as_deref(),
            &ctx.target_dir,
            ctx.verbose,
        );
        for sub in &sub_crates {
            let sub_reachable = Some(compute_reachable_set(&sub.model));
            let sub_output =
                summary::render_summary(&sub.model, None, false, sub_reachable.as_ref());
            summary::merge_sub_crate_summary(&mut output, &sub_output, &sub.display_name);
        }
    }
    // For remote crates, replace the first "// crate ..." line with the
    // pre-built header carrying the resolved version and feature list.
    if let Some(header) = &ctx.crate_header
        && let Some(first_newline) = output.find('\n')
    {
        let first_line = &output[..first_newline];
        if first_line.starts_with("// crate ") {
            output.replace_range(..first_newline, header);
        }
    }
    Ok(output)
}
/// Builds the `// crate name[version]` header line for a remote crate,
/// appending a `features = [...]` list when features were requested.
///
/// Returns `None` when no version can be determined.
fn build_remote_crate_header(
    crate_name: &str,
    resolved_version: Option<&str>,
    workspace_dir: &Path,
    features: Option<&str>,
) -> Option<String> {
    // Prefer the version the resolver reported; otherwise look it up in the
    // generated workspace.
    let version = match resolved_version {
        Some(v) => v.to_string(),
        None => remote::resolve_crate_version(workspace_dir, crate_name)?,
    };
    let mut header = format!("// crate {crate_name}[{version}]");
    if let Some(feats) = features {
        let formatted = feats
            .split(',')
            .map(|f| format!("\"{}\"", f.trim()))
            .collect::<Vec<_>>()
            .join(", ");
        header.push_str(&format!(" features = [{formatted}]"));
    }
    Some(header)
}
/// Rewrites `pub use source::*;` lines in `output` using the expansion data.
///
/// With `--expand-glob`, each glob line becomes the fully rendered items of
/// its source crate(s); otherwise the glob is rewritten as explicit
/// `pub use source::name;` lines. Does nothing when no expansion data exists.
fn apply_glob_expansions(
    output: &mut String,
    result: &GlobExpansionResult,
    expand_glob: bool,
    filter: &FilterArgs,
) {
    if expand_glob && !result.source_models.is_empty() {
        // Names already emitted are tracked across sources to avoid
        // rendering the same item twice.
        let mut seen_names = HashSet::new();
        for (source, models) in &result.source_models {
            let rendered: String = models
                .iter()
                .map(|model| render::render_inlined_items(model, filter, &mut seen_names))
                .collect();
            replace_glob_lines(output, &format!("pub use {source}::*;"), &rendered);
        }
    } else if !result.item_names.is_empty() {
        for (source, items) in &result.item_names {
            let mut replacement = String::new();
            for name in items {
                replacement.push_str(&format!("pub use {source}::{name};\n"));
            }
            replace_glob_lines(output, &format!("pub use {source}::*;"), &replacement);
        }
    }
}
fn replace_glob_lines(output: &mut String, pattern: &str, replacement: &str) {
loop {
let Some((start, end, indent)) = find_normalized_line(output, pattern) else {
break;
};
let indented: String = replacement
.lines()
.map(|l| {
if l.is_empty() {
"\n".to_string()
} else {
format!("{indent}{l}\n")
}
})
.collect();
output.replace_range(start..end, &indented);
}
}
/// Finds the first line of `text` that equals `pattern` after whitespace
/// normalization (runs of whitespace collapse, leading/trailing whitespace
/// ignored).
///
/// Returns `(start, end, indent)`, where `start..end` spans the matched line
/// including its trailing newline (clamped at end-of-text for a final line
/// without one) and `indent` is the line's leading whitespace so callers can
/// re-indent a replacement.
fn find_normalized_line(text: &str, pattern: &str) -> Option<(usize, usize, String)> {
    let mut start = 0;
    for line in text.split('\n') {
        // +1 accounts for the '\n' separator; the last line may lack one,
        // hence the `min` clamp below.
        let end = start + line.len() + 1;
        // Token-wise comparison — avoids allocating a normalized String for
        // every line as the previous collect/join implementation did.
        if line.split_whitespace().eq(pattern.split_whitespace()) {
            let indent = &line[..line.len() - line.trim_start().len()];
            return Some((start, end.min(text.len()), indent.to_string()));
        }
        start = end;
    }
    None
}
fn try_generate_rustdoc_json(
source: &str,
toolchain: &str,
manifest_path: Option<&str>,
target_dir: &Path,
verbose: bool,
use_cache: bool,
) -> Option<PathBuf> {
if let Ok(path) = rustdoc_json::generate_rustdoc_json(
source,
toolchain,
manifest_path,
false,
target_dir,
verbose,
use_cache,
) {
return Some(path);
}
let hyphenated = source.replace('_', "-");
if hyphenated != source {
if let Ok(path) = rustdoc_json::generate_rustdoc_json(
&hyphenated,
toolchain,
manifest_path,
false,
target_dir,
verbose,
use_cache,
) {
return Some(path);
}
}
None
}
/// Finds `pub use some_path::*;` glob re-exports in the targeted module and
/// generates rustdoc JSON for each glob source so its items can be inlined.
///
/// Returns both flat item-name lists (for rewriting globs as explicit
/// `pub use` lines) and the parsed source models (for full `--expand-glob`
/// rendering). Nested globs inside the sources are followed recursively.
fn expand_glob_reexports(
    model: &CrateModel,
    target_module_path: Option<&str>,
    toolchain: &str,
    manifest_path: Option<&str>,
    target_dir: &Path,
    verbose: bool,
    workspace_members: &HashSet<String>,
) -> GlobExpansionResult {
    // Resolve the module whose children are scanned: explicit path or root.
    let target_item = if let Some(path) = target_module_path {
        model.find_module(path)
    } else {
        model.root_module()
    };
    let Some(target_item) = target_item else {
        // Module not found: nothing to expand.
        return GlobExpansionResult {
            item_names: HashMap::new(),
            source_models: HashMap::new(),
        };
    };
    let mut item_names = HashMap::new();
    let mut source_models = HashMap::new();
    for (_id, child) in model.module_children(target_item) {
        // Only glob `use` items are of interest here.
        let ItemEnum::Use(use_item) = &child.inner else {
            continue;
        };
        if !use_item.is_glob {
            continue;
        }
        let source = &use_item.source;
        // Workspace members bypass the JSON cache (checked under both the
        // underscore and hyphen spellings); other dependencies may use it.
        let dep_use_cache = !workspace_members.contains(source.as_str())
            && !workspace_members.contains(&source.replace('_', "-"));
        let Some(json_path) = try_generate_rustdoc_json(
            source,
            toolchain,
            manifest_path,
            target_dir,
            verbose,
            dep_use_cache,
        ) else {
            continue;
        };
        let Ok(source_krate) = rustdoc_json::parse_rustdoc_json_cached(&json_path) else {
            continue;
        };
        let source_model = CrateModel::from_crate(source_krate);
        let mut all_items = Vec::new();
        let mut all_models = Vec::new();
        let mut visited = HashSet::new();
        // Mark the direct source visited so nested globs cannot cycle back.
        visited.insert(source.clone());
        collect_glob_items_recursive(
            &source_model,
            toolchain,
            manifest_path,
            target_dir,
            verbose,
            workspace_members,
            &mut visited,
            &mut all_items,
            &mut all_models,
            0,
        );
        all_items.sort();
        all_items.dedup();
        // The direct source model comes first, followed by nested-glob models.
        let mut models = vec![source_model];
        models.extend(all_models);
        item_names.insert(source.clone(), all_items);
        source_models.insert(source.clone(), models);
    }
    GlobExpansionResult {
        item_names,
        source_models,
    }
}
/// Collects the public, non-module item names at the root of `source_model`,
/// following nested glob re-exports up to `MAX_DEPTH` levels deep.
///
/// Already-followed glob sources are tracked in `visited` to break cycles;
/// each model parsed for a nested glob is appended to `all_models` so it can
/// be rendered later.
fn collect_glob_items_recursive(
    source_model: &CrateModel,
    toolchain: &str,
    manifest_path: Option<&str>,
    target_dir: &Path,
    verbose: bool,
    workspace_members: &HashSet<String>,
    visited: &mut HashSet<String>,
    all_items: &mut Vec<String>,
    all_models: &mut Vec<CrateModel>,
    depth: usize,
) {
    // Hard cap on nested glob chains to bound work on pathological graphs.
    const MAX_DEPTH: usize = 8;
    let Some(root) = source_model.root_module() else {
        return;
    };
    for (_, child) in source_model.module_children(root) {
        if !matches!(child.visibility, Visibility::Public) {
            continue;
        }
        // Modules are skipped: only leaf items are collected for inlining.
        if matches!(child.inner, ItemEnum::Module(_)) {
            continue;
        }
        if let ItemEnum::Use(use_item) = &child.inner {
            if use_item.is_glob {
                if depth >= MAX_DEPTH {
                    continue;
                }
                let nested_source = &use_item.source;
                // `insert` returns false when already present — cycle guard.
                if !visited.insert(nested_source.clone()) {
                    continue;
                }
                if verbose {
                    eprintln!(
                        "[cargo-brief] Following nested glob re-export: {nested_source} (depth {})",
                        depth + 1
                    );
                }
                // Workspace members bypass the rustdoc JSON cache (checked
                // under both the underscore and hyphen spellings).
                let nested_use_cache = !workspace_members.contains(nested_source.as_str())
                    && !workspace_members.contains(&nested_source.replace('_', "-"));
                let Some(json_path) = try_generate_rustdoc_json(
                    nested_source,
                    toolchain,
                    manifest_path,
                    target_dir,
                    verbose,
                    nested_use_cache,
                ) else {
                    continue;
                };
                let Ok(nested_krate) = rustdoc_json::parse_rustdoc_json_cached(&json_path) else {
                    continue;
                };
                let nested_model = CrateModel::from_crate(nested_krate);
                collect_glob_items_recursive(
                    &nested_model,
                    toolchain,
                    manifest_path,
                    target_dir,
                    verbose,
                    workspace_members,
                    visited,
                    all_items,
                    all_models,
                    depth + 1,
                );
                all_models.push(nested_model);
            } else {
                // Named re-export: prefer the item's own name, falling back to
                // the `use` alias. NOTE(review): `.or(Some(..))` always yields
                // Some, so this `if let` can never skip — consider
                // `child.name.as_ref().unwrap_or(&use_item.name)`.
                if let Some(name) = child.name.as_ref().or(Some(&use_item.name)) {
                    all_items.push(name.clone());
                }
            }
        } else {
            // Plain public item defined at this crate's root.
            if let Some(name) = &child.name {
                all_items.push(name.clone());
            }
        }
    }
}