use std::path::Path;
use tree_sitter::{Node, Tree};
use walkdir::WalkDir;
use crate::ast::parser::parse_file;
use crate::semantic::types::{ChunkGranularity, ChunkOptions, CodeChunk};
use crate::{Language, TldrError, TldrResult};
/// Fallback cap, in bytes, applied to a chunk's stored content when
/// `ChunkOptions::max_chunk_size` is 0 (i.e. unset by the caller).
pub const DEFAULT_MAX_CHUNK_SIZE: usize = 4000;
/// Lower-case file extensions treated as binary; such files are skipped
/// outright without attempting to read or parse them.
const BINARY_EXTENSIONS: &[&str] = &[
"exe", "dll", "so", "dylib", "a", "lib", "o", "obj", "png", "jpg", "jpeg", "gif", "bmp", "ico", "svg", "webp", "pdf", "doc", "docx", "xls", "xlsx", "ppt", "pptx", "zip", "tar", "gz", "rar", "7z", "bz2", "mp3", "mp4", "wav", "avi", "mov", "mkv", "wasm", "pyc", "pyo", "class", "db", "sqlite", "sqlite3", "ttf", "otf", "woff", "woff2", "eot", ];
/// A file or directory whose name starts with one of these prefixes is
/// considered hidden and skipped.
const HIDDEN_PREFIXES: &[&str] = &[".", "_"];
/// Directory names that are pruned entirely during directory traversal
/// (dependency caches, build output, VCS metadata, virtualenvs).
const SKIP_DIRECTORIES: &[&str] = &[
    "node_modules",
    "target",
    "__pycache__",
    ".git",
    ".hg",
    ".svn",
    "venv",
    ".venv",
    "env",
    ".env",
    "dist",
    "build",
    "vendor",
];
/// Aggregate outcome of a chunking run over one file or a directory tree.
#[derive(Debug, Clone, Default)]
pub struct ChunkResult {
    /// Successfully extracted code chunks.
    pub chunks: Vec<CodeChunk>,
    /// Files that were not chunked, each with a human-readable reason.
    pub skipped: Vec<SkippedFile>,
}
/// Record of a file that was skipped during chunking, and why.
#[derive(Debug, Clone)]
pub struct SkippedFile {
    /// Display form of the file's path.
    pub path: String,
    /// Human-readable explanation (e.g. "Binary or hidden file").
    pub reason: String,
}
/// Chunks a file or a whole directory tree, dispatching on the path kind.
///
/// Returns `TldrError::PathNotFound` when the path does not exist or is
/// neither a regular file nor a directory (e.g. a broken symlink).
pub fn chunk_code<P: AsRef<Path>>(path: P, options: &ChunkOptions) -> TldrResult<ChunkResult> {
    let path = path.as_ref();
    if !path.exists() {
        return Err(TldrError::PathNotFound(path.to_path_buf()));
    }
    // A path is at most one of these; the check order does not matter.
    if path.is_dir() {
        chunk_directory(path, options)
    } else if path.is_file() {
        chunk_file(path, options)
    } else {
        Err(TldrError::PathNotFound(path.to_path_buf()))
    }
}
/// Chunks a single file according to `options`.
///
/// Non-fatal conditions are reported in `ChunkResult::skipped` instead of
/// failing the call: binary/hidden files, extensions with no known language,
/// files excluded by `options.languages`, and unreadable files. A parse
/// failure under function granularity degrades gracefully to one file-level
/// chunk.
pub fn chunk_file<P: AsRef<Path>>(path: P, options: &ChunkOptions) -> TldrResult<ChunkResult> {
    let path = path.as_ref();
    let mut chunks = Vec::new();
    let mut skipped = Vec::new();
    if is_binary_or_hidden(path) {
        skipped.push(SkippedFile {
            path: path.display().to_string(),
            reason: "Binary or hidden file".into(),
        });
        return Ok(ChunkResult { chunks, skipped });
    }
    // Map the extension to a supported language; unknown extensions are
    // skipped rather than treated as errors.
    let language = match Language::from_path(path) {
        Some(lang) => lang,
        None => {
            skipped.push(SkippedFile {
                path: path.display().to_string(),
                reason: format!(
                    "Unknown language for extension: {}",
                    path.extension()
                        .map(|e| e.to_string_lossy().to_string())
                        .unwrap_or_else(|| "none".into())
                ),
            });
            return Ok(ChunkResult { chunks, skipped });
        }
    };
    // Honor an explicit language allow-list, if one was provided.
    if let Some(ref langs) = options.languages {
        if !langs.contains(&language) {
            skipped.push(SkippedFile {
                path: path.display().to_string(),
                reason: format!("Filtered out by language ({})", language),
            });
            return Ok(ChunkResult { chunks, skipped });
        }
    }
    let content = match std::fs::read_to_string(path) {
        Ok(c) => c,
        Err(e) => {
            skipped.push(SkippedFile {
                path: path.display().to_string(),
                reason: format!("Read error: {}", e),
            });
            return Ok(ChunkResult { chunks, skipped });
        }
    };
    match options.granularity {
        ChunkGranularity::File => {
            chunks.push(create_file_chunk(path, &content, language, options));
        }
        ChunkGranularity::Function => {
            // Parse only when function granularity is requested. The previous
            // version called parse_file unconditionally, paying for a full
            // tree-sitter parse (and a second read of the file) even when
            // file-level chunking was asked for.
            match parse_file(path) {
                Ok((tree, source, lang)) => {
                    let functions = extract_function_chunks(&tree, &source, path, lang, options);
                    if functions.is_empty() {
                        // No extractable functions: fall back to one file chunk.
                        chunks.push(create_file_chunk(path, &content, language, options));
                    } else {
                        chunks.extend(functions);
                    }
                }
                Err(e) => {
                    // Best-effort: warn and degrade to a file-level chunk.
                    eprintln!(
                        "Warning: Parse failed for {}, using file-level chunk: {}",
                        path.display(),
                        e
                    );
                    chunks.push(create_file_chunk(path, &content, language, options));
                }
            }
        }
    }
    Ok(ChunkResult { chunks, skipped })
}
fn chunk_directory<P: AsRef<Path>>(path: P, options: &ChunkOptions) -> TldrResult<ChunkResult> {
let path = path.as_ref();
let mut all_chunks = Vec::new();
let mut all_skipped = Vec::new();
for entry in WalkDir::new(path)
.follow_links(false) .into_iter()
.filter_entry(|e| e.depth() == 0 || !should_skip_entry(e))
.filter_map(|e| e.ok())
{
if entry.file_type().is_file() {
match chunk_file(entry.path(), options) {
Ok(result) => {
all_chunks.extend(result.chunks);
all_skipped.extend(result.skipped);
}
Err(e) => {
all_skipped.push(SkippedFile {
path: entry.path().display().to_string(),
reason: format!("Error: {}", e),
});
}
}
}
}
Ok(ChunkResult {
chunks: all_chunks,
skipped: all_skipped,
})
}
/// Returns true when a walked entry should be pruned: any hidden name
/// (leading '.' or '_'), or a directory on the well-known skip list.
fn should_skip_entry(entry: &walkdir::DirEntry) -> bool {
    let name = entry.file_name().to_string_lossy();
    if HIDDEN_PREFIXES.iter().any(|prefix| name.starts_with(prefix)) {
        return true;
    }
    // Only directories are matched against the skip list; a *file* named
    // e.g. "build" is still processed.
    entry.file_type().is_dir() && SKIP_DIRECTORIES.iter().any(|dir| name == *dir)
}
/// Returns true for paths that should never be chunked: hidden files
/// (name starts with '.' or '_') or files whose extension is on the
/// known-binary list (compared case-insensitively).
fn is_binary_or_hidden(path: &Path) -> bool {
    let hidden = path
        .file_name()
        .and_then(|name| name.to_str())
        .map_or(false, |name| {
            HIDDEN_PREFIXES.iter().any(|prefix| name.starts_with(prefix))
        });
    if hidden {
        return true;
    }
    path.extension()
        .and_then(|ext| ext.to_str())
        .map_or(false, |ext| {
            let ext = ext.to_lowercase();
            BINARY_EXTENSIONS.iter().any(|binary| ext == *binary)
        })
}
/// Builds a single whole-file `CodeChunk` for `path`.
///
/// The stored content is capped to the size budget (a `max_chunk_size` of 0
/// selects `DEFAULT_MAX_CHUNK_SIZE`), but the hash is computed over the
/// full, untruncated content so the fingerprint stays stable.
fn create_file_chunk(
    path: &Path,
    content: &str,
    language: Language,
    options: &ChunkOptions,
) -> CodeChunk {
    let budget = if options.max_chunk_size > 0 {
        options.max_chunk_size
    } else {
        DEFAULT_MAX_CHUNK_SIZE
    };
    let content_hash = compute_hash(content);
    let (final_content, _was_truncated) = truncate_if_needed(content, Some(budget));
    CodeChunk {
        file_path: path.to_path_buf(),
        function_name: None,
        class_name: None,
        line_start: 1,
        // Guarantee at least one line even for empty files.
        line_end: content.lines().count().max(1) as u32,
        content: final_content,
        content_hash,
        language,
    }
}
/// Truncates `content` to at most `max_size` bytes, cutting on a UTF-8
/// character boundary so the result is always valid and never exceeds the
/// budget. `None` disables truncation.
///
/// Returns the (possibly shortened) string and whether it was cut.
fn truncate_if_needed(content: &str, max_size: Option<usize>) -> (String, bool) {
    match max_size {
        Some(max) if content.len() > max => {
            // Walk back from `max` to the nearest char boundary. The previous
            // implementation kept every char whose *start* index was below
            // `max`, so a multi-byte char straddling the cutoff could push
            // the result up to 3 bytes over the budget.
            let mut cut = max;
            while !content.is_char_boundary(cut) {
                cut -= 1;
            }
            (content[..cut].to_string(), true)
        }
        _ => (content.to_string(), false),
    }
}
/// MD5 hex digest of `content`; used as a stable chunk fingerprint for
/// change detection, not for any security purpose.
fn compute_hash(content: &str) -> String {
    let digest = md5::compute(content.as_bytes());
    format!("{:x}", digest)
}
/// Intermediate record for one function found in a syntax tree, before it is
/// turned into a `CodeChunk` by `extract_function_chunks`.
struct ExtractedFunction {
    // Function/method name, or a synthesized placeholder such as
    // "<lambda:line:col>" for anonymous functions.
    name: String,
    // Enclosing class name / Rust impl type / Go receiver type, when any.
    class_name: Option<String>,
    // 1-based line numbers spanned by the definition.
    line_start: u32,
    line_end: u32,
    // Full source text of the definition (untruncated).
    content: String,
}
/// Extracts one `CodeChunk` per function found in `tree`, dispatching to the
/// per-language traversal. Unsupported languages yield no chunks (the caller
/// then falls back to a file-level chunk).
fn extract_function_chunks(
    tree: &Tree,
    source: &str,
    path: &Path,
    language: Language,
    options: &ChunkOptions,
) -> Vec<CodeChunk> {
    let root = tree.root_node();
    let mut extracted = Vec::new();
    match language {
        Language::Python => extract_python_all_functions(&root, source, &mut extracted),
        Language::TypeScript | Language::JavaScript => {
            extract_ts_all_functions(&root, source, &mut extracted)
        }
        Language::Rust => extract_rust_all_functions(&root, source, &mut extracted),
        Language::Go => extract_go_all_functions(&root, source, &mut extracted),
        Language::Java => extract_java_all_functions(&root, source, &mut extracted),
        _ => {}
    }
    // A max_chunk_size of 0 means "use the default budget".
    let budget = if options.max_chunk_size > 0 {
        options.max_chunk_size
    } else {
        DEFAULT_MAX_CHUNK_SIZE
    };
    let mut chunks = Vec::with_capacity(extracted.len());
    for func in extracted {
        // Hash the untruncated body so the fingerprint is stable even when
        // the stored content is cut to the size budget.
        let hash = compute_hash(&func.content);
        let (content, _was_truncated) = truncate_if_needed(&func.content, Some(budget));
        chunks.push(CodeChunk {
            file_path: path.to_path_buf(),
            function_name: Some(func.name),
            class_name: func.class_name,
            line_start: func.line_start,
            line_end: func.line_end,
            content,
            content_hash: hash,
            language,
        });
    }
    chunks
}
/// Returns the slice of `source` covered by `node` as an owned `String`.
fn get_node_text(node: &Node, source: &str) -> String {
    let range = node.byte_range();
    source[range].to_owned()
}
/// Returns the 1-based (start, end) line numbers for `node`.
/// tree-sitter rows are 0-based, hence the +1.
fn get_line_range(node: &Node) -> (u32, u32) {
    (
        (node.start_position().row + 1) as u32,
        (node.end_position().row + 1) as u32,
    )
}
/// Recursively collects Python `def`s and lambdas from the subtree at `node`.
/// Class bodies are entered so methods are found; function bodies are entered
/// so nested defs are found.
fn extract_python_all_functions(node: &Node, source: &str, functions: &mut Vec<ExtractedFunction>) {
    let mut cursor = node.walk();
    for child in node.children(&mut cursor) {
        let kind = child.kind();
        if kind == "function_definition" {
            if let Some(name_node) = child.child_by_field_name("name") {
                let (line_start, line_end) = get_line_range(&child);
                functions.push(ExtractedFunction {
                    name: get_node_text(&name_node, source),
                    class_name: get_enclosing_class_name(&child, source),
                    line_start,
                    line_end,
                    content: get_node_text(&child, source),
                });
            }
            // Nested defs live inside the body.
            if let Some(body) = child.child_by_field_name("body") {
                extract_python_all_functions(&body, source, functions);
            }
        } else if kind == "lambda" {
            let (line_start, line_end) = get_line_range(&child);
            // Prefer the name the lambda is bound to; otherwise synthesize a
            // positional placeholder.
            let name = get_lambda_name(&child, source).unwrap_or_else(|| {
                format!("<lambda:{}:{}>", line_start, child.start_position().column)
            });
            functions.push(ExtractedFunction {
                name,
                class_name: None,
                line_start,
                line_end,
                content: get_node_text(&child, source),
            });
        } else if kind == "class_definition" {
            if let Some(body) = child.child_by_field_name("body") {
                extract_python_all_functions(&body, source, functions);
            }
        } else {
            extract_python_all_functions(&child, source, functions);
        }
    }
}
/// Recursively collects TypeScript/JavaScript functions, class methods, and
/// arrow functions from the subtree at `node`.
fn extract_ts_all_functions(node: &Node, source: &str, functions: &mut Vec<ExtractedFunction>) {
    let mut cursor = node.walk();
    for child in node.children(&mut cursor) {
        match child.kind() {
            "function_declaration" | "function" => {
                if let Some(name_node) = child.child_by_field_name("name") {
                    let (line_start, line_end) = get_line_range(&child);
                    functions.push(ExtractedFunction {
                        name: get_node_text(&name_node, source),
                        class_name: get_enclosing_class_name(&child, source),
                        line_start,
                        line_end,
                        content: get_node_text(&child, source),
                    });
                }
                // Function bodies may declare further functions.
                if let Some(body) = child.child_by_field_name("body") {
                    extract_ts_all_functions(&body, source, functions);
                }
            }
            "method_definition" => {
                if let Some(name_node) = child.child_by_field_name("name") {
                    let (line_start, line_end) = get_line_range(&child);
                    functions.push(ExtractedFunction {
                        name: get_node_text(&name_node, source),
                        class_name: get_enclosing_class_name(&child, source),
                        line_start,
                        line_end,
                        content: get_node_text(&child, source),
                    });
                }
            }
            "arrow_function" => {
                let (line_start, line_end) = get_line_range(&child);
                // Use the variable the arrow is assigned to, when there is
                // one; otherwise synthesize a positional placeholder.
                let name = get_arrow_function_name(&child, source).unwrap_or_else(|| {
                    format!("<arrow:{}:{}>", line_start, child.start_position().column)
                });
                functions.push(ExtractedFunction {
                    name,
                    class_name: get_enclosing_class_name(&child, source),
                    line_start,
                    line_end,
                    content: get_node_text(&child, source),
                });
                if let Some(body) = child.child_by_field_name("body") {
                    extract_ts_all_functions(&body, source, functions);
                }
            }
            "class_declaration" | "class" => {
                if let Some(body) = child.child_by_field_name("body") {
                    extract_ts_all_functions(&body, source, functions);
                }
            }
            _ => {
                extract_ts_all_functions(&child, source, functions);
            }
        }
    }
}
/// Name of the variable an arrow function is assigned to
/// (`const f = () => ...` yields "f"), or `None` when it is anonymous.
fn get_arrow_function_name(node: &Node, source: &str) -> Option<String> {
    let parent = node.parent()?;
    if parent.kind() != "variable_declarator" {
        return None;
    }
    parent
        .child_by_field_name("name")
        .map(|name| get_node_text(&name, source))
}
/// Recursively collects Rust `fn` items and closures from the subtree at
/// `node`, entering `impl` blocks so associated functions are found.
fn extract_rust_all_functions(node: &Node, source: &str, functions: &mut Vec<ExtractedFunction>) {
    let mut cursor = node.walk();
    for child in node.children(&mut cursor) {
        match child.kind() {
            "function_item" => {
                if let Some(name_node) = child.child_by_field_name("name") {
                    let (line_start, line_end) = get_line_range(&child);
                    functions.push(ExtractedFunction {
                        name: get_node_text(&name_node, source),
                        // The enclosing impl's type plays the role of a class.
                        class_name: get_rust_impl_type(&child, source),
                        line_start,
                        line_end,
                        content: get_node_text(&child, source),
                    });
                }
                // Nested fns live inside the body.
                if let Some(body) = child.child_by_field_name("body") {
                    extract_rust_all_functions(&body, source, functions);
                }
            }
            "closure_expression" => {
                let (line_start, line_end) = get_line_range(&child);
                // Prefer the let-binding pattern name; otherwise synthesize a
                // positional placeholder.
                let name = get_rust_closure_name(&child, source).unwrap_or_else(|| {
                    format!("<closure:{}:{}>", line_start, child.start_position().column)
                });
                functions.push(ExtractedFunction {
                    name,
                    class_name: None,
                    line_start,
                    line_end,
                    content: get_node_text(&child, source),
                });
            }
            "impl_item" => {
                if let Some(body) = child.child_by_field_name("body") {
                    extract_rust_all_functions(&body, source, functions);
                }
            }
            _ => extract_rust_all_functions(&child, source, functions),
        }
    }
}
/// Walks ancestors looking for an `impl` block and returns its target type
/// text, or `None` when `node` is a free function.
fn get_rust_impl_type(node: &Node, source: &str) -> Option<String> {
    let mut ancestor = node.parent();
    while let Some(scope) = ancestor {
        if scope.kind() == "impl_item" {
            if let Some(ty) = scope.child_by_field_name("type") {
                return Some(get_node_text(&ty, source));
            }
            // An impl without a "type" field: keep climbing.
        }
        ancestor = scope.parent();
    }
    None
}
/// Name of the `let` binding a closure is assigned to
/// (`let f = |x| ...` yields "f"), or `None` when it is anonymous.
fn get_rust_closure_name(node: &Node, source: &str) -> Option<String> {
    let parent = node.parent()?;
    if parent.kind() != "let_declaration" {
        return None;
    }
    parent
        .child_by_field_name("pattern")
        .map(|pattern| get_node_text(&pattern, source))
}
/// Recursively collects Go functions, methods, and function literals from the
/// subtree at `node`.
///
/// Fix: the `function_declaration` and `method_declaration` arms previously
/// did not recurse into their bodies, so `func_literal` closures nested
/// inside named functions — the common case in Go — were silently missed.
/// Bodies are now recursed, matching the Python/TS/Rust extractors.
fn extract_go_all_functions(node: &Node, source: &str, functions: &mut Vec<ExtractedFunction>) {
    let mut cursor = node.walk();
    for child in node.children(&mut cursor) {
        match child.kind() {
            "function_declaration" => {
                if let Some(name_node) = child.child_by_field_name("name") {
                    let name = get_node_text(&name_node, source);
                    let (line_start, line_end) = get_line_range(&child);
                    let content = get_node_text(&child, source);
                    functions.push(ExtractedFunction {
                        name,
                        class_name: None,
                        line_start,
                        line_end,
                        content,
                    });
                }
                // Enter the body so nested func literals are found.
                if let Some(body) = child.child_by_field_name("body") {
                    extract_go_all_functions(&body, source, functions);
                }
            }
            "method_declaration" => {
                if let Some(name_node) = child.child_by_field_name("name") {
                    let name = get_node_text(&name_node, source);
                    let (line_start, line_end) = get_line_range(&child);
                    let content = get_node_text(&child, source);
                    // The receiver type (minus any leading '*') plays the
                    // role of a class name.
                    let class_name = child
                        .child_by_field_name("receiver")
                        .and_then(|r| get_go_receiver_type(&r, source));
                    functions.push(ExtractedFunction {
                        name,
                        class_name,
                        line_start,
                        line_end,
                        content,
                    });
                }
                if let Some(body) = child.child_by_field_name("body") {
                    extract_go_all_functions(&body, source, functions);
                }
            }
            "func_literal" => {
                let (line_start, line_end) = get_line_range(&child);
                let content = get_node_text(&child, source);
                // Anonymous: synthesize a positional placeholder name.
                let name = format!("<func:{}:{}>", line_start, child.start_position().column);
                functions.push(ExtractedFunction {
                    name,
                    class_name: None,
                    line_start,
                    line_end,
                    content,
                });
                // Closures can themselves contain closures.
                if let Some(body) = child.child_by_field_name("body") {
                    extract_go_all_functions(&body, source, functions);
                }
            }
            _ => {
                extract_go_all_functions(&child, source, functions);
            }
        }
    }
}
/// Extracts the receiver's type name from a Go method receiver node,
/// stripping a leading '*' so pointer and value receivers match.
fn get_go_receiver_type(receiver: &Node, source: &str) -> Option<String> {
    let mut cursor = receiver.walk();
    receiver
        .children(&mut cursor)
        .filter(|child| child.kind() == "parameter_declaration")
        .find_map(|child| child.child_by_field_name("type"))
        .map(|ty| get_node_text(&ty, source).trim_start_matches('*').to_string())
}
/// Recursively collects Java methods, constructors, and lambda expressions
/// from the subtree at `node`, entering class/interface/enum bodies.
///
/// Fix: the `method_declaration`/`constructor_declaration` arms previously
/// did not recurse into their bodies, so `lambda_expression`s inside method
/// bodies — where nearly all Java lambdas occur — were never extracted.
/// Bodies are now recursed, matching the other language extractors.
fn extract_java_all_functions(node: &Node, source: &str, functions: &mut Vec<ExtractedFunction>) {
    let mut cursor = node.walk();
    for child in node.children(&mut cursor) {
        match child.kind() {
            "method_declaration" | "constructor_declaration" => {
                if let Some(name_node) = child.child_by_field_name("name") {
                    let name = get_node_text(&name_node, source);
                    let (line_start, line_end) = get_line_range(&child);
                    let content = get_node_text(&child, source);
                    functions.push(ExtractedFunction {
                        name,
                        class_name: get_enclosing_class_name(&child, source),
                        line_start,
                        line_end,
                        content,
                    });
                }
                // Enter the body so lambdas and local classes are found.
                if let Some(body) = child.child_by_field_name("body") {
                    extract_java_all_functions(&body, source, functions);
                }
            }
            "lambda_expression" => {
                let (line_start, line_end) = get_line_range(&child);
                let content = get_node_text(&child, source);
                // Anonymous: synthesize a positional placeholder name.
                let name = format!("<lambda:{}:{}>", line_start, child.start_position().column);
                functions.push(ExtractedFunction {
                    name,
                    class_name: get_enclosing_class_name(&child, source),
                    line_start,
                    line_end,
                    content,
                });
                // A lambda body can contain further lambdas.
                if let Some(body) = child.child_by_field_name("body") {
                    extract_java_all_functions(&body, source, functions);
                }
            }
            "class_declaration" | "interface_declaration" | "enum_declaration" => {
                if let Some(body) = child.child_by_field_name("body") {
                    extract_java_all_functions(&body, source, functions);
                }
            }
            _ => {
                extract_java_all_functions(&child, source, functions);
            }
        }
    }
}
/// Walks ancestors looking for the nearest class-like scope (Python/Java/TS
/// class or Rust `impl`) and returns its name/type text, or `None` for
/// top-level code.
fn get_enclosing_class_name(node: &Node, source: &str) -> Option<String> {
    let mut ancestor = node.parent();
    while let Some(scope) = ancestor {
        // Which child field holds the identifier differs by node kind.
        let field = match scope.kind() {
            "class_definition" | "class_declaration" | "class" => Some("name"),
            "impl_item" => Some("type"),
            _ => None,
        };
        if let Some(field) = field {
            if let Some(ident) = scope.child_by_field_name(field) {
                return Some(get_node_text(&ident, source));
            }
            // Scope lacks the field: keep climbing.
        }
        ancestor = scope.parent();
    }
    None
}
/// Name a Python lambda is bound to: the assignment target for
/// `f = lambda ...`, or the walrus target for `(f := lambda ...)`.
/// `None` when the lambda is anonymous.
fn get_lambda_name(node: &Node, source: &str) -> Option<String> {
    let parent = node.parent()?;
    match parent.kind() {
        "assignment" => parent
            .child_by_field_name("left")
            .map(|left| get_node_text(&left, source)),
        "named_expression" => parent
            .child_by_field_name("name")
            .map(|name| get_node_text(&name, source)),
        _ => None,
    }
}
// Integration tests for the chunker: they write real files into a temp dir
// and exercise chunk_code / chunk_file end to end.
#[cfg(test)]
mod chunker_tests {
    use super::*;
    use std::fs;
    use tempfile::TempDir;

    // Defaults: function granularity, size cap unset (0), docs excluded,
    // no language filter.
    #[test]
    fn chunk_options_default_values() {
        let options = ChunkOptions::default();
        assert_eq!(options.granularity, ChunkGranularity::Function);
        assert_eq!(options.max_chunk_size, 0);
        assert!(!options.include_docs);
        assert!(options.languages.is_none());
    }

    // Free functions and impl methods in a .rs file each become a chunk.
    #[test]
    fn chunk_file_rust_function_extraction() {
        let tmp = TempDir::new().unwrap();
        let file_path = tmp.path().join("test.rs");
        fs::write(
            &file_path,
            r#"
fn foo() {
println!("foo");
}
fn bar(x: i32) -> i32 {
x * 2
}
impl MyStruct {
fn method(&self) {
// method
}
}
"#,
        )
        .unwrap();
        let result = chunk_file(&file_path, &ChunkOptions::default()).unwrap();
        assert!(result.skipped.is_empty());
        assert!(result.chunks.len() >= 3);
        let names: Vec<_> = result
            .chunks
            .iter()
            .filter_map(|c| c.function_name.as_ref())
            .collect();
        assert!(names.contains(&&"foo".to_string()));
        assert!(names.contains(&&"bar".to_string()));
        assert!(names.contains(&&"method".to_string()));
    }

    // Python defs and class methods are extracted by name.
    // NOTE(review): the embedded snippet's body indentation appears stripped,
    // which Python normally requires — verify this fixture still parses the
    // way the original intended.
    #[test]
    fn chunk_file_python_function_extraction() {
        let tmp = TempDir::new().unwrap();
        let file_path = tmp.path().join("test.py");
        fs::write(
            &file_path,
            r#"
def foo():
pass
def bar(x):
return x * 2
class MyClass:
def method(self):
pass
"#,
        )
        .unwrap();
        let result = chunk_file(&file_path, &ChunkOptions::default()).unwrap();
        assert!(result.skipped.is_empty());
        assert!(result.chunks.len() >= 3);
        let names: Vec<_> = result
            .chunks
            .iter()
            .filter_map(|c| c.function_name.as_ref())
            .collect();
        assert!(names.contains(&&"foo".to_string()));
        assert!(names.contains(&&"bar".to_string()));
        assert!(names.contains(&&"method".to_string()));
    }

    // File granularity: one chunk covering the whole file, no function name.
    #[test]
    fn chunk_file_file_level_granularity() {
        let tmp = TempDir::new().unwrap();
        let file_path = tmp.path().join("test.rs");
        fs::write(
            &file_path,
            r#"
fn foo() {}
fn bar() {}
"#,
        )
        .unwrap();
        let options = ChunkOptions {
            granularity: ChunkGranularity::File,
            ..Default::default()
        };
        let result = chunk_file(&file_path, &options).unwrap();
        assert_eq!(result.chunks.len(), 1);
        assert!(result.chunks[0].function_name.is_none());
        assert!(result.chunks[0].content.contains("fn foo()"));
        assert!(result.chunks[0].content.contains("fn bar()"));
    }

    // Directory chunking recurses into subdirectories and handles mixed
    // languages in one tree.
    #[test]
    fn chunk_code_directory_traversal() {
        let tmp = TempDir::new().unwrap();
        fs::write(tmp.path().join("a.rs"), "fn a() {}").unwrap();
        fs::write(tmp.path().join("b.py"), "def b(): pass").unwrap();
        let sub = tmp.path().join("sub");
        fs::create_dir(&sub).unwrap();
        fs::write(sub.join("c.rs"), "fn c() {}").unwrap();
        let result = chunk_code(tmp.path(), &ChunkOptions::default()).unwrap();
        assert!(!result.chunks.is_empty(), "Should have found some chunks");
        let names: Vec<_> = result
            .chunks
            .iter()
            .filter_map(|c| c.function_name.as_ref())
            .collect();
        assert!(
            names.contains(&&"a".to_string()),
            "Should find function 'a' from a.rs"
        );
        assert!(
            names.contains(&&"c".to_string()),
            "Should find function 'c' from sub/c.rs"
        );
        // b.py may surface as a function chunk or a file-level chunk; accept
        // either form.
        let has_b = names.contains(&&"b".to_string())
            || result
                .chunks
                .iter()
                .any(|c| c.file_path.to_string_lossy().contains("b.py"));
        assert!(has_b, "Should have b.py in some form");
    }

    // A missing path is a hard error (PathNotFound), not a skip.
    #[test]
    fn chunk_file_nonexistent_returns_error() {
        let result = chunk_code("/nonexistent/path/to/file.rs", &ChunkOptions::default());
        assert!(result.is_err());
        assert!(matches!(result.unwrap_err(), TldrError::PathNotFound(_)));
    }

    // Known-binary extensions are skipped with a "Binary" reason.
    #[test]
    fn chunk_file_binary_file_skipped() {
        let tmp = TempDir::new().unwrap();
        let file_path = tmp.path().join("test.exe");
        fs::write(&file_path, [0u8; 100]).unwrap();
        let result = chunk_file(&file_path, &ChunkOptions::default()).unwrap();
        assert!(result.chunks.is_empty());
        assert_eq!(result.skipped.len(), 1);
        assert!(result.skipped[0].reason.contains("Binary"));
    }

    // Every chunk carries a non-empty hex content hash.
    #[test]
    fn chunk_file_includes_content_hash() {
        let tmp = TempDir::new().unwrap();
        let file_path = tmp.path().join("test.rs");
        fs::write(&file_path, "fn foo() {}").unwrap();
        let result = chunk_file(&file_path, &ChunkOptions::default()).unwrap();
        assert!(!result.chunks.is_empty());
        let chunk = &result.chunks[0];
        assert!(!chunk.content_hash.is_empty());
        assert!(chunk.content_hash.chars().all(|c| c.is_ascii_hexdigit()));
    }

    // Hashing is deterministic: same input, same fingerprint.
    #[test]
    fn chunk_file_consistent_hashing() {
        let tmp = TempDir::new().unwrap();
        let file_path = tmp.path().join("test.rs");
        fs::write(&file_path, "fn foo() {}").unwrap();
        let result1 = chunk_file(&file_path, &ChunkOptions::default()).unwrap();
        let result2 = chunk_file(&file_path, &ChunkOptions::default()).unwrap();
        assert_eq!(
            result1.chunks[0].content_hash,
            result2.chunks[0].content_hash
        );
    }

    // Dot-prefixed file names are treated as hidden and skipped.
    #[test]
    fn chunk_file_hidden_file_skipped() {
        let tmp = TempDir::new().unwrap();
        let file_path = tmp.path().join(".hidden.rs");
        fs::write(&file_path, "fn foo() {}").unwrap();
        let result = chunk_file(&file_path, &ChunkOptions::default()).unwrap();
        assert!(result.chunks.is_empty());
        assert_eq!(result.skipped.len(), 1);
        assert!(result.skipped[0].reason.contains("hidden"));
    }

    // A languages allow-list keeps matching files and records the rest as
    // skipped.
    #[test]
    fn chunk_file_language_filter() {
        let tmp = TempDir::new().unwrap();
        let rust_file = tmp.path().join("test.rs");
        let py_file = tmp.path().join("test.py");
        fs::write(&rust_file, "fn foo() {}").unwrap();
        fs::write(&py_file, "def bar(): pass").unwrap();
        let options = ChunkOptions {
            languages: Some(vec![Language::Rust]),
            ..Default::default()
        };
        let result = chunk_code(tmp.path(), &options).unwrap();
        let names: Vec<_> = result
            .chunks
            .iter()
            .filter_map(|c| c.function_name.as_ref())
            .collect();
        assert!(names.contains(&&"foo".to_string()));
        assert!(!names.contains(&&"bar".to_string()));
        assert!(result.skipped.iter().any(|s| s.path.contains("test.py")));
    }

    // max_chunk_size caps the stored content length.
    #[test]
    fn chunk_file_truncation() {
        let tmp = TempDir::new().unwrap();
        let file_path = tmp.path().join("test.rs");
        let long_content = format!("fn foo() {{\n{}\n}}", " let x = 1;\n".repeat(500));
        fs::write(&file_path, &long_content).unwrap();
        let options = ChunkOptions {
            max_chunk_size: 100,
            ..Default::default()
        };
        let result = chunk_file(&file_path, &options).unwrap();
        assert!(!result.chunks.is_empty());
        assert!(result.chunks[0].content.len() <= 100);
    }

    // An unrecognized extension is skipped with an "Unknown language" reason.
    #[test]
    fn chunk_file_unknown_language_skipped() {
        let tmp = TempDir::new().unwrap();
        let file_path = tmp.path().join("test.xyz");
        fs::write(&file_path, "some content").unwrap();
        let result = chunk_file(&file_path, &ChunkOptions::default()).unwrap();
        assert!(result.chunks.is_empty());
        assert_eq!(result.skipped.len(), 1);
        assert!(result.skipped[0].reason.contains("Unknown language"));
    }

    // node_modules directories are pruned during traversal.
    #[test]
    fn chunk_directory_skips_node_modules() {
        let tmp = TempDir::new().unwrap();
        fs::write(tmp.path().join("main.rs"), "fn main() {}").unwrap();
        let node_modules = tmp.path().join("node_modules");
        fs::create_dir(&node_modules).unwrap();
        fs::write(node_modules.join("dep.js"), "function dep() {}").unwrap();
        let result = chunk_code(tmp.path(), &ChunkOptions::default()).unwrap();
        let names: Vec<_> = result
            .chunks
            .iter()
            .filter_map(|c| c.function_name.as_ref())
            .collect();
        assert!(names.contains(&&"main".to_string()));
        assert!(!names.iter().any(|n| *n == "dep"));
    }
}