1use crate::cache::constants::*;
7use crate::cache::storage::CacheStorage;
8use crate::cache::workspace::WorkspaceHandler;
9use crate::rustdoc;
10use crate::search::indexer::SearchIndexer;
11use anyhow::{Context, Result, bail};
12use std::path::{Path, PathBuf};
13use std::process::Command;
14
/// Generates rustdoc JSON for cached crate sources and derives the auxiliary
/// artifacts the cache serves: dependency metadata (`cargo metadata` output)
/// and a search index built from the rustdoc JSON.
#[derive(Debug, Clone)]
pub struct DocGenerator {
    // Resolves cache-layout paths (source/docs/deps) and persists metadata.
    storage: CacheStorage,
}
20
21impl DocGenerator {
    /// Creates a generator that reads and writes through the given cache storage.
    pub fn new(storage: CacheStorage) -> Self {
        Self { storage }
    }
26
27 fn cleanup_target_directory(&self, source_path: &Path) -> Result<()> {
29 let target_dir = source_path.join(TARGET_DIR);
30 if target_dir.exists() {
31 std::fs::remove_dir_all(&target_dir).with_context(|| {
32 format!(
33 "Failed to clean up target directory: {}",
34 target_dir.display()
35 )
36 })?;
37 tracing::info!("Cleaned up target directory to save disk space");
38 }
39 Ok(())
40 }
41
42 pub async fn generate_docs(&self, name: &str, version: &str) -> Result<PathBuf> {
44 tracing::info!(
45 "DocGenerator::generate_docs starting for {}-{}",
46 name,
47 version
48 );
49
50 let source_path = self.storage.source_path(name, version)?;
51 let docs_path = self.storage.docs_path(name, version, None)?;
52
53 if docs_path.exists() {
55 tracing::info!(
56 "Docs already exist for {}-{}, skipping generation",
57 name,
58 version
59 );
60 return Ok(docs_path);
61 }
62
63 if !source_path.exists() {
64 bail!(
65 "Source not found for {}-{}. Download it first.",
66 name,
67 version
68 );
69 }
70
71 tracing::info!("Generating documentation for {}-{}", name, version);
72
73 rustdoc::run_cargo_rustdoc_json(&source_path, None).await?;
75
76 let doc_dir = source_path.join(TARGET_DIR).join(DOC_DIR);
78 let json_file = self.find_json_doc(&doc_dir, name)?;
79
80 std::fs::copy(&json_file, &docs_path).context("Failed to copy documentation to cache")?;
82
83 self.generate_dependencies(name, version).await?;
85
86 self.storage.save_metadata(name, version)?;
88
89 self.create_search_index(name, version, None)
91 .await
92 .context("Failed to create search index")?;
93
94 self.cleanup_target_directory(&source_path)?;
96
97 tracing::info!(
98 "Successfully generated documentation for {}-{}",
99 name,
100 version
101 );
102 tracing::info!(
103 "DocGenerator::generate_docs completed for {}-{}",
104 name,
105 version
106 );
107 Ok(docs_path)
108 }
109
    /// Generates rustdoc JSON for a single workspace member of `name`-`version`
    /// and caches it under the member's path.
    ///
    /// `member_path` is the member's directory relative to the workspace root;
    /// the actual package name is read from the member's `Cargo.toml`, since
    /// the two can differ. Unlike `generate_docs`, there is no existing-docs
    /// short-circuit here: member docs are regenerated on every call.
    ///
    /// # Errors
    /// Fails when the workspace source has not been downloaded, the member
    /// path does not exist, or any generation/copy/indexing step fails.
    pub async fn generate_workspace_member_docs(
        &self,
        name: &str,
        version: &str,
        member_path: &str,
    ) -> Result<PathBuf> {
        let source_path = self.storage.source_path(name, version)?;
        let member_full_path = source_path.join(member_path);

        if !source_path.exists() {
            bail!(
                "Source not found for {}-{}. Download it first.",
                name,
                version
            );
        }

        if !member_full_path.exists() {
            bail!(
                "Workspace member not found at path: {}",
                member_full_path.display()
            );
        }

        // Resolve the package name declared by the member's own Cargo.toml;
        // rustdoc names its output after the package, not the directory.
        let member_cargo_toml = member_full_path.join(CARGO_TOML);
        let package_name = WorkspaceHandler::get_package_name(&member_cargo_toml)?;

        let docs_path = self.storage.docs_path(name, version, Some(member_path))?;

        tracing::info!(
            "Generating documentation for workspace member {} (package: {}) in {}-{}",
            member_path,
            package_name,
            name,
            version
        );

        // rustdoc runs from the workspace root but targets only this package.
        rustdoc::run_cargo_rustdoc_json(&source_path, Some(&package_name)).await?;

        let doc_dir = source_path.join(TARGET_DIR).join(DOC_DIR);
        let json_file = self.find_json_doc(&doc_dir, &package_name)?;

        // Member docs live in a nested cache directory; create it first.
        if let Some(parent) = docs_path.parent() {
            self.storage.ensure_dir(parent)?;
        } else {
            bail!(
                "Invalid docs path: no parent directory for {}",
                docs_path.display()
            );
        }

        std::fs::copy(&json_file, &docs_path)
            .context("Failed to copy workspace member documentation to cache")?;

        self.generate_workspace_member_dependencies(name, version, member_path)
            .await?;

        self.create_search_index(name, version, Some(member_path))
            .await
            .context("Failed to create search index for workspace member")?;

        // Reclaim disk space once all artifacts are cached.
        self.cleanup_target_directory(&source_path)?;

        tracing::info!(
            "Successfully generated documentation for workspace member {} in {}-{}",
            member_path,
            name,
            version
        );
        Ok(docs_path)
    }
191
192 fn find_json_doc(&self, doc_dir: &Path, crate_name: &str) -> Result<PathBuf> {
194 let json_name = crate_name.replace('-', "_");
196 let json_file = doc_dir.join(format!("{json_name}.json"));
197
198 if json_file.exists() {
199 return Ok(json_file);
200 }
201
202 let entries = std::fs::read_dir(doc_dir)
204 .with_context(|| format!("Failed to read doc directory: {}", doc_dir.display()))?;
205
206 for entry in entries {
207 let entry = entry?;
208 let path = entry.path();
209 if path.extension().and_then(|s| s.to_str()) == Some("json") {
210 return Ok(path);
211 }
212 }
213
214 bail!(
215 "No JSON documentation file found for crate '{}' in {}",
216 crate_name,
217 doc_dir.display()
218 );
219 }
220
221 async fn generate_dependencies(&self, name: &str, version: &str) -> Result<()> {
223 let source_path = self.storage.source_path(name, version)?;
224 let deps_path = self.storage.dependencies_path(name, version, None)?;
225
226 tracing::info!("Generating dependency information for {}-{}", name, version);
227
228 let output = Command::new("cargo")
230 .args(["metadata", "--format-version", "1"])
231 .current_dir(&source_path)
232 .output()
233 .context("Failed to run cargo metadata")?;
234
235 if !output.status.success() {
236 let stderr = String::from_utf8_lossy(&output.stderr);
237 bail!("Failed to generate dependency metadata: {}", stderr);
238 }
239
240 tokio::fs::write(&deps_path, &output.stdout)
242 .await
243 .context("Failed to write dependencies to cache")?;
244
245 Ok(())
246 }
247
248 async fn generate_workspace_member_dependencies(
250 &self,
251 name: &str,
252 version: &str,
253 member_path: &str,
254 ) -> Result<()> {
255 let source_path = self.storage.source_path(name, version)?;
256 let deps_path = self
257 .storage
258 .member_path(name, version, member_path)?
259 .join(DEPENDENCIES_FILE);
260
261 tracing::info!(
262 "Generating dependency information for workspace member {} in {}-{}",
263 member_path,
264 name,
265 version
266 );
267
268 let member_cargo_toml = source_path.join(member_path).join(CARGO_TOML);
270
271 let output = Command::new("cargo")
273 .args([
274 "metadata",
275 "--format-version",
276 "1",
277 "--manifest-path",
278 &member_cargo_toml.to_string_lossy(),
279 ])
280 .output()
281 .context("Failed to run cargo metadata")?;
282
283 if !output.status.success() {
284 let stderr = String::from_utf8_lossy(&output.stderr);
285 bail!("Failed to generate dependency metadata: {}", stderr);
286 }
287
288 if let Some(parent) = deps_path.parent() {
290 self.storage.ensure_dir(parent)?;
291 } else {
292 bail!(
293 "Invalid deps path: no parent directory for {}",
294 deps_path.display()
295 );
296 }
297
298 tokio::fs::write(&deps_path, &output.stdout)
300 .await
301 .context("Failed to write dependencies to cache")?;
302
303 Ok(())
304 }
305
306 pub async fn load_dependencies(&self, name: &str, version: &str) -> Result<serde_json::Value> {
308 let deps_path = self.storage.dependencies_path(name, version, None)?;
309
310 if !deps_path.exists() {
311 bail!("Dependencies not found for {}-{}", name, version);
312 }
313
314 let json_string = tokio::fs::read_to_string(&deps_path)
315 .await
316 .context("Failed to read dependencies file")?;
317
318 let deps: serde_json::Value =
319 serde_json::from_str(&json_string).context("Failed to parse dependencies JSON")?;
320
321 Ok(deps)
322 }
323
324 pub async fn load_docs(
326 &self,
327 name: &str,
328 version: &str,
329 member_name: Option<&str>,
330 ) -> Result<serde_json::Value> {
331 let docs_path = self.storage.docs_path(name, version, member_name)?;
332
333 if !docs_path.exists() {
334 if let Some(member) = member_name {
335 bail!(
336 "Documentation not found for workspace member {} in {}-{}",
337 member,
338 name,
339 version
340 );
341 } else {
342 bail!("Documentation not found for {}-{}", name, version);
343 }
344 }
345
346 let json_string = tokio::fs::read_to_string(&docs_path)
347 .await
348 .context("Failed to read documentation file")?;
349
350 let docs: serde_json::Value =
351 serde_json::from_str(&json_string).context("Failed to parse documentation JSON")?;
352
353 Ok(docs)
354 }
355
356 pub async fn create_search_index(
358 &self,
359 name: &str,
360 version: &str,
361 member_name: Option<&str>,
362 ) -> Result<()> {
363 let log_prefix = if let Some(member) = member_name {
364 format!("workspace member {member} in")
365 } else {
366 String::new()
367 };
368
369 tracing::info!(
370 "Creating search index for {}{}-{}",
371 log_prefix,
372 name,
373 version
374 );
375
376 let docs_path = self.storage.docs_path(name, version, member_name)?;
378
379 let docs_json = tokio::fs::read_to_string(&docs_path)
380 .await
381 .context("Failed to read documentation for indexing")?;
382
383 let crate_data: rustdoc_types::Crate = serde_json::from_str(&docs_json)
384 .context("Failed to parse documentation JSON for indexing")?;
385
386 let mut indexer = SearchIndexer::new_for_crate(name, version, &self.storage, member_name)?;
388
389 indexer.add_crate_items(name, version, &crate_data)?;
391
392 tracing::info!(
393 "Successfully created search index for {}{}-{}",
394 log_prefix,
395 name,
396 version
397 );
398 Ok(())
399 }
400}
401
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;
    use tempfile::TempDir;

    /// Builds a `DocGenerator` backed by a throwaway cache directory.
    fn docgen_in(tmp: &TempDir) -> DocGenerator {
        let storage = CacheStorage::new(Some(tmp.path().to_path_buf())).unwrap();
        DocGenerator::new(storage)
    }

    #[test]
    fn test_docgen_creation() {
        let tmp = TempDir::new().unwrap();
        let generator = docgen_in(&tmp);

        // The Debug derive should render the type name.
        assert!(format!("{generator:?}").contains("DocGenerator"));
    }

    #[test]
    fn test_find_json_doc_not_found() {
        let tmp = TempDir::new().unwrap();
        let generator = docgen_in(&tmp);

        let doc_dir = tmp.path().join(DOC_DIR);
        fs::create_dir_all(&doc_dir).unwrap();

        // An empty doc directory must produce an error.
        assert!(generator.find_json_doc(&doc_dir, "nonexistent").is_err());
    }

    #[test]
    fn test_find_json_doc_found() {
        let tmp = TempDir::new().unwrap();
        let generator = docgen_in(&tmp);

        let doc_dir = tmp.path().join(DOC_DIR);
        fs::create_dir_all(&doc_dir).unwrap();

        let expected = doc_dir.join("test_crate.json");
        fs::write(&expected, "{}").unwrap();

        let found = generator.find_json_doc(&doc_dir, "test_crate").unwrap();
        assert_eq!(found, expected);
    }

    #[test]
    fn test_find_json_doc_with_underscore_conversion() {
        let tmp = TempDir::new().unwrap();
        let generator = docgen_in(&tmp);

        let doc_dir = tmp.path().join(DOC_DIR);
        fs::create_dir_all(&doc_dir).unwrap();

        // The file on disk uses an underscore; the crate name uses a dash.
        let expected = doc_dir.join("test_crate.json");
        fs::write(&expected, "{}").unwrap();

        let found = generator.find_json_doc(&doc_dir, "test-crate").unwrap();
        assert_eq!(found, expected);
    }
}