1use crate::error::{FileSystemError, Result};
2use crate::generator::api_client::ApiFunction;
3use crate::generator::ts_typings::TypeScriptType;
4use crate::generator::utils::sanitize_module_name;
5use crate::generator::zod_schema::ZodSchema;
6use std::collections::hash_map::DefaultHasher;
7use std::hash::{Hash, Hasher};
8use std::path::{Path, PathBuf};
9use std::time::SystemTime;
10
11pub fn ensure_directory(path: &Path) -> Result<()> {
12 if !path.exists() {
13 std::fs::create_dir_all(path).map_err(|e| FileSystemError::CreateDirectoryFailed {
14 path: path.display().to_string(),
15 source: e,
16 })?;
17 }
18 Ok(())
19}
20
21pub fn write_schemas(
22 output_dir: &Path,
23 module_name: &str,
24 types: &[TypeScriptType],
25 zod_schemas: &[ZodSchema],
26) -> Result<Vec<PathBuf>> {
27 write_schemas_with_options(
28 output_dir,
29 module_name,
30 types,
31 zod_schemas,
32 None,
33 false,
34 false,
35 )
36}
37
38pub fn write_schemas_with_options(
39 output_dir: &Path,
40 module_name: &str,
41 types: &[TypeScriptType],
42 zod_schemas: &[ZodSchema],
43 spec_name: Option<&str>,
44 backup: bool,
45 force: bool,
46) -> Result<Vec<PathBuf>> {
47 write_schemas_with_module_mapping(
48 output_dir,
49 module_name,
50 types,
51 zod_schemas,
52 spec_name,
53 backup,
54 force,
55 None, &[], )
58}
59
60#[allow(clippy::too_many_arguments)]
61pub fn write_schemas_with_module_mapping(
62 output_dir: &Path,
63 module_name: &str,
64 types: &[TypeScriptType],
65 zod_schemas: &[ZodSchema],
66 _spec_name: Option<&str>,
67 backup: bool,
68 force: bool,
69 module_schemas: Option<&std::collections::HashMap<String, Vec<String>>>,
70 common_schemas: &[String],
71) -> Result<Vec<PathBuf>> {
72 let module_dir = output_dir.join(sanitize_module_name(module_name));
76 ensure_directory(&module_dir)?;
77
78 let mut written_files = Vec::new();
79
80 if !types.is_empty() {
82 let mut seen_type_names = std::collections::HashSet::new();
85 let mut deduplicated_types = Vec::new();
86 for t in types {
87 let type_name = if let Some(start) = t.content.find("export type ") {
89 let after_export_type = &t.content[start + 12..];
90 if let Some(end) = after_export_type.find([' ', '=', '\n']) {
91 after_export_type[..end].trim().to_string()
92 } else {
93 after_export_type.trim().to_string()
94 }
95 } else if let Some(start) = t.content.find("export interface ") {
96 let after_export_interface = &t.content[start + 17..];
97 if let Some(end) = after_export_interface.find([' ', '{', '\n']) {
98 after_export_interface[..end].trim().to_string()
99 } else {
100 after_export_interface.trim().to_string()
101 }
102 } else {
103 t.content.clone()
105 };
106
107 if !seen_type_names.contains(&type_name) {
108 seen_type_names.insert(type_name);
109 deduplicated_types.push(t);
110 }
111 }
112
113 let types_content_raw = deduplicated_types
114 .iter()
115 .map(|t| t.content.clone())
116 .collect::<Vec<_>>()
117 .join("\n\n");
118
119 let needs_common_import = types_content_raw.contains("Common.");
123 let common_import = if needs_common_import {
124 let relative_path = "../";
128 format!("import * as Common from \"{}common\";\n\n", relative_path)
129 } else {
130 String::new()
131 };
132
133 let types_content =
134 format_typescript_code(&format!("{}{}", common_import, types_content_raw));
135
136 let types_file = module_dir.join("types.ts");
137 write_file_with_backup(&types_file, &types_content, backup, force)?;
138 written_files.push(types_file);
139 }
140
141 if !zod_schemas.is_empty() {
143 let zod_content_raw = zod_schemas
144 .iter()
145 .map(|z| z.content.clone())
146 .collect::<Vec<_>>()
147 .join("\n\n");
148
149 let needs_common_import = zod_content_raw.contains("Common.");
153 let common_import = if needs_common_import {
154 let relative_path = "../";
158 format!("import * as Common from \"{}common\";\n\n", relative_path)
159 } else {
160 String::new()
161 };
162
163 let mut cross_module_imports: std::collections::HashMap<
167 String,
168 std::collections::HashSet<String>,
169 > = std::collections::HashMap::new();
170 if let Some(module_schemas_map) = module_schemas {
171 let _current_module_schemas: std::collections::HashSet<String> = module_schemas_map
172 .get(module_name)
173 .cloned()
174 .unwrap_or_default()
175 .into_iter()
176 .collect();
177
178 let locally_defined_enums: std::collections::HashSet<String> = zod_schemas
180 .iter()
181 .filter_map(|z| {
182 if let Some(start) = z.content.find("export const ") {
184 let after_export = &z.content[start + 13..];
185 if let Some(end) = after_export.find("EnumSchema") {
186 let enum_name = &after_export[..end + "EnumSchema".len()];
187 if enum_name.ends_with("EnumSchema") {
188 return Some(enum_name.to_string());
189 }
190 }
191 }
192 None
193 })
194 .collect();
195
196 let mut pos = 0;
199 while let Some(start) = zod_content_raw[pos..].find("EnumSchema") {
200 let actual_start = pos + start;
201 let mut name_start = actual_start;
203 while name_start > 0 {
204 let ch = zod_content_raw.chars().nth(name_start - 1).unwrap_or(' ');
205 if !ch.is_alphanumeric() && ch != '_' {
206 break;
207 }
208 name_start -= 1;
209 }
210 let enum_name = &zod_content_raw[name_start..actual_start + "EnumSchema".len()];
211
212 if enum_name.starts_with("Common.") {
214 pos = actual_start + "EnumSchema".len();
215 continue;
216 }
217
218 if locally_defined_enums.contains(enum_name) {
220 pos = actual_start + "EnumSchema".len();
221 continue;
222 }
223
224 let schema_name = enum_name.replace("EnumSchema", "");
226
227 if !locally_defined_enums.contains(enum_name)
230 && !common_schemas.contains(&schema_name)
231 {
232 let mut found_module: Option<String> = None;
235 for (other_module, other_schemas) in module_schemas_map {
236 if other_module != module_name && other_schemas.contains(&schema_name) {
237 if !common_schemas.contains(&schema_name) {
240 found_module = Some(other_module.clone());
241 break;
242 }
243 }
244 }
245
246 if found_module.is_none() {
249 let schema_name_lower = schema_name.to_lowercase();
250 for (other_module, other_schemas) in module_schemas_map {
251 if other_module != module_name {
252 for other_schema in other_schemas {
254 let other_schema_lower = other_schema.to_lowercase();
255 if (other_schema_lower == schema_name_lower
258 || other_schema_lower.contains(&schema_name_lower)
259 || schema_name_lower.contains(&other_schema_lower))
260 && !common_schemas.contains(other_schema)
261 {
262 found_module = Some(other_module.clone());
263 break;
264 }
265 }
266 if found_module.is_some() {
267 break;
268 }
269 }
270 }
271 }
272
273 if let Some(module) = found_module {
275 cross_module_imports
276 .entry(module)
277 .or_default()
278 .insert(enum_name.to_string());
279 }
280 }
283
284 pos = actual_start + "EnumSchema".len();
285 }
286 }
287
288 let mut cross_module_import_lines = String::new();
290 for (other_module, enum_names_set) in &cross_module_imports {
291 let mut enum_names: Vec<String> = enum_names_set.iter().cloned().collect();
292 enum_names.sort(); if !enum_names.is_empty() {
294 let relative_path = "../";
295 let module_import = format!(
296 "import {{ {} }} from \"{}{}\";\n",
297 enum_names.join(", "),
298 relative_path,
299 sanitize_module_name(other_module)
300 );
301 cross_module_import_lines.push_str(&module_import);
302 }
303 }
304 if !cross_module_import_lines.is_empty() {
305 cross_module_import_lines.push('\n');
306 }
307
308 let zod_content = format_typescript_code(&format!(
309 "import {{ z }} from \"zod\";\n{}{}{}",
310 if !common_import.is_empty() {
311 &common_import
312 } else {
313 ""
314 },
315 cross_module_import_lines,
316 zod_content_raw
317 ));
318
319 let zod_file = module_dir.join("schemas.ts");
320 write_file_with_backup(&zod_file, &zod_content, backup, force)?;
321 written_files.push(zod_file);
322 }
323
324 let mut index_exports = Vec::new();
326 if !types.is_empty() {
327 index_exports.push("export * from \"./types\";".to_string());
328 }
329 if !zod_schemas.is_empty() {
330 index_exports.push("export * from \"./schemas\";".to_string());
331 }
332
333 if !index_exports.is_empty() {
334 let index_content = format_typescript_code(&(index_exports.join("\n") + "\n"));
338 let index_file = module_dir.join("index.ts");
339 write_file_with_backup(&index_file, &index_content, backup, force)?;
340 written_files.push(index_file);
341 }
342
343 Ok(written_files)
344}
345
346pub fn write_api_client(
347 output_dir: &Path,
348 module_name: &str,
349 functions: &[ApiFunction],
350) -> Result<Vec<PathBuf>> {
351 write_api_client_with_options(output_dir, module_name, functions, None, false, false)
352}
353
354pub fn write_api_client_with_options(
355 output_dir: &Path,
356 module_name: &str,
357 functions: &[ApiFunction],
358 _spec_name: Option<&str>,
359 backup: bool,
360 force: bool,
361) -> Result<Vec<PathBuf>> {
362 let module_dir = output_dir.join(sanitize_module_name(module_name));
366 ensure_directory(&module_dir)?;
367
368 let mut written_files = Vec::new();
369
370 if !functions.is_empty() {
371 let mut imports_by_module: std::collections::HashMap<
375 String,
376 (std::collections::HashSet<String>, Vec<String>),
377 > = std::collections::HashMap::new();
378 let mut function_bodies = Vec::new();
379 let mut seen_functions: std::collections::HashSet<String> =
380 std::collections::HashSet::new();
381
382 for func in functions {
383 let lines: Vec<&str> = func.content.lines().collect();
384 let mut func_lines = Vec::new();
385 let mut in_function = false;
386 let mut jsdoc_lines = Vec::new();
387 let mut in_jsdoc = false;
388 let mut function_name: Option<String> = None;
389
390 for line in lines {
391 if line.trim().starts_with("import ") {
392 let import_line = line.trim().trim_end_matches(';').trim();
393 if let Some(from_pos) = import_line.find(" from ") {
395 let before_from = &import_line[..from_pos];
396 let after_from = &import_line[from_pos + 6..];
397 let module_path = after_from.trim_matches('"').trim_matches('\'').trim();
398
399 if before_from.contains("import type {") {
401 if let Some(start) = before_from.find('{') {
403 if let Some(end) = before_from.find('}') {
404 let items_str = &before_from[start + 1..end];
405 let items: Vec<String> = items_str
406 .split(',')
407 .map(|s| s.trim().to_string())
408 .filter(|s| !s.is_empty())
409 .collect();
410
411 let (type_imports, _) = imports_by_module
412 .entry(module_path.to_string())
413 .or_insert_with(|| {
414 (std::collections::HashSet::new(), Vec::new())
415 });
416 type_imports.extend(items);
417 }
418 }
419 } else if before_from.contains("import * as ") {
420 let (_, other_imports) = imports_by_module
423 .entry(module_path.to_string())
424 .or_insert_with(|| (std::collections::HashSet::new(), Vec::new()));
425 other_imports.push(import_line.to_string());
426 } else {
427 let (_, other_imports) = imports_by_module
430 .entry(module_path.to_string())
431 .or_insert_with(|| (std::collections::HashSet::new(), Vec::new()));
432 other_imports.push(import_line.to_string());
433 }
434 } else {
435 let (_, other_imports) = imports_by_module
437 .entry("".to_string())
438 .or_insert_with(|| (std::collections::HashSet::new(), Vec::new()));
439 other_imports.push(import_line.to_string());
440 }
441 } else if line.trim().starts_with("/**") {
442 in_jsdoc = true;
444 jsdoc_lines.push(line);
445 } else if in_jsdoc {
446 jsdoc_lines.push(line);
447 if line.trim().ends_with("*/") {
448 in_jsdoc = false;
450 }
451 } else if line.trim().starts_with("export const ") {
452 let trimmed = line.trim();
455 if trimmed.len() > 13 {
456 let after_export_const = &trimmed[13..];
457 let name_end = after_export_const
459 .find(' ')
460 .or_else(|| after_export_const.find('('))
461 .unwrap_or(after_export_const.len());
462 let name = after_export_const[..name_end].trim().to_string();
463 if !name.is_empty() {
464 function_name = Some(name.clone());
465 if seen_functions.contains(&name) {
466 jsdoc_lines.clear();
468 break;
469 }
470 seen_functions.insert(name);
471 }
472 }
473 in_function = true;
474 func_lines.append(&mut jsdoc_lines);
476 func_lines.push(line);
477 } else if in_function {
478 func_lines.push(line);
479 if line.trim() == "};" {
481 break;
482 }
483 }
484 }
486
487 if !func_lines.is_empty() && function_name.is_some() {
488 function_bodies.push(func_lines.join("\n"));
489 }
490 }
491
492 let mut imports_vec = Vec::new();
495 for (module_path, (type_import_items, other_imports)) in imports_by_module.iter() {
496 if module_path.is_empty() {
497 let deduped: std::collections::HashSet<String> =
499 other_imports.iter().cloned().collect();
500 imports_vec.extend(deduped.into_iter());
501 } else {
502 let deduped_imports: std::collections::HashSet<String> =
504 other_imports.iter().cloned().collect();
505 let mut namespace_imports = Vec::new();
506 let mut default_imports = Vec::new();
507
508 for item in deduped_imports.iter() {
509 if item.contains("import * as") {
510 namespace_imports.push(item.clone());
512 } else {
513 default_imports.push(item.clone());
515 }
516 }
517
518 namespace_imports.sort();
520 for ns_import in namespace_imports {
521 imports_vec.push(format!("{};", ns_import));
522 }
523
524 default_imports.sort();
526 for default_import in default_imports {
527 imports_vec.push(format!("{};", default_import));
528 }
529
530 if !type_import_items.is_empty() {
532 let mut sorted_types: Vec<String> = type_import_items.iter().cloned().collect();
533 sorted_types.sort();
534 imports_vec.push(format!(
535 "import type {{ {} }} from \"{}\";",
536 sorted_types.join(", "),
537 module_path
538 ));
539 }
540 }
541 }
542 let imports_str = imports_vec.join("\n");
543 let functions_str = function_bodies.join("\n\n");
544 let combined_content = if !imports_str.is_empty() {
545 format!("{}\n\n{}", imports_str, functions_str)
546 } else {
547 functions_str
548 };
549
550 let functions_content = format_typescript_code(&combined_content);
551
552 let api_file = module_dir.join("index.ts");
553 write_file_with_backup(&api_file, &functions_content, backup, force)?;
554 written_files.push(api_file);
555 }
556
557 Ok(written_files)
558}
559
/// Write the shared, dependency-free `http` helper used by generated clients.
///
/// The content is a fixed TypeScript template (fetch-based helpers for
/// GET/POST/PUT/DELETE/PATCH/HEAD/OPTIONS). It goes through `write_file_safe`,
/// so the file is only rewritten when its content actually changed.
pub fn write_http_client_template(output_path: &Path) -> Result<()> {
    // Ensure the parent directory exists; fall back to "." for bare file names.
    ensure_directory(output_path.parent().unwrap_or(Path::new(".")))?;

    // The template below is emitted verbatim into the output file.
    let http_client_content = r#"const requestInitIndicators = [
  "method",
  "headers",
  "body",
  "signal",
  "credentials",
  "cache",
  "redirect",
  "referrer",
  "referrerPolicy",
  "integrity",
  "keepalive",
  "mode",
  "priority",
  "window",
];

const isRequestInitLike = (value: unknown): value is RequestInit => {
  if (!value || typeof value !== "object") {
    return false;
  }
  const candidate = value as Record<string, unknown>;
  return requestInitIndicators.some((key) => key in candidate);
};

export const http = {
  // GET helper. Second argument can be either a RequestInit or a JSON body for uncommon GET-with-body endpoints.
  async get<T = any>(url: string, optionsOrBody?: RequestInit | unknown): Promise<T> {
    let init: RequestInit = { method: "GET", body: null };

    if (optionsOrBody !== undefined && optionsOrBody !== null) {
      if (isRequestInitLike(optionsOrBody)) {
        const candidate = optionsOrBody as RequestInit;
        init = {
          ...candidate,
          method: "GET",
          body: candidate.body ?? null,
        };
      } else {
        init = {
          method: "GET",
          headers: {
            "Content-Type": "application/json",
          },
          body: JSON.stringify(optionsOrBody),
        };
      }
    }

    const response = await fetch(url, {
      ...init,
    });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    return response.json();
  },

  async post<T = any>(url: string, body?: any, options: RequestInit = {}): Promise<T> {
    const response = await fetch(url, {
      ...options,
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        ...(options.headers || {}),
      },
      body: body !== undefined ? JSON.stringify(body) : (options.body ?? null),
    });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    return response.json();
  },

  async put<T = any>(url: string, body?: any, options: RequestInit = {}): Promise<T> {
    const response = await fetch(url, {
      ...options,
      method: "PUT",
      headers: {
        "Content-Type": "application/json",
        ...(options.headers || {}),
      },
      body: body !== undefined ? JSON.stringify(body) : (options.body ?? null),
    });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    return response.json();
  },

  async delete<T = any>(url: string, options: RequestInit = {}): Promise<T> {
    const response = await fetch(url, {
      ...options,
      method: "DELETE",
      body: options.body ?? null,
    });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    return response.json();
  },

  async patch<T = any>(url: string, body?: any, options: RequestInit = {}): Promise<T> {
    const response = await fetch(url, {
      ...options,
      method: "PATCH",
      headers: {
        "Content-Type": "application/json",
        ...(options.headers || {}),
      },
      body: body !== undefined ? JSON.stringify(body) : (options.body ?? null),
    });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    return response.json();
  },

  async head(url: string, options: RequestInit = {}): Promise<Response> {
    const response = await fetch(url, {
      ...options,
      method: "HEAD",
      body: options.body ?? null,
    });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    return response;
  },

  async options<T = any>(url: string, options: RequestInit = {}): Promise<T> {
    const response = await fetch(url, {
      ...options,
      method: "OPTIONS",
      body: options.body ?? null,
    });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    return response.json();
  },
};
"#;

    write_file_safe(output_path, http_client_content)?;

    Ok(())
}
711
/// Light formatter for generated TypeScript: collapses runs of blank lines
/// into a single blank line and strips blank lines from the start and end.
///
/// Returns the cleaned text without a trailing newline.
fn format_typescript_code(code: &str) -> String {
    let mut cleaned: Vec<String> = Vec::new();
    let mut previous_blank = false;

    for raw_line in code.lines() {
        if raw_line.trim().is_empty() {
            // Keep at most one blank line in a row, and none at the very top.
            if !previous_blank && !cleaned.is_empty() {
                cleaned.push(String::new());
                previous_blank = true;
            }
        } else {
            previous_blank = false;
            cleaned.push(raw_line.to_string());
        }
    }

    // Drop any blank line left dangling at the end.
    while matches!(cleaned.last(), Some(l) if l.is_empty()) {
        cleaned.pop();
    }

    cleaned.join("\n")
}
737
738pub fn write_file_safe(path: &Path, content: &str) -> Result<()> {
739 write_file_with_backup(path, content, false, false)
740}
741
/// Write `content` to `path`, with optional backup and user-modification
/// protection.
///
/// Behavior:
/// * No-op when the file already has exactly this content.
/// * With `backup`, an existing file is snapshotted via `create_backup` first.
/// * Without `force`, if cached metadata exists and neither the new content
///   nor the on-disk file matches the recorded hash, a formatter comparison is
///   attempted; only when the regenerated content equals the recorded hash
///   (i.e. generation is unchanged but the file on disk differs) is the write
///   rejected with `FileModifiedByUser`.
/// * On success the content hash is recorded via `save_file_metadata`.
pub fn write_file_with_backup(path: &Path, content: &str, backup: bool, force: bool) -> Result<()> {
    let file_exists = path.exists();
    // Skip the write entirely when the on-disk content already matches;
    // unreadable files are treated as "needs write".
    let should_write = if file_exists {
        if let Ok(existing_content) = std::fs::read_to_string(path) {
            existing_content != content
        } else {
            true
        }
    } else {
        true
    };

    if !should_write {
        return Ok(());
    }

    if backup && file_exists {
        create_backup(path)?;
    }

    // User-modification check: only runs when not forced, the file exists,
    // and we have cached metadata from a previous generation run.
    if !force && file_exists {
        if let Ok(metadata) = load_file_metadata(path) {
            let current_hash = compute_content_hash(content);
            let file_hash = compute_file_hash(path)?;

            // Interesting case only when the recorded hash matches neither the
            // new content nor the current file (both changed since last run).
            if metadata.hash != current_hash && metadata.hash != file_hash {
                use crate::formatter::FormatterManager;

                // Walk up from the file's directory to find a project-local
                // formatter configuration.
                let mut search_dir = path.parent().unwrap_or_else(|| Path::new("."));
                let mut formatter = None;

                while search_dir != Path::new("/") && search_dir != Path::new("") {
                    if let Some(fmt) = FormatterManager::detect_formatter_from_dir(search_dir) {
                        formatter = Some(fmt);
                        break;
                    }
                    if let Some(parent) = search_dir.parent() {
                        search_dir = parent;
                    } else {
                        break;
                    }
                }

                // Fall back to a globally detected formatter.
                if formatter.is_none() {
                    formatter = FormatterManager::detect_formatter();
                }

                if let Some(fmt) = formatter {
                    match FormatterManager::format_content(content, fmt, path) {
                        Ok(formatted_content) => {
                            let formatted_hash = compute_content_hash(&formatted_content);
                            if formatted_hash == file_hash {
                                // The on-disk file is just the formatted version
                                // of what we are about to write — not a user
                                // edit; fall through and overwrite.
                            } else {
                                if current_hash == metadata.hash {
                                    // Generator output is unchanged but the file
                                    // differs even after formatting: the user
                                    // edited it — refuse to clobber.
                                    return Err(FileSystemError::FileModifiedByUser {
                                        path: path.display().to_string(),
                                    }
                                    .into());
                                }
                            }
                        }
                        Err(_) => {
                            if current_hash == metadata.hash {
                                // NOTE(review): empty branch — formatting failed
                                // and output is unchanged; currently falls
                                // through to overwrite. Looks like stripped-out
                                // handling; confirm intended policy.
                            }
                        }
                    }
                } else {
                    if current_hash == metadata.hash {
                        // NOTE(review): empty branch — no formatter available;
                        // currently falls through to overwrite. Confirm whether
                        // this should also raise FileModifiedByUser.
                    }
                }
            }
        }
    }

    std::fs::write(path, content).map_err(|e| FileSystemError::WriteFileFailed {
        path: path.display().to_string(),
        source: e,
    })?;

    // Record the hash of what we just wrote for the next run's check.
    save_file_metadata(path, content)?;

    Ok(())
}
855
856fn create_backup(path: &Path) -> Result<()> {
857 use std::collections::hash_map::DefaultHasher;
858 use std::hash::{Hash, Hasher};
859 use std::time::{SystemTime, UNIX_EPOCH};
860
861 let timestamp = SystemTime::now()
862 .duration_since(UNIX_EPOCH)
863 .unwrap()
864 .as_secs();
865
866 let backup_dir = PathBuf::from(format!(".vika-backup/{}", timestamp));
867 std::fs::create_dir_all(&backup_dir).map_err(|e| FileSystemError::CreateDirectoryFailed {
868 path: backup_dir.display().to_string(),
869 source: e,
870 })?;
871
872 let backup_path = if path.is_absolute() {
874 let path_str = path.display().to_string();
877 let mut hasher = DefaultHasher::new();
878 path_str.hash(&mut hasher);
879 let hash = format!("{:x}", hasher.finish());
880 let filename = path.file_name().and_then(|n| n.to_str()).unwrap_or("file");
881 backup_dir.join(format!("{}_{}", hash, filename))
882 } else {
883 let relative_path = path.strip_prefix(".").unwrap_or(path);
885 backup_dir.join(relative_path)
886 };
887
888 if let Some(parent) = backup_path.parent() {
889 std::fs::create_dir_all(parent).map_err(|e| FileSystemError::CreateDirectoryFailed {
890 path: parent.display().to_string(),
891 source: e,
892 })?;
893 }
894
895 std::fs::copy(path, &backup_path).map_err(|e| FileSystemError::WriteFileFailed {
896 path: backup_path.display().to_string(),
897 source: e,
898 })?;
899
900 Ok(())
901}
902
/// Cache record for one generated file, persisted in
/// `.vika-cache/file-metadata.json` keyed by the file's display path.
#[derive(Clone, serde::Serialize, serde::Deserialize)]
struct FileMetadata {
    // Hex hash of the file content as generated (see `compute_content_hash`).
    hash: String,
    // Unix timestamp (seconds) of when the file was generated.
    generated_at: u64,
    // Generating tool identifier; currently always "vika-cli".
    generated_by: String,
}
909
/// Hash `content` with the std `DefaultHasher` and render it as lowercase hex.
///
/// NOTE(review): `DefaultHasher` is deterministic within one build, but its
/// algorithm is not guaranteed stable across Rust releases — acceptable for a
/// local cache, not for durable fingerprints.
fn compute_content_hash(content: &str) -> String {
    let mut state = DefaultHasher::new();
    content.hash(&mut state);
    let digest = state.finish();
    format!("{:x}", digest)
}
915
916fn compute_file_hash(path: &Path) -> Result<String> {
917 let content = std::fs::read_to_string(path).map_err(|e| FileSystemError::ReadFileFailed {
918 path: path.display().to_string(),
919 source: e,
920 })?;
921 Ok(compute_content_hash(&content))
922}
923
924pub fn update_file_metadata_from_disk(path: &Path) -> Result<()> {
927 let content = std::fs::read_to_string(path).map_err(|e| FileSystemError::ReadFileFailed {
928 path: path.display().to_string(),
929 source: e,
930 })?;
931 save_file_metadata(path, &content)
932}
933
934pub fn batch_update_file_metadata_from_disk(paths: &[PathBuf]) -> Result<()> {
938 if paths.is_empty() {
939 return Ok(());
940 }
941
942 let metadata_dir = PathBuf::from(".vika-cache");
943 std::fs::create_dir_all(&metadata_dir).map_err(|e| FileSystemError::CreateDirectoryFailed {
944 path: metadata_dir.display().to_string(),
945 source: e,
946 })?;
947
948 let metadata_file = metadata_dir.join("file-metadata.json");
949 let mut metadata_map: std::collections::HashMap<String, FileMetadata> =
950 if metadata_file.exists() {
951 let content = std::fs::read_to_string(&metadata_file).map_err(|e| {
952 FileSystemError::ReadFileFailed {
953 path: metadata_file.display().to_string(),
954 source: e,
955 }
956 })?;
957 serde_json::from_str(&content).unwrap_or_default()
958 } else {
959 std::collections::HashMap::new()
960 };
961
962 let generated_at = SystemTime::now()
963 .duration_since(std::time::UNIX_EPOCH)
964 .unwrap()
965 .as_secs();
966
967 for path in paths {
969 match std::fs::read_to_string(path) {
970 Ok(content) => {
971 let hash = compute_content_hash(&content);
972 metadata_map.insert(
973 path.display().to_string(),
974 FileMetadata {
975 hash,
976 generated_at,
977 generated_by: "vika-cli".to_string(),
978 },
979 );
980 }
981 Err(e) => {
982 eprintln!("Warning: Failed to read {}: {}", path.display(), e);
984 }
985 }
986 }
987
988 let json = serde_json::to_string_pretty(&metadata_map).map_err(|e| {
990 FileSystemError::WriteFileFailed {
991 path: metadata_file.display().to_string(),
992 source: std::io::Error::new(std::io::ErrorKind::InvalidData, format!("{}", e)),
993 }
994 })?;
995
996 std::fs::write(&metadata_file, json).map_err(|e| FileSystemError::WriteFileFailed {
997 path: metadata_file.display().to_string(),
998 source: e,
999 })?;
1000
1001 Ok(())
1002}
1003
1004pub fn save_file_metadata(path: &Path, content: &str) -> Result<()> {
1005 let metadata_dir = PathBuf::from(".vika-cache");
1006 std::fs::create_dir_all(&metadata_dir).map_err(|e| FileSystemError::CreateDirectoryFailed {
1007 path: metadata_dir.display().to_string(),
1008 source: e,
1009 })?;
1010
1011 let metadata_file = metadata_dir.join("file-metadata.json");
1012 let mut metadata_map: std::collections::HashMap<String, FileMetadata> =
1013 if metadata_file.exists() {
1014 let content = std::fs::read_to_string(&metadata_file).map_err(|e| {
1015 FileSystemError::ReadFileFailed {
1016 path: metadata_file.display().to_string(),
1017 source: e,
1018 }
1019 })?;
1020 serde_json::from_str(&content).unwrap_or_default()
1021 } else {
1022 std::collections::HashMap::new()
1023 };
1024
1025 let hash = compute_content_hash(content);
1026 let generated_at = SystemTime::now()
1027 .duration_since(std::time::UNIX_EPOCH)
1028 .unwrap()
1029 .as_secs();
1030
1031 metadata_map.insert(
1032 path.display().to_string(),
1033 FileMetadata {
1034 hash,
1035 generated_at,
1036 generated_by: "vika-cli".to_string(),
1037 },
1038 );
1039
1040 let json = serde_json::to_string_pretty(&metadata_map).map_err(|e| {
1041 FileSystemError::WriteFileFailed {
1042 path: metadata_file.display().to_string(),
1043 source: std::io::Error::new(std::io::ErrorKind::InvalidData, format!("{}", e)),
1044 }
1045 })?;
1046
1047 std::fs::write(&metadata_file, json).map_err(|e| FileSystemError::WriteFileFailed {
1048 path: metadata_file.display().to_string(),
1049 source: e,
1050 })?;
1051
1052 Ok(())
1053}
1054
1055fn load_file_metadata(path: &Path) -> Result<FileMetadata> {
1056 let metadata_file = PathBuf::from(".vika-cache/file-metadata.json");
1057 if !metadata_file.exists() {
1058 return Err(FileSystemError::FileNotFound {
1059 path: metadata_file.display().to_string(),
1060 }
1061 .into());
1062 }
1063
1064 let content =
1065 std::fs::read_to_string(&metadata_file).map_err(|e| FileSystemError::ReadFileFailed {
1066 path: metadata_file.display().to_string(),
1067 source: e,
1068 })?;
1069
1070 let metadata_map: std::collections::HashMap<String, FileMetadata> =
1071 serde_json::from_str(&content).map_err(|e| FileSystemError::ReadFileFailed {
1072 path: metadata_file.display().to_string(),
1073 source: std::io::Error::new(std::io::ErrorKind::InvalidData, format!("{}", e)),
1074 })?;
1075
1076 metadata_map
1077 .get(&path.display().to_string())
1078 .cloned()
1079 .ok_or_else(|| {
1080 FileSystemError::FileNotFound {
1081 path: path.display().to_string(),
1082 }
1083 .into()
1084 })
1085}