1use crate::error::{FileSystemError, Result};
2use crate::generator::api_client::ApiFunction;
3use crate::generator::ts_typings::TypeScriptType;
4use crate::generator::utils::sanitize_module_name;
5use crate::generator::zod_schema::ZodSchema;
6use std::collections::hash_map::DefaultHasher;
7use std::hash::{Hash, Hasher};
8use std::path::{Path, PathBuf};
9use std::time::SystemTime;
10
11pub fn ensure_directory(path: &Path) -> Result<()> {
12 if !path.exists() {
13 std::fs::create_dir_all(path).map_err(|e| FileSystemError::CreateDirectoryFailed {
14 path: path.display().to_string(),
15 source: e,
16 })?;
17 }
18 Ok(())
19}
20
21pub fn write_schemas(
22 output_dir: &Path,
23 module_name: &str,
24 types: &[TypeScriptType],
25 zod_schemas: &[ZodSchema],
26) -> Result<Vec<PathBuf>> {
27 write_schemas_with_options(
28 output_dir,
29 module_name,
30 types,
31 zod_schemas,
32 None,
33 false,
34 false,
35 )
36}
37
38pub fn write_schemas_with_options(
39 output_dir: &Path,
40 module_name: &str,
41 types: &[TypeScriptType],
42 zod_schemas: &[ZodSchema],
43 spec_name: Option<&str>,
44 backup: bool,
45 force: bool,
46) -> Result<Vec<PathBuf>> {
47 write_schemas_with_module_mapping(
48 output_dir,
49 module_name,
50 types,
51 zod_schemas,
52 spec_name,
53 backup,
54 force,
55 None, &[], )
58}
59
60#[allow(clippy::too_many_arguments)]
61pub fn write_schemas_with_module_mapping(
62 output_dir: &Path,
63 module_name: &str,
64 types: &[TypeScriptType],
65 zod_schemas: &[ZodSchema],
66 _spec_name: Option<&str>,
67 backup: bool,
68 force: bool,
69 module_schemas: Option<&std::collections::HashMap<String, Vec<String>>>,
70 common_schemas: &[String],
71) -> Result<Vec<PathBuf>> {
72 let module_dir = output_dir.join(sanitize_module_name(module_name));
76 ensure_directory(&module_dir)?;
77
78 let mut written_files = Vec::new();
79
80 if !types.is_empty() {
82 let mut seen_type_names = std::collections::HashSet::new();
85 let mut deduplicated_types = Vec::new();
86 for t in types {
87 let type_name = if let Some(start) = t.content.find("export type ") {
89 let after_export_type = &t.content[start + 12..];
90 if let Some(end) = after_export_type.find([' ', '=', '\n']) {
91 after_export_type[..end].trim().to_string()
92 } else {
93 after_export_type.trim().to_string()
94 }
95 } else if let Some(start) = t.content.find("export interface ") {
96 let after_export_interface = &t.content[start + 17..];
97 if let Some(end) = after_export_interface.find([' ', '{', '\n']) {
98 after_export_interface[..end].trim().to_string()
99 } else {
100 after_export_interface.trim().to_string()
101 }
102 } else {
103 t.content.clone()
105 };
106
107 if !seen_type_names.contains(&type_name) {
108 seen_type_names.insert(type_name);
109 deduplicated_types.push(t);
110 }
111 }
112
113 let types_content_raw = deduplicated_types
114 .iter()
115 .map(|t| t.content.clone())
116 .collect::<Vec<_>>()
117 .join("\n\n");
118
119 let needs_common_import = types_content_raw.contains("Common.");
123 let common_import = if needs_common_import {
124 let relative_path = "../";
128 format!("import * as Common from \"{}common\";\n\n", relative_path)
129 } else {
130 String::new()
131 };
132
133 let types_content =
134 format_typescript_code(&format!("{}{}", common_import, types_content_raw));
135
136 let types_file = module_dir.join("types.ts");
137 write_file_with_backup(&types_file, &types_content, backup, force)?;
138 written_files.push(types_file);
139 }
140
141 if !zod_schemas.is_empty() {
143 let zod_content_raw = zod_schemas
144 .iter()
145 .map(|z| z.content.clone())
146 .collect::<Vec<_>>()
147 .join("\n\n");
148
149 let needs_common_import = zod_content_raw.contains("Common.");
153 let common_import = if needs_common_import {
154 let relative_path = "../";
158 format!("import * as Common from \"{}common\";\n\n", relative_path)
159 } else {
160 String::new()
161 };
162
163 let mut cross_module_imports: std::collections::HashMap<
167 String,
168 std::collections::HashSet<String>,
169 > = std::collections::HashMap::new();
170 if let Some(module_schemas_map) = module_schemas {
171 let _current_module_schemas: std::collections::HashSet<String> = module_schemas_map
172 .get(module_name)
173 .cloned()
174 .unwrap_or_default()
175 .into_iter()
176 .collect();
177
178 let locally_defined_enums: std::collections::HashSet<String> = zod_schemas
180 .iter()
181 .filter_map(|z| {
182 if let Some(start) = z.content.find("export const ") {
184 let after_export = &z.content[start + 13..];
185 if let Some(end) = after_export.find("EnumSchema") {
186 let enum_name = &after_export[..end + "EnumSchema".len()];
187 if enum_name.ends_with("EnumSchema") {
188 return Some(enum_name.to_string());
189 }
190 }
191 }
192 None
193 })
194 .collect();
195
196 let mut pos = 0;
199 while let Some(start) = zod_content_raw[pos..].find("EnumSchema") {
200 let actual_start = pos + start;
201 let mut name_start = actual_start;
203 while name_start > 0 {
204 let ch = zod_content_raw.chars().nth(name_start - 1).unwrap_or(' ');
205 if !ch.is_alphanumeric() && ch != '_' {
206 break;
207 }
208 name_start -= 1;
209 }
210 let enum_name = &zod_content_raw[name_start..actual_start + "EnumSchema".len()];
211
212 if enum_name.starts_with("Common.") {
214 pos = actual_start + "EnumSchema".len();
215 continue;
216 }
217
218 if locally_defined_enums.contains(enum_name) {
220 pos = actual_start + "EnumSchema".len();
221 continue;
222 }
223
224 let schema_name = enum_name.replace("EnumSchema", "");
226
227 if !locally_defined_enums.contains(enum_name)
230 && !common_schemas.contains(&schema_name)
231 {
232 let mut found_module: Option<String> = None;
235 for (other_module, other_schemas) in module_schemas_map {
236 if other_module != module_name && other_schemas.contains(&schema_name) {
237 if !common_schemas.contains(&schema_name) {
240 found_module = Some(other_module.clone());
241 break;
242 }
243 }
244 }
245
246 if found_module.is_none() {
249 let schema_name_lower = schema_name.to_lowercase();
250 for (other_module, other_schemas) in module_schemas_map {
251 if other_module != module_name {
252 for other_schema in other_schemas {
254 let other_schema_lower = other_schema.to_lowercase();
255 if (other_schema_lower == schema_name_lower
258 || other_schema_lower.contains(&schema_name_lower)
259 || schema_name_lower.contains(&other_schema_lower))
260 && !common_schemas.contains(other_schema)
261 {
262 found_module = Some(other_module.clone());
263 break;
264 }
265 }
266 if found_module.is_some() {
267 break;
268 }
269 }
270 }
271 }
272
273 if let Some(module) = found_module {
275 cross_module_imports
276 .entry(module)
277 .or_default()
278 .insert(enum_name.to_string());
279 }
280 }
283
284 pos = actual_start + "EnumSchema".len();
285 }
286 }
287
288 let mut cross_module_import_lines = String::new();
290 for (other_module, enum_names_set) in &cross_module_imports {
291 let mut enum_names: Vec<String> = enum_names_set.iter().cloned().collect();
292 enum_names.sort(); if !enum_names.is_empty() {
294 let relative_path = "../";
295 let module_import = format!(
296 "import {{ {} }} from \"{}{}\";\n",
297 enum_names.join(", "),
298 relative_path,
299 sanitize_module_name(other_module)
300 );
301 cross_module_import_lines.push_str(&module_import);
302 }
303 }
304 if !cross_module_import_lines.is_empty() {
305 cross_module_import_lines.push('\n');
306 }
307
308 let zod_content = format_typescript_code(&format!(
309 "import {{ z }} from \"zod\";\n{}{}{}",
310 if !common_import.is_empty() {
311 &common_import
312 } else {
313 ""
314 },
315 cross_module_import_lines,
316 zod_content_raw
317 ));
318
319 let zod_file = module_dir.join("schemas.ts");
320 write_file_with_backup(&zod_file, &zod_content, backup, force)?;
321 written_files.push(zod_file);
322 }
323
324 let mut index_exports = Vec::new();
326 if !types.is_empty() {
327 index_exports.push("export * from \"./types\";".to_string());
328 }
329 if !zod_schemas.is_empty() {
330 index_exports.push("export * from \"./schemas\";".to_string());
331 }
332
333 if !index_exports.is_empty() {
334 let index_content = format_typescript_code(&(index_exports.join("\n") + "\n"));
338 let index_file = module_dir.join("index.ts");
339 write_file_with_backup(&index_file, &index_content, backup, force)?;
340 written_files.push(index_file);
341 }
342
343 Ok(written_files)
344}
345
346pub fn write_api_client(
347 output_dir: &Path,
348 module_name: &str,
349 functions: &[ApiFunction],
350) -> Result<Vec<PathBuf>> {
351 write_api_client_with_options(output_dir, module_name, functions, None, false, false)
352}
353
354pub fn write_api_client_with_options(
355 output_dir: &Path,
356 module_name: &str,
357 functions: &[ApiFunction],
358 _spec_name: Option<&str>,
359 backup: bool,
360 force: bool,
361) -> Result<Vec<PathBuf>> {
362 let module_dir = output_dir.join(sanitize_module_name(module_name));
366 ensure_directory(&module_dir)?;
367
368 let mut written_files = Vec::new();
369
370 if !functions.is_empty() {
371 let mut imports_by_module: std::collections::HashMap<
375 String,
376 (std::collections::HashSet<String>, Vec<String>),
377 > = std::collections::HashMap::new();
378 let mut function_bodies = Vec::new();
379 let mut seen_functions: std::collections::HashSet<String> =
380 std::collections::HashSet::new();
381
382 for func in functions {
383 let lines: Vec<&str> = func.content.lines().collect();
384 let mut func_lines = Vec::new();
385 let mut in_function = false;
386 let mut jsdoc_lines = Vec::new();
387 let mut in_jsdoc = false;
388 let mut function_name: Option<String> = None;
389
390 for line in lines {
391 if line.trim().starts_with("import ") {
392 let import_line = line.trim().trim_end_matches(';').trim();
393 if let Some(from_pos) = import_line.find(" from ") {
395 let before_from = &import_line[..from_pos];
396 let after_from = &import_line[from_pos + 6..];
397 let module_path = after_from.trim_matches('"').trim_matches('\'').trim();
398
399 if before_from.contains("import type {") {
401 if let Some(start) = before_from.find('{') {
403 if let Some(end) = before_from.find('}') {
404 let items_str = &before_from[start + 1..end];
405 let items: Vec<String> = items_str
406 .split(',')
407 .map(|s| s.trim().to_string())
408 .filter(|s| !s.is_empty())
409 .collect();
410
411 let (type_imports, _) = imports_by_module
412 .entry(module_path.to_string())
413 .or_insert_with(|| {
414 (std::collections::HashSet::new(), Vec::new())
415 });
416 type_imports.extend(items);
417 }
418 }
419 } else if before_from.contains("import * as ") {
420 let (_, other_imports) = imports_by_module
423 .entry(module_path.to_string())
424 .or_insert_with(|| (std::collections::HashSet::new(), Vec::new()));
425 other_imports.push(import_line.to_string());
426 } else {
427 let (_, other_imports) = imports_by_module
430 .entry(module_path.to_string())
431 .or_insert_with(|| (std::collections::HashSet::new(), Vec::new()));
432 other_imports.push(import_line.to_string());
433 }
434 } else {
435 let (_, other_imports) = imports_by_module
437 .entry("".to_string())
438 .or_insert_with(|| (std::collections::HashSet::new(), Vec::new()));
439 other_imports.push(import_line.to_string());
440 }
441 } else if line.trim().starts_with("/**") {
442 in_jsdoc = true;
444 jsdoc_lines.push(line);
445 } else if in_jsdoc {
446 jsdoc_lines.push(line);
447 if line.trim().ends_with("*/") {
448 in_jsdoc = false;
450 }
451 } else if line.trim().starts_with("export const ") {
452 let trimmed = line.trim();
455 if trimmed.len() > 13 {
456 let after_export_const = &trimmed[13..];
457 let name_end = after_export_const
459 .find(' ')
460 .or_else(|| after_export_const.find('('))
461 .unwrap_or(after_export_const.len());
462 let name = after_export_const[..name_end].trim().to_string();
463 if !name.is_empty() {
464 function_name = Some(name.clone());
465 if seen_functions.contains(&name) {
466 jsdoc_lines.clear();
468 break;
469 }
470 seen_functions.insert(name);
471 }
472 }
473 in_function = true;
474 func_lines.append(&mut jsdoc_lines);
476 func_lines.push(line);
477 } else if in_function {
478 func_lines.push(line);
479 if line.trim() == "};" {
481 break;
482 }
483 }
484 }
486
487 if !func_lines.is_empty() && function_name.is_some() {
488 function_bodies.push(func_lines.join("\n"));
489 }
490 }
491
492 let mut sorted_module_paths: Vec<String> = imports_by_module.keys().cloned().collect();
496 sorted_module_paths.sort();
497
498 let mut imports_vec = Vec::new();
499 for module_path in sorted_module_paths {
500 let (type_import_items, other_imports) = imports_by_module.get(&module_path).unwrap();
501 if module_path.is_empty() {
502 let deduped: std::collections::HashSet<String> =
504 other_imports.iter().cloned().collect();
505 imports_vec.extend(deduped.into_iter());
506 } else {
507 let deduped_imports: std::collections::HashSet<String> =
509 other_imports.iter().cloned().collect();
510 let mut namespace_imports = Vec::new();
511 let mut default_imports = Vec::new();
512
513 for item in deduped_imports.iter() {
514 if item.contains("import * as") {
515 namespace_imports.push(item.clone());
517 } else {
518 default_imports.push(item.clone());
520 }
521 }
522
523 namespace_imports.sort();
525 for ns_import in namespace_imports {
526 imports_vec.push(format!("{};", ns_import));
527 }
528
529 default_imports.sort();
531 for default_import in default_imports {
532 imports_vec.push(format!("{};", default_import));
533 }
534
535 if !type_import_items.is_empty() {
537 let mut sorted_types: Vec<String> = type_import_items.iter().cloned().collect();
538 sorted_types.sort();
539 imports_vec.push(format!(
540 "import type {{ {} }} from \"{}\";",
541 sorted_types.join(", "),
542 module_path
543 ));
544 }
545 }
546 }
547 let imports_str = imports_vec.join("\n");
548 let functions_str = function_bodies.join("\n\n");
549 let combined_content = if !imports_str.is_empty() {
550 format!("{}\n\n{}", imports_str, functions_str)
551 } else {
552 functions_str
553 };
554
555 let functions_content = format_typescript_code(&combined_content);
556
557 let api_file = module_dir.join("index.ts");
558 write_file_with_backup(&api_file, &functions_content, backup, force)?;
559 written_files.push(api_file);
560 }
561
562 Ok(written_files)
563}
564
/// Write the shared `http` helper used by all generated API clients to
/// `output_path`, creating the parent directory if needed.
///
/// The emitted TypeScript wraps `fetch` with one helper per HTTP verb. Note
/// that `get` accepts either a `RequestInit` or a plain JSON body as its
/// second argument (for uncommon GET-with-body endpoints), disambiguating
/// via the `isRequestInitLike` duck-type check.
pub fn write_http_client_template(output_path: &Path) -> Result<()> {
    // A bare filename has no parent; fall back to the current directory.
    ensure_directory(output_path.parent().unwrap_or(Path::new(".")))?;

    // Emitted verbatim — keep in sync with the call style produced by the
    // API-client generator.
    let http_client_content = r#"const requestInitIndicators = [
  "method",
  "headers",
  "body",
  "signal",
  "credentials",
  "cache",
  "redirect",
  "referrer",
  "referrerPolicy",
  "integrity",
  "keepalive",
  "mode",
  "priority",
  "window",
];

const isRequestInitLike = (value: unknown): value is RequestInit => {
  if (!value || typeof value !== "object") {
    return false;
  }
  const candidate = value as Record<string, unknown>;
  return requestInitIndicators.some((key) => key in candidate);
};

export const http = {
  // GET helper. Second argument can be either a RequestInit or a JSON body for uncommon GET-with-body endpoints.
  async get<T = any>(url: string, optionsOrBody?: RequestInit | unknown): Promise<T> {
    let init: RequestInit = { method: "GET", body: null };

    if (optionsOrBody !== undefined && optionsOrBody !== null) {
      if (isRequestInitLike(optionsOrBody)) {
        const candidate = optionsOrBody as RequestInit;
        init = {
          ...candidate,
          method: "GET",
          body: candidate.body ?? null,
        };
      } else {
        init = {
          method: "GET",
          headers: {
            "Content-Type": "application/json",
          },
          body: JSON.stringify(optionsOrBody),
        };
      }
    }

    const response = await fetch(url, {
      ...init,
    });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    return response.json();
  },

  async post<T = any>(url: string, body?: any, options: RequestInit = {}): Promise<T> {
    const response = await fetch(url, {
      ...options,
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        ...(options.headers || {}),
      },
      body: body !== undefined ? JSON.stringify(body) : (options.body ?? null),
    });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    return response.json();
  },

  async put<T = any>(url: string, body?: any, options: RequestInit = {}): Promise<T> {
    const response = await fetch(url, {
      ...options,
      method: "PUT",
      headers: {
        "Content-Type": "application/json",
        ...(options.headers || {}),
      },
      body: body !== undefined ? JSON.stringify(body) : (options.body ?? null),
    });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    return response.json();
  },

  async delete<T = any>(url: string, options: RequestInit = {}): Promise<T> {
    const response = await fetch(url, {
      ...options,
      method: "DELETE",
      body: options.body ?? null,
    });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    return response.json();
  },

  async patch<T = any>(url: string, body?: any, options: RequestInit = {}): Promise<T> {
    const response = await fetch(url, {
      ...options,
      method: "PATCH",
      headers: {
        "Content-Type": "application/json",
        ...(options.headers || {}),
      },
      body: body !== undefined ? JSON.stringify(body) : (options.body ?? null),
    });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    return response.json();
  },

  async head(url: string, options: RequestInit = {}): Promise<Response> {
    const response = await fetch(url, {
      ...options,
      method: "HEAD",
      body: options.body ?? null,
    });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    return response;
  },

  async options<T = any>(url: string, options: RequestInit = {}): Promise<T> {
    const response = await fetch(url, {
      ...options,
      method: "OPTIONS",
      body: options.body ?? null,
    });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    return response.json();
  },
};
"#;

    // `write_file_safe` skips the write when on-disk content already matches
    // and records the generation hash in the metadata cache.
    write_file_safe(output_path, http_client_content)?;

    Ok(())
}
716
/// Light-weight formatter for generated TypeScript: collapses runs of blank
/// lines to a single blank line, drops leading and trailing blank lines, and
/// returns the result without a trailing newline.
fn format_typescript_code(code: &str) -> String {
    let mut out: Vec<String> = Vec::new();
    for raw in code.lines() {
        if raw.trim().is_empty() {
            // Keep at most one blank line, and none at the very start.
            if out.last().map_or(false, |prev| !prev.is_empty()) {
                out.push(String::new());
            }
        } else {
            out.push(raw.to_string());
        }
    }
    // Strip any blank line left at the end.
    while out.last().map_or(false, |last| last.is_empty()) {
        out.pop();
    }
    out.join("\n")
}
742
743pub fn write_file_safe(path: &Path, content: &str) -> Result<()> {
744 write_file_with_backup(path, content, false, false)
745}
746
/// Write `content` to `path`, optionally backing up the existing file first.
///
/// Behaviour:
/// * No-op when the on-disk content already equals `content`.
/// * With `backup`, copies the existing file into `.vika-backup/` first.
/// * Unless `force`, compares hashes against the metadata cache to try to
///   detect files modified by the user since generation (see NOTE below).
/// * Finally writes the file and records its hash via `save_file_metadata`.
pub fn write_file_with_backup(path: &Path, content: &str, backup: bool, force: bool) -> Result<()> {
    let file_exists = path.exists();
    // Skip the write entirely when nothing would change (unreadable files are
    // treated as "changed" so they get rewritten).
    let should_write = if file_exists {
        if let Ok(existing_content) = std::fs::read_to_string(path) {
            existing_content != content
        } else {
            true
        }
    } else {
        true
    };

    if !should_write {
        return Ok(());
    }

    if backup && file_exists {
        create_backup(path)?;
    }

    if !force && file_exists {
        // Missing metadata (first run, cleared cache) silently skips the
        // user-modification check.
        if let Ok(metadata) = load_file_metadata(path) {
            let current_hash = compute_content_hash(content);
            let file_hash = compute_file_hash(path)?;

            // Only investigate when the recorded hash matches neither the new
            // content nor what is currently on disk.
            if metadata.hash != current_hash && metadata.hash != file_hash {
                use crate::formatter::FormatterManager;

                // Look for a project formatter config, walking up from the
                // file's directory toward the filesystem root.
                let mut search_dir = path.parent().unwrap_or_else(|| Path::new("."));
                let mut formatter = None;

                while search_dir != Path::new("/") && search_dir != Path::new("") {
                    if let Some(fmt) = FormatterManager::detect_formatter_from_dir(search_dir) {
                        formatter = Some(fmt);
                        break;
                    }
                    if let Some(parent) = search_dir.parent() {
                        search_dir = parent;
                    } else {
                        break;
                    }
                }

                // Fall back to a globally-detected formatter.
                if formatter.is_none() {
                    formatter = FormatterManager::detect_formatter();
                }

                if let Some(fmt) = formatter {
                    match FormatterManager::format_content(content, fmt, path) {
                        Ok(formatted_content) => {
                            let formatted_hash = compute_content_hash(&formatted_content);
                            if formatted_hash == file_hash {
                                // Disk content is just the formatted version of
                                // the new content — not a user edit; proceed.
                            } else {
                                if current_hash == metadata.hash {
                                    // NOTE(review): this branch looks
                                    // unreachable — the enclosing `if` already
                                    // requires `metadata.hash != current_hash`,
                                    // so `FileModifiedByUser` can never be
                                    // returned here. The intended condition was
                                    // presumably based on `file_hash` instead;
                                    // confirm before changing.
                                    return Err(FileSystemError::FileModifiedByUser {
                                        path: path.display().to_string(),
                                    }
                                    .into());
                                }
                            }
                        }
                        Err(_) => {
                            if current_hash == metadata.hash {
                                // NOTE(review): empty no-op branch (and the
                                // condition contradicts the outer guard, as
                                // above) — formatting failures currently fall
                                // through to the write below.
                            }
                        }
                    }
                } else {
                    if current_hash == metadata.hash {
                        // NOTE(review): empty no-op branch — with no formatter
                        // available the check is effectively disabled and the
                        // write proceeds.
                    }
                }
            }
        }
    }

    std::fs::write(path, content).map_err(|e| FileSystemError::WriteFileFailed {
        path: path.display().to_string(),
        source: e,
    })?;

    // Record the generated hash so the next run can detect user edits.
    save_file_metadata(path, content)?;

    Ok(())
}
860
861fn create_backup(path: &Path) -> Result<()> {
862 use std::collections::hash_map::DefaultHasher;
863 use std::hash::{Hash, Hasher};
864 use std::time::{SystemTime, UNIX_EPOCH};
865
866 let timestamp = SystemTime::now()
867 .duration_since(UNIX_EPOCH)
868 .unwrap()
869 .as_secs();
870
871 let backup_dir = PathBuf::from(format!(".vika-backup/{}", timestamp));
872 std::fs::create_dir_all(&backup_dir).map_err(|e| FileSystemError::CreateDirectoryFailed {
873 path: backup_dir.display().to_string(),
874 source: e,
875 })?;
876
877 let backup_path = if path.is_absolute() {
879 let path_str = path.display().to_string();
882 let mut hasher = DefaultHasher::new();
883 path_str.hash(&mut hasher);
884 let hash = format!("{:x}", hasher.finish());
885 let filename = path.file_name().and_then(|n| n.to_str()).unwrap_or("file");
886 backup_dir.join(format!("{}_{}", hash, filename))
887 } else {
888 let relative_path = path.strip_prefix(".").unwrap_or(path);
890 backup_dir.join(relative_path)
891 };
892
893 if let Some(parent) = backup_path.parent() {
894 std::fs::create_dir_all(parent).map_err(|e| FileSystemError::CreateDirectoryFailed {
895 path: parent.display().to_string(),
896 source: e,
897 })?;
898 }
899
900 std::fs::copy(path, &backup_path).map_err(|e| FileSystemError::WriteFileFailed {
901 path: backup_path.display().to_string(),
902 source: e,
903 })?;
904
905 Ok(())
906}
907
908#[derive(Clone, serde::Serialize, serde::Deserialize)]
909struct FileMetadata {
910 hash: String,
911 generated_at: u64,
912 generated_by: String,
913}
914
/// Hash `content` into a lowercase hex string for change detection.
///
/// NOTE(review): `DefaultHasher::new()` is deterministic within one
/// toolchain, but the algorithm is not guaranteed stable across Rust
/// releases — hashes persisted in `.vika-cache` may all be invalidated by a
/// compiler upgrade. Confirm that is acceptable before relying on it.
fn compute_content_hash(content: &str) -> String {
    let mut hasher = DefaultHasher::new();
    Hash::hash(content, &mut hasher);
    format!("{:x}", hasher.finish())
}
920
921fn compute_file_hash(path: &Path) -> Result<String> {
922 let content = std::fs::read_to_string(path).map_err(|e| FileSystemError::ReadFileFailed {
923 path: path.display().to_string(),
924 source: e,
925 })?;
926 Ok(compute_content_hash(&content))
927}
928
929pub fn update_file_metadata_from_disk(path: &Path) -> Result<()> {
932 let content = std::fs::read_to_string(path).map_err(|e| FileSystemError::ReadFileFailed {
933 path: path.display().to_string(),
934 source: e,
935 })?;
936 save_file_metadata(path, &content)
937}
938
939pub fn batch_update_file_metadata_from_disk(paths: &[PathBuf]) -> Result<()> {
943 if paths.is_empty() {
944 return Ok(());
945 }
946
947 let metadata_dir = PathBuf::from(".vika-cache");
948 std::fs::create_dir_all(&metadata_dir).map_err(|e| FileSystemError::CreateDirectoryFailed {
949 path: metadata_dir.display().to_string(),
950 source: e,
951 })?;
952
953 let metadata_file = metadata_dir.join("file-metadata.json");
954 let mut metadata_map: std::collections::HashMap<String, FileMetadata> =
955 if metadata_file.exists() {
956 let content = std::fs::read_to_string(&metadata_file).map_err(|e| {
957 FileSystemError::ReadFileFailed {
958 path: metadata_file.display().to_string(),
959 source: e,
960 }
961 })?;
962 serde_json::from_str(&content).unwrap_or_default()
963 } else {
964 std::collections::HashMap::new()
965 };
966
967 let generated_at = SystemTime::now()
968 .duration_since(std::time::UNIX_EPOCH)
969 .unwrap()
970 .as_secs();
971
972 for path in paths {
974 match std::fs::read_to_string(path) {
975 Ok(content) => {
976 let hash = compute_content_hash(&content);
977 metadata_map.insert(
978 path.display().to_string(),
979 FileMetadata {
980 hash,
981 generated_at,
982 generated_by: "vika-cli".to_string(),
983 },
984 );
985 }
986 Err(e) => {
987 eprintln!("Warning: Failed to read {}: {}", path.display(), e);
989 }
990 }
991 }
992
993 let json = serde_json::to_string_pretty(&metadata_map).map_err(|e| {
995 FileSystemError::WriteFileFailed {
996 path: metadata_file.display().to_string(),
997 source: std::io::Error::new(std::io::ErrorKind::InvalidData, format!("{}", e)),
998 }
999 })?;
1000
1001 std::fs::write(&metadata_file, json).map_err(|e| FileSystemError::WriteFileFailed {
1002 path: metadata_file.display().to_string(),
1003 source: e,
1004 })?;
1005
1006 Ok(())
1007}
1008
1009pub fn save_file_metadata(path: &Path, content: &str) -> Result<()> {
1010 let metadata_dir = PathBuf::from(".vika-cache");
1011 std::fs::create_dir_all(&metadata_dir).map_err(|e| FileSystemError::CreateDirectoryFailed {
1012 path: metadata_dir.display().to_string(),
1013 source: e,
1014 })?;
1015
1016 let metadata_file = metadata_dir.join("file-metadata.json");
1017 let mut metadata_map: std::collections::HashMap<String, FileMetadata> =
1018 if metadata_file.exists() {
1019 let content = std::fs::read_to_string(&metadata_file).map_err(|e| {
1020 FileSystemError::ReadFileFailed {
1021 path: metadata_file.display().to_string(),
1022 source: e,
1023 }
1024 })?;
1025 serde_json::from_str(&content).unwrap_or_default()
1026 } else {
1027 std::collections::HashMap::new()
1028 };
1029
1030 let hash = compute_content_hash(content);
1031 let generated_at = SystemTime::now()
1032 .duration_since(std::time::UNIX_EPOCH)
1033 .unwrap()
1034 .as_secs();
1035
1036 metadata_map.insert(
1037 path.display().to_string(),
1038 FileMetadata {
1039 hash,
1040 generated_at,
1041 generated_by: "vika-cli".to_string(),
1042 },
1043 );
1044
1045 let json = serde_json::to_string_pretty(&metadata_map).map_err(|e| {
1046 FileSystemError::WriteFileFailed {
1047 path: metadata_file.display().to_string(),
1048 source: std::io::Error::new(std::io::ErrorKind::InvalidData, format!("{}", e)),
1049 }
1050 })?;
1051
1052 std::fs::write(&metadata_file, json).map_err(|e| FileSystemError::WriteFileFailed {
1053 path: metadata_file.display().to_string(),
1054 source: e,
1055 })?;
1056
1057 Ok(())
1058}
1059
1060fn load_file_metadata(path: &Path) -> Result<FileMetadata> {
1061 let metadata_file = PathBuf::from(".vika-cache/file-metadata.json");
1062 if !metadata_file.exists() {
1063 return Err(FileSystemError::FileNotFound {
1064 path: metadata_file.display().to_string(),
1065 }
1066 .into());
1067 }
1068
1069 let content =
1070 std::fs::read_to_string(&metadata_file).map_err(|e| FileSystemError::ReadFileFailed {
1071 path: metadata_file.display().to_string(),
1072 source: e,
1073 })?;
1074
1075 let metadata_map: std::collections::HashMap<String, FileMetadata> =
1076 serde_json::from_str(&content).map_err(|e| FileSystemError::ReadFileFailed {
1077 path: metadata_file.display().to_string(),
1078 source: std::io::Error::new(std::io::ErrorKind::InvalidData, format!("{}", e)),
1079 })?;
1080
1081 metadata_map
1082 .get(&path.display().to_string())
1083 .cloned()
1084 .ok_or_else(|| {
1085 FileSystemError::FileNotFound {
1086 path: path.display().to_string(),
1087 }
1088 .into()
1089 })
1090}
1091
1092pub fn write_hooks_with_options(
1094 output_dir: &Path,
1095 module_name: &str,
1096 hooks: &[crate::generator::hooks::HookFile],
1097 _spec_name: Option<&str>,
1098 backup: bool,
1099 force: bool,
1100) -> Result<Vec<PathBuf>> {
1101 let module_dir = output_dir.join(sanitize_module_name(module_name));
1104 ensure_directory(&module_dir)?;
1105
1106 let mut written_files = Vec::new();
1107
1108 for hook in hooks {
1109 let hook_file = module_dir.join(&hook.filename);
1110 write_file_with_backup(&hook_file, &hook.content, backup, force)?;
1111 written_files.push(hook_file);
1112 }
1113
1114 Ok(written_files)
1115}
1116
1117pub fn write_query_keys_with_options(
1119 output_dir: &Path,
1120 module_name: &str,
1121 query_keys_content: &str,
1122 _spec_name: Option<&str>,
1123 backup: bool,
1124 force: bool,
1125) -> Result<PathBuf> {
1126 ensure_directory(output_dir)?;
1129
1130 let filename = format!("{}.ts", sanitize_module_name(module_name));
1132 let query_keys_file = output_dir.join(&filename);
1133
1134 write_file_with_backup(&query_keys_file, query_keys_content, backup, force)?;
1135
1136 Ok(query_keys_file)
1137}