use crate::error::{FileSystemError, Result};
use crate::generator::api_client::ApiFunction;
use crate::generator::ts_typings::TypeScriptType;
use crate::generator::utils::sanitize_module_name;
use crate::generator::zod_schema::ZodSchema;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::path::{Path, PathBuf};
use std::time::{SystemTime, UNIX_EPOCH};

/// Creates `path` (and any missing parent directories) if it does not already exist.
pub fn ensure_directory(path: &Path) -> Result<()> {
    if !path.exists() {
        std::fs::create_dir_all(path).map_err(|e| FileSystemError::CreateDirectoryFailed {
            path: path.display().to_string(),
            source: e,
        })?;
    }
    Ok(())
}

/// Writes the TypeScript types and Zod schemas for a module with default options
/// (no backups, no forced overwrite).
pub fn write_schemas(
    output_dir: &Path,
    module_name: &str,
    types: &[TypeScriptType],
    zod_schemas: &[ZodSchema],
) -> Result<Vec<PathBuf>> {
    write_schemas_with_options(output_dir, module_name, types, zod_schemas, false, false)
}

/// Writes `types.ts`, `schemas.ts`, and a re-exporting `index.ts` for a module.
/// With `backup`, existing files are copied into `.vika-backup/` before being
/// overwritten; with `force`, files that appear to have been modified by the user
/// are overwritten anyway.
pub fn write_schemas_with_options(
    output_dir: &Path,
    module_name: &str,
    types: &[TypeScriptType],
    zod_schemas: &[ZodSchema],
    backup: bool,
    force: bool,
) -> Result<Vec<PathBuf>> {
    let module_dir = output_dir.join(sanitize_module_name(module_name));
    ensure_directory(&module_dir)?;

    let mut written_files = Vec::new();

    if !types.is_empty() {
        // Deduplicate type declarations by their exported name so a type emitted by
        // several endpoints is only written once.
        let mut seen_type_names = std::collections::HashSet::new();
        let mut deduplicated_types = Vec::new();
        for t in types {
            let type_name = if let Some(start) = t.content.find("export type ") {
                let after_export_type = &t.content[start + 12..];
                if let Some(end) = after_export_type.find([' ', '=', '\n']) {
                    after_export_type[..end].trim().to_string()
                } else {
                    after_export_type.trim().to_string()
                }
            } else if let Some(start) = t.content.find("export interface ") {
                let after_export_interface = &t.content[start + 17..];
                if let Some(end) = after_export_interface.find([' ', '{', '\n']) {
                    after_export_interface[..end].trim().to_string()
                } else {
                    after_export_interface.trim().to_string()
                }
            } else {
                // No recognizable declaration; fall back to the full content as the key.
                t.content.clone()
            };

            if !seen_type_names.contains(&type_name) {
                seen_type_names.insert(type_name);
                deduplicated_types.push(t);
            }
        }

        let types_content_raw = deduplicated_types
            .iter()
            .map(|t| t.content.clone())
            .collect::<Vec<_>>()
            .join("\n\n");

        // Types referencing the shared `Common` namespace need a relative import back
        // to the common module: one `../` per nesting level of the module name.
        let needs_common_import = types_content_raw.contains("Common.");
        let common_import = if needs_common_import {
            let depth = module_name.matches('/').count() + 1;
            let relative_path = "../".repeat(depth);
            format!("import * as Common from \"{}common\";\n\n", relative_path)
        } else {
            String::new()
        };

        let types_content =
            format_typescript_code(&format!("{}{}", common_import, types_content_raw));

        let types_file = module_dir.join("types.ts");
        write_file_with_backup(&types_file, &types_content, backup, force)?;
        written_files.push(types_file);
    }

    if !zod_schemas.is_empty() {
        let zod_content_raw = zod_schemas
            .iter()
            .map(|z| z.content.clone())
            .collect::<Vec<_>>()
            .join("\n\n");

        let needs_common_import = zod_content_raw.contains("Common.");
        let common_import = if needs_common_import {
            let depth = module_name.matches('/').count() + 1;
            let relative_path = "../".repeat(depth);
            format!("import * as Common from \"{}common\";\n\n", relative_path)
        } else {
            String::new()
        };

        // `common_import` is already empty when it is not needed, so it can be
        // interpolated unconditionally.
        let zod_content = format_typescript_code(&format!(
            "import {{ z }} from \"zod\";\n{}{}",
            common_import, zod_content_raw
        ));

        let zod_file = module_dir.join("schemas.ts");
        write_file_with_backup(&zod_file, &zod_content, backup, force)?;
        written_files.push(zod_file);
    }

    let mut index_exports = Vec::new();
    if !types.is_empty() {
        index_exports.push("export * from \"./types\";".to_string());
    }
    if !zod_schemas.is_empty() {
        index_exports.push("export * from \"./schemas\";".to_string());
    }

    if !index_exports.is_empty() {
        let index_content = format_typescript_code(&(index_exports.join("\n") + "\n"));
        let index_file = module_dir.join("index.ts");
        write_file_with_backup(&index_file, &index_content, backup, force)?;
        written_files.push(index_file);
    }

    Ok(written_files)
}
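
// Usage sketch (not part of the original source): how a caller might emit schema files
// for a module named "users". The `types` and `zod_schemas` slices are assumed to come
// from the upstream generator, and the exact directory name depends on
// `sanitize_module_name`.
//
//     let written = write_schemas_with_options(
//         Path::new("src/generated"),
//         "users",
//         &types,
//         &zod_schemas,
//         /* backup */ true,
//         /* force */ false,
//     )?;
//     // Typically yields src/generated/users/{types.ts, schemas.ts, index.ts}.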

/// Writes the generated API client for a module with default options
/// (no backups, no forced overwrite).
pub fn write_api_client(
    output_dir: &Path,
    module_name: &str,
    functions: &[ApiFunction],
) -> Result<Vec<PathBuf>> {
    write_api_client_with_options(output_dir, module_name, functions, false, false)
}

/// Writes a single `index.ts` per module that merges the generated API functions,
/// deduplicating imports and function declarations along the way.
pub fn write_api_client_with_options(
    output_dir: &Path,
    module_name: &str,
    functions: &[ApiFunction],
    backup: bool,
    force: bool,
) -> Result<Vec<PathBuf>> {
    let module_dir = output_dir.join(sanitize_module_name(module_name));
    ensure_directory(&module_dir)?;

    let mut written_files = Vec::new();

    if !functions.is_empty() {
        // Per module path: the set of `import type { ... }` items and the raw text of
        // every other import line (namespace and default imports).
        let mut imports_by_module: std::collections::HashMap<
            String,
            (std::collections::HashSet<String>, Vec<String>),
        > = std::collections::HashMap::new();
        let mut function_bodies = Vec::new();
        let mut seen_functions: std::collections::HashSet<String> =
            std::collections::HashSet::new();

        for func in functions {
            let lines: Vec<&str> = func.content.lines().collect();
            let mut func_lines = Vec::new();
            let mut in_function = false;
            let mut function_name: Option<String> = None;

            for line in lines {
                if line.trim().starts_with("import ") {
                    let import_line = line.trim().trim_end_matches(';').trim();
                    if let Some(from_pos) = import_line.find(" from ") {
                        let before_from = &import_line[..from_pos];
                        let after_from = &import_line[from_pos + 6..];
                        let module_path = after_from.trim_matches('"').trim_matches('\'').trim();

                        if before_from.contains("import type {") {
                            // Collect the items of a type-only import so they can be
                            // merged into one `import type { ... }` per module path.
                            if let Some(start) = before_from.find('{') {
                                if let Some(end) = before_from.find('}') {
                                    let items_str = &before_from[start + 1..end];
                                    let items: Vec<String> = items_str
                                        .split(',')
                                        .map(|s| s.trim().to_string())
                                        .filter(|s| !s.is_empty())
                                        .collect();

                                    let (type_imports, _) = imports_by_module
                                        .entry(module_path.to_string())
                                        .or_insert_with(|| {
                                            (std::collections::HashSet::new(), Vec::new())
                                        });
                                    type_imports.extend(items);
                                }
                            }
                        } else {
                            // Namespace (`import * as`) and default imports are kept
                            // verbatim and deduplicated later.
                            let (_, other_imports) = imports_by_module
                                .entry(module_path.to_string())
                                .or_insert_with(|| (std::collections::HashSet::new(), Vec::new()));
                            other_imports.push(import_line.to_string());
                        }
                    } else {
                        // Imports without a `from` clause (e.g. side-effect imports).
                        let (_, other_imports) = imports_by_module
                            .entry(String::new())
                            .or_insert_with(|| (std::collections::HashSet::new(), Vec::new()));
                        other_imports.push(import_line.to_string());
                    }
                } else if line.trim().starts_with("export const ") {
                    let trimmed = line.trim();
                    if trimmed.len() > 13 {
                        let after_export_const = &trimmed[13..];
                        let name_end = after_export_const
                            .find(' ')
                            .or_else(|| after_export_const.find('('))
                            .unwrap_or(after_export_const.len());
                        let name = after_export_const[..name_end].trim().to_string();
                        if !name.is_empty() {
                            function_name = Some(name.clone());
                            if seen_functions.contains(&name) {
                                // Duplicate declaration: skip the rest of this content.
                                break;
                            }
                            seen_functions.insert(name);
                        }
                    }
                    in_function = true;
                    func_lines.push(line);
                } else if in_function {
                    func_lines.push(line);
                    if line.trim() == "};" {
                        break;
                    }
                }
            }

            if !func_lines.is_empty() && function_name.is_some() {
                function_bodies.push(func_lines.join("\n"));
            }
        }

        let mut imports_vec = Vec::new();
        for (module_path, (type_import_items, other_imports)) in imports_by_module.iter() {
            if module_path.is_empty() {
                let deduped: std::collections::HashSet<String> =
                    other_imports.iter().cloned().collect();
                imports_vec.extend(deduped.into_iter());
            } else {
                let deduped_imports: std::collections::HashSet<String> =
                    other_imports.iter().cloned().collect();
                let mut namespace_imports = Vec::new();
                let mut default_imports = Vec::new();

                for item in deduped_imports.iter() {
                    if item.contains("import * as") {
                        namespace_imports.push(item.clone());
                    } else {
                        default_imports.push(item.clone());
                    }
                }

                namespace_imports.sort();
                for ns_import in namespace_imports {
                    imports_vec.push(format!("{};", ns_import));
                }

                default_imports.sort();
                for default_import in default_imports {
                    imports_vec.push(format!("{};", default_import));
                }

                if !type_import_items.is_empty() {
                    let mut sorted_types: Vec<String> =
                        type_import_items.iter().cloned().collect();
                    sorted_types.sort();
                    imports_vec.push(format!(
                        "import type {{ {} }} from \"{}\";",
                        sorted_types.join(", "),
                        module_path
                    ));
                }
            }
        }

        let imports_str = imports_vec.join("\n");
        let functions_str = function_bodies.join("\n\n");
        let combined_content = if !imports_str.is_empty() {
            format!("{}\n\n{}", imports_str, functions_str)
        } else {
            functions_str
        };

        let functions_content = format_typescript_code(&combined_content);

        let api_file = module_dir.join("index.ts");
        write_file_with_backup(&api_file, &functions_content, backup, force)?;
        written_files.push(api_file);
    }

    Ok(written_files)
}
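
// Usage sketch (not part of the original source): `functions` is assumed to hold
// generated `ApiFunction` values whose `content` contains `import ...` lines followed
// by `export const <name> = ...` declarations terminated by a line containing only `};`.
//
//     let written = write_api_client_with_options(
//         Path::new("src/generated"),
//         "users",
//         &functions,
//         /* backup */ false,
//         /* force */ false,
//     )?;
//     // Merges every function for the module into a single src/generated/users/index.ts.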

/// Writes the shared `http` fetch wrapper used by the generated API clients to
/// `output_path`, creating the parent directory if necessary.
pub fn write_http_client_template(output_path: &Path) -> Result<()> {
    ensure_directory(output_path.parent().unwrap_or(Path::new(".")))?;

    let http_client_content = r#"const requestInitIndicators = [
  "method",
  "headers",
  "body",
  "signal",
  "credentials",
  "cache",
  "redirect",
  "referrer",
  "referrerPolicy",
  "integrity",
  "keepalive",
  "mode",
  "priority",
  "window",
];

const isRequestInitLike = (value: unknown): value is RequestInit => {
  if (!value || typeof value !== "object") {
    return false;
  }
  const candidate = value as Record<string, unknown>;
  return requestInitIndicators.some((key) => key in candidate);
};

export const http = {
  // GET helper. The second argument can be either a RequestInit or a JSON body for
  // uncommon GET-with-body endpoints.
  async get<T = any>(url: string, optionsOrBody?: RequestInit | unknown): Promise<T> {
    let init: RequestInit = { method: "GET", body: null };

    if (optionsOrBody !== undefined && optionsOrBody !== null) {
      if (isRequestInitLike(optionsOrBody)) {
        const candidate = optionsOrBody as RequestInit;
        init = {
          ...candidate,
          method: "GET",
          body: candidate.body ?? null,
        };
      } else {
        init = {
          method: "GET",
          headers: {
            "Content-Type": "application/json",
          },
          body: JSON.stringify(optionsOrBody),
        };
      }
    }

    const response = await fetch(url, {
      ...init,
    });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    return response.json();
  },

  async post<T = any>(url: string, body?: any, options: RequestInit = {}): Promise<T> {
    const response = await fetch(url, {
      ...options,
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        ...(options.headers || {}),
      },
      body: body !== undefined ? JSON.stringify(body) : (options.body ?? null),
    });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    return response.json();
  },

  async put<T = any>(url: string, body?: any, options: RequestInit = {}): Promise<T> {
    const response = await fetch(url, {
      ...options,
      method: "PUT",
      headers: {
        "Content-Type": "application/json",
        ...(options.headers || {}),
      },
      body: body !== undefined ? JSON.stringify(body) : (options.body ?? null),
    });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    return response.json();
  },

  async delete<T = any>(url: string, options: RequestInit = {}): Promise<T> {
    const response = await fetch(url, {
      ...options,
      method: "DELETE",
      body: options.body ?? null,
    });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    return response.json();
  },

  async patch<T = any>(url: string, body?: any, options: RequestInit = {}): Promise<T> {
    const response = await fetch(url, {
      ...options,
      method: "PATCH",
      headers: {
        "Content-Type": "application/json",
        ...(options.headers || {}),
      },
      body: body !== undefined ? JSON.stringify(body) : (options.body ?? null),
    });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    return response.json();
  },

  async head(url: string, options: RequestInit = {}): Promise<Response> {
    const response = await fetch(url, {
      ...options,
      method: "HEAD",
      body: options.body ?? null,
    });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    return response;
  },

  async options<T = any>(url: string, options: RequestInit = {}): Promise<T> {
    const response = await fetch(url, {
      ...options,
      method: "OPTIONS",
      body: options.body ?? null,
    });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    return response.json();
  },
};
"#;

    write_file_safe(output_path, http_client_content)?;

    Ok(())
}
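
// Usage sketch (hypothetical path, not part of the original source): write the shared
// `http` helper somewhere the generated modules can import it from.
//
//     write_http_client_template(Path::new("src/generated/http.ts"))?;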

/// Light-weight whitespace normalization for generated TypeScript: trims every line
/// (so any indentation is removed), collapses runs of blank lines into a single blank
/// line, and drops leading and trailing blank lines.
fn format_typescript_code(code: &str) -> String {
    let lines: Vec<&str> = code.lines().collect();
    let mut formatted = Vec::new();
    let mut last_was_empty = false;

    for line in lines {
        let trimmed = line.trim();
        if trimmed.is_empty() {
            if !last_was_empty && !formatted.is_empty() {
                formatted.push(String::new());
                last_was_empty = true;
            }
            continue;
        }
        last_was_empty = false;
        formatted.push(trimmed.to_string());
    }

    while formatted.last().map(|s| s.is_empty()).unwrap_or(false) {
        formatted.pop();
    }

    formatted.join("\n")
}

/// Writes `content` to `path` without taking a backup and without the force flag.
pub fn write_file_safe(path: &Path, content: &str) -> Result<()> {
    write_file_with_backup(path, content, false, false)
}

/// Writes `content` to `path`, skipping the write when the on-disk content is already
/// identical. With `backup`, the existing file is copied into `.vika-backup/` first.
/// Unless `force` is set, the write is rejected when the tracked hash matches neither
/// the new content nor the file on disk, i.e. the file appears to have been edited by
/// the user since it was generated.
pub fn write_file_with_backup(path: &Path, content: &str, backup: bool, force: bool) -> Result<()> {
    let file_exists = path.exists();
    let should_write = if file_exists {
        if let Ok(existing_content) = std::fs::read_to_string(path) {
            existing_content != content
        } else {
            true
        }
    } else {
        true
    };

    if !should_write {
        return Ok(());
    }

    if backup && file_exists {
        create_backup(path)?;
    }

    if !force && file_exists {
        if let Ok(metadata) = load_file_metadata(path) {
            let current_hash = compute_content_hash(content);
            let file_hash = compute_file_hash(path)?;
            if metadata.hash != current_hash && metadata.hash != file_hash {
                return Err(FileSystemError::FileModifiedByUser {
                    path: path.display().to_string(),
                }
                .into());
            }
        }
    }

    std::fs::write(path, content).map_err(|e| FileSystemError::WriteFileFailed {
        path: path.display().to_string(),
        source: e,
    })?;

    save_file_metadata(path, content)?;

    Ok(())
}
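
// Behaviour sketch (illustrative, not part of the original source):
//
//     // First run: the file does not exist, so it is written and its hash is recorded.
//     write_file_with_backup(Path::new("out/index.ts"), "export {};\n", false, false)?;
//
//     // Re-running with identical content is a no-op (the `should_write` check).
//     write_file_with_backup(Path::new("out/index.ts"), "export {};\n", false, false)?;
//
//     // If the user edits out/index.ts by hand and the newly generated content has also
//     // changed, a non-`force` write is rejected with FileSystemError::FileModifiedByUser;
//     // passing `force = true` overwrites the file anyway.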

/// Copies `path` into a timestamped directory under `.vika-backup/` before it is
/// overwritten. Absolute paths are stored flat under a hash of the full path plus the
/// file name; relative paths keep their directory structure inside the backup directory.
fn create_backup(path: &Path) -> Result<()> {
    let timestamp = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .unwrap()
        .as_secs();

    let backup_dir = PathBuf::from(format!(".vika-backup/{}", timestamp));
    std::fs::create_dir_all(&backup_dir).map_err(|e| FileSystemError::CreateDirectoryFailed {
        path: backup_dir.display().to_string(),
        source: e,
    })?;

    let backup_path = if path.is_absolute() {
        // An absolute path cannot be joined onto the backup directory directly, so hash
        // the full path to keep distinct files from colliding on their file name.
        let path_str = path.display().to_string();
        let mut hasher = DefaultHasher::new();
        path_str.hash(&mut hasher);
        let hash = format!("{:x}", hasher.finish());
        let filename = path.file_name().and_then(|n| n.to_str()).unwrap_or("file");
        backup_dir.join(format!("{}_{}", hash, filename))
    } else {
        let relative_path = path.strip_prefix(".").unwrap_or(path);
        backup_dir.join(relative_path)
    };

    if let Some(parent) = backup_path.parent() {
        std::fs::create_dir_all(parent).map_err(|e| FileSystemError::CreateDirectoryFailed {
            path: parent.display().to_string(),
            source: e,
        })?;
    }

    std::fs::copy(path, &backup_path).map_err(|e| FileSystemError::WriteFileFailed {
        path: backup_path.display().to_string(),
        source: e,
    })?;

    Ok(())
}

/// Metadata recorded for each generated file in `.vika-cache/file-metadata.json`,
/// used to detect user modifications on subsequent runs.
#[derive(Clone, serde::Serialize, serde::Deserialize)]
struct FileMetadata {
    hash: String,
    generated_at: u64,
    generated_by: String,
}

/// Hashes `content` with the standard library's `DefaultHasher`. The result is only
/// used for change detection, not for anything security-sensitive; note that the
/// standard library does not guarantee this hash is stable across Rust releases.
fn compute_content_hash(content: &str) -> String {
    let mut hasher = DefaultHasher::new();
    content.hash(&mut hasher);
    format!("{:x}", hasher.finish())
}

/// Reads `path` and hashes its contents with [`compute_content_hash`].
fn compute_file_hash(path: &Path) -> Result<String> {
    let content = std::fs::read_to_string(path).map_err(|e| FileSystemError::ReadFileFailed {
        path: path.display().to_string(),
        source: e,
    })?;
    Ok(compute_content_hash(&content))
}

/// Records the content hash and generation timestamp for `path` in
/// `.vika-cache/file-metadata.json`, creating the cache directory and file as needed.
fn save_file_metadata(path: &Path, content: &str) -> Result<()> {
    let metadata_dir = PathBuf::from(".vika-cache");
    std::fs::create_dir_all(&metadata_dir).map_err(|e| FileSystemError::CreateDirectoryFailed {
        path: metadata_dir.display().to_string(),
        source: e,
    })?;

    let metadata_file = metadata_dir.join("file-metadata.json");
    let mut metadata_map: std::collections::HashMap<String, FileMetadata> =
        if metadata_file.exists() {
            let content = std::fs::read_to_string(&metadata_file).map_err(|e| {
                FileSystemError::ReadFileFailed {
                    path: metadata_file.display().to_string(),
                    source: e,
                }
            })?;
            serde_json::from_str(&content).unwrap_or_default()
        } else {
            std::collections::HashMap::new()
        };

    let hash = compute_content_hash(content);
    let generated_at = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .unwrap()
        .as_secs();

    metadata_map.insert(
        path.display().to_string(),
        FileMetadata {
            hash,
            generated_at,
            generated_by: "vika-cli".to_string(),
        },
    );

    let json = serde_json::to_string_pretty(&metadata_map).map_err(|e| {
        FileSystemError::WriteFileFailed {
            path: metadata_file.display().to_string(),
            source: std::io::Error::new(std::io::ErrorKind::InvalidData, format!("{}", e)),
        }
    })?;

    std::fs::write(&metadata_file, json).map_err(|e| FileSystemError::WriteFileFailed {
        path: metadata_file.display().to_string(),
        source: e,
    })?;

    Ok(())
}

/// Looks up the recorded metadata for `path` in `.vika-cache/file-metadata.json`.
/// Returns a `FileNotFound` error when the cache file or the entry does not exist.
fn load_file_metadata(path: &Path) -> Result<FileMetadata> {
    let metadata_file = PathBuf::from(".vika-cache/file-metadata.json");
    if !metadata_file.exists() {
        return Err(FileSystemError::FileNotFound {
            path: metadata_file.display().to_string(),
        }
        .into());
    }

    let content =
        std::fs::read_to_string(&metadata_file).map_err(|e| FileSystemError::ReadFileFailed {
            path: metadata_file.display().to_string(),
            source: e,
        })?;

    let metadata_map: std::collections::HashMap<String, FileMetadata> =
        serde_json::from_str(&content).map_err(|e| FileSystemError::ReadFileFailed {
            path: metadata_file.display().to_string(),
            source: std::io::Error::new(std::io::ErrorKind::InvalidData, format!("{}", e)),
        })?;

    metadata_map
        .get(&path.display().to_string())
        .cloned()
        .ok_or_else(|| {
            FileSystemError::FileNotFound {
                path: path.display().to_string(),
            }
            .into()
        })
}
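
#[cfg(test)]
mod tests {
    use super::*;

    // Minimal test sketches for the pure helpers (not part of the original source); the
    // filesystem-heavy functions are assumed to be covered elsewhere.

    #[test]
    fn format_typescript_code_trims_and_collapses_blank_lines() {
        let input = "export type A = string;\n\n\n\n  export type B = number;\n\n";
        let output = format_typescript_code(input);
        assert_eq!(output, "export type A = string;\n\nexport type B = number;");
    }

    #[test]
    fn compute_content_hash_is_deterministic_within_a_run() {
        assert_eq!(compute_content_hash("abc"), compute_content_hash("abc"));
        assert_ne!(compute_content_hash("abc"), compute_content_hash("abd"));
    }
}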