use crate::error::{FileSystemError, Result};
use crate::generator::api_client::ApiFunction;
use crate::generator::ts_typings::TypeScriptType;
use crate::generator::zod_schema::ZodSchema;
use crate::generator::utils::sanitize_module_name;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::path::{Path, PathBuf};
use std::time::SystemTime;

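/// Creates `path` (including any missing parent directories) if it does not
/// already exist.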
pub fn ensure_directory(path: &Path) -> Result<()> {
    if !path.exists() {
        std::fs::create_dir_all(path).map_err(|e| FileSystemError::CreateDirectoryFailed {
            path: path.display().to_string(),
            source: e,
        })?;
    }
    Ok(())
}

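/// Writes the TypeScript types and Zod schemas for a module with the default
/// options (no backups, no forced overwrite). Convenience wrapper around
/// [`write_schemas_with_options`].
///
/// Illustrative call (the output path and module name are examples only):
///
/// ```ignore
/// let written = write_schemas(Path::new("generated"), "users", &types, &zod_schemas)?;
/// ```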
pub fn write_schemas(
    output_dir: &Path,
    module_name: &str,
    types: &[TypeScriptType],
    zod_schemas: &[ZodSchema],
) -> Result<Vec<PathBuf>> {
    write_schemas_with_options(output_dir, module_name, types, zod_schemas, false, false)
}

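/// Writes `types.ts`, `schemas.ts`, and an `index.ts` barrel into
/// `<output_dir>/<sanitized module_name>`. Type declarations are deduplicated
/// by their `export type` / `export interface` name, an `import * as Common`
/// line is prepended when the content references `Common.`, and every file
/// goes through [`write_file_with_backup`] so unchanged content is not
/// rewritten.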
pub fn write_schemas_with_options(
    output_dir: &Path,
    module_name: &str,
    types: &[TypeScriptType],
    zod_schemas: &[ZodSchema],
    backup: bool,
    force: bool,
) -> Result<Vec<PathBuf>> {
    let module_dir = output_dir.join(sanitize_module_name(module_name));
    ensure_directory(&module_dir)?;

    let mut written_files = Vec::new();

    if !types.is_empty() {
        let mut seen_type_names = std::collections::HashSet::new();
        let mut deduplicated_types = Vec::new();
        for t in types {
            let type_name = if let Some(start) = t.content.find("export type ") {
                let after_export_type = &t.content[start + 12..];
                if let Some(end) =
                    after_export_type.find(|c: char| c == ' ' || c == '=' || c == '\n')
                {
                    after_export_type[..end].trim().to_string()
                } else {
                    after_export_type.trim().to_string()
                }
            } else if let Some(start) = t.content.find("export interface ") {
                let after_export_interface = &t.content[start + 17..];
                if let Some(end) =
                    after_export_interface.find(|c: char| c == ' ' || c == '{' || c == '\n')
                {
                    after_export_interface[..end].trim().to_string()
                } else {
                    after_export_interface.trim().to_string()
                }
            } else {
                t.content.clone()
            };

            if !seen_type_names.contains(&type_name) {
                seen_type_names.insert(type_name);
                deduplicated_types.push(t);
            }
        }

        let types_content_raw = deduplicated_types
            .iter()
            .map(|t| t.content.clone())
            .collect::<Vec<_>>()
            .join("\n\n");

        let needs_common_import = types_content_raw.contains("Common.");
        let common_import = if needs_common_import {
            let depth = module_name.matches('/').count() + 1;
            let relative_path = "../".repeat(depth);
            format!("import * as Common from \"{}common\";\n\n", relative_path)
        } else {
            String::new()
        };

        let types_content =
            format_typescript_code(&format!("{}{}", common_import, types_content_raw));

        let types_file = module_dir.join("types.ts");
        write_file_with_backup(&types_file, &types_content, backup, force)?;
        written_files.push(types_file);
    }

    if !zod_schemas.is_empty() {
        let zod_content_raw = zod_schemas
            .iter()
            .map(|z| z.content.clone())
            .collect::<Vec<_>>()
            .join("\n\n");

        let needs_common_import = zod_content_raw.contains("Common.");
        let common_import = if needs_common_import {
            let depth = module_name.matches('/').count() + 1;
            let relative_path = "../".repeat(depth);
            format!("import * as Common from \"{}common\";\n\n", relative_path)
        } else {
            String::new()
        };

        let zod_content = format_typescript_code(&format!(
            "import {{ z }} from \"zod\";\n{}{}",
            common_import, zod_content_raw
        ));

        let zod_file = module_dir.join("schemas.ts");
        write_file_with_backup(&zod_file, &zod_content, backup, force)?;
        written_files.push(zod_file);
    }

    let mut index_exports = Vec::new();
    if !types.is_empty() {
        index_exports.push("export * from \"./types\";".to_string());
    }
    if !zod_schemas.is_empty() {
        index_exports.push("export * from \"./schemas\";".to_string());
    }

    if !index_exports.is_empty() {
        let index_content = format_typescript_code(&(index_exports.join("\n") + "\n"));
        let index_file = module_dir.join("index.ts");
        write_file_with_backup(&index_file, &index_content, backup, force)?;
        written_files.push(index_file);
    }

    Ok(written_files)
}

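/// Writes the generated API client for a module with the default options (no
/// backups, no forced overwrite). Convenience wrapper around
/// [`write_api_client_with_options`].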
pub fn write_api_client(
    output_dir: &Path,
    module_name: &str,
    functions: &[ApiFunction],
) -> Result<Vec<PathBuf>> {
    write_api_client_with_options(output_dir, module_name, functions, false, false)
}

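/// Writes `<output_dir>/<sanitized module_name>/index.ts` containing the
/// generated API functions. Import lines are collected and deduplicated per
/// source module, `export const` functions are deduplicated by name, and the
/// combined output is passed through [`format_typescript_code`] before being
/// written.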
pub fn write_api_client_with_options(
    output_dir: &Path,
    module_name: &str,
    functions: &[ApiFunction],
    backup: bool,
    force: bool,
) -> Result<Vec<PathBuf>> {
    let module_dir = output_dir.join(sanitize_module_name(module_name));
    ensure_directory(&module_dir)?;

    let mut written_files = Vec::new();

    if !functions.is_empty() {
        let mut imports_by_module: std::collections::HashMap<
            String,
            (std::collections::HashSet<String>, Vec<String>),
        > = std::collections::HashMap::new();
        let mut function_bodies = Vec::new();
        let mut seen_functions: std::collections::HashSet<String> =
            std::collections::HashSet::new();

        for func in functions {
            let lines: Vec<&str> = func.content.lines().collect();
            let mut func_lines = Vec::new();
            let mut in_function = false;
            let mut function_name: Option<String> = None;

            for line in lines {
                if line.trim().starts_with("import ") {
                    let import_line = line.trim().trim_end_matches(';').trim();
                    if let Some(from_pos) = import_line.find(" from ") {
                        let before_from = &import_line[..from_pos];
                        let after_from = &import_line[from_pos + 6..];
                        let module_path = after_from.trim_matches('"').trim_matches('\'').trim();

                        if before_from.contains("import type {") {
                            if let Some(start) = before_from.find('{') {
                                if let Some(end) = before_from.find('}') {
                                    let items_str = &before_from[start + 1..end];
                                    let items: Vec<String> = items_str
                                        .split(',')
                                        .map(|s| s.trim().to_string())
                                        .filter(|s| !s.is_empty())
                                        .collect();

                                    let (type_imports, _) = imports_by_module
                                        .entry(module_path.to_string())
                                        .or_insert_with(|| {
                                            (std::collections::HashSet::new(), Vec::new())
                                        });
                                    type_imports.extend(items);
                                }
                            }
                        } else if before_from.contains("import * as ") {
                            let (_, other_imports) = imports_by_module
                                .entry(module_path.to_string())
                                .or_insert_with(|| {
                                    (std::collections::HashSet::new(), Vec::new())
                                });
                            other_imports.push(import_line.to_string());
                        } else {
                            let (_, other_imports) = imports_by_module
                                .entry(module_path.to_string())
                                .or_insert_with(|| {
                                    (std::collections::HashSet::new(), Vec::new())
                                });
                            other_imports.push(import_line.to_string());
                        }
                    } else {
                        let (_, other_imports) = imports_by_module
                            .entry("".to_string())
                            .or_insert_with(|| (std::collections::HashSet::new(), Vec::new()));
                        other_imports.push(import_line.to_string());
                    }
                } else if line.trim().starts_with("export const ") {
                    let trimmed = line.trim();
                    if trimmed.len() > 13 {
                        let after_export_const = &trimmed[13..];
                        let name_end = after_export_const
                            .find(' ')
                            .or_else(|| after_export_const.find('('))
                            .unwrap_or(after_export_const.len());
                        let name = after_export_const[..name_end].trim().to_string();
                        if !name.is_empty() {
                            function_name = Some(name.clone());
                            if seen_functions.contains(&name) {
                                break;
                            }
                            seen_functions.insert(name);
                        }
                    }
                    in_function = true;
                    func_lines.push(line);
                } else if in_function {
                    func_lines.push(line);
                    if line.trim() == "};" {
                        break;
                    }
                }
            }

            if !func_lines.is_empty() && function_name.is_some() {
                function_bodies.push(func_lines.join("\n"));
            }
        }

        let mut imports_vec = Vec::new();
        for (module_path, (type_import_items, other_imports)) in imports_by_module.iter() {
            if module_path.is_empty() {
                let deduped: std::collections::HashSet<String> =
                    other_imports.iter().cloned().collect();
                imports_vec.extend(deduped.into_iter());
            } else {
                let deduped_imports: std::collections::HashSet<String> =
                    other_imports.iter().cloned().collect();
                let mut namespace_imports = Vec::new();
                let mut default_imports = Vec::new();

                for item in deduped_imports.iter() {
                    if item.contains("import * as") {
                        namespace_imports.push(item.clone());
                    } else {
                        default_imports.push(item.clone());
                    }
                }

                namespace_imports.sort();
                for ns_import in namespace_imports {
                    imports_vec.push(format!("{};", ns_import));
                }

                default_imports.sort();
                for default_import in default_imports {
                    imports_vec.push(format!("{};", default_import));
                }

                if !type_import_items.is_empty() {
                    let mut sorted_types: Vec<String> =
                        type_import_items.iter().cloned().collect();
                    sorted_types.sort();
                    imports_vec.push(format!(
                        "import type {{ {} }} from \"{}\";",
                        sorted_types.join(", "),
                        module_path
                    ));
                }
            }
        }
        let imports_str = imports_vec.join("\n");
        let functions_str = function_bodies.join("\n\n");
        let combined_content = if !imports_str.is_empty() {
            format!("{}\n\n{}", imports_str, functions_str)
        } else {
            functions_str
        };

        let functions_content = format_typescript_code(&combined_content);

        let api_file = module_dir.join("index.ts");
        write_file_with_backup(&api_file, &functions_content, backup, force)?;
        written_files.push(api_file);
    }

    Ok(written_files)
}

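/// Writes the bundled fetch-based `http` helper (GET/POST/PUT/DELETE/PATCH/
/// HEAD/OPTIONS wrappers) to `output_path`, creating the parent directory if
/// necessary. The template is written verbatim rather than through
/// [`format_typescript_code`].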
pub fn write_http_client_template(output_path: &Path) -> Result<()> {
    ensure_directory(output_path.parent().unwrap_or(Path::new(".")))?;

    let http_client_content = r#"const requestInitIndicators = [
  "method",
  "headers",
  "body",
  "signal",
  "credentials",
  "cache",
  "redirect",
  "referrer",
  "referrerPolicy",
  "integrity",
  "keepalive",
  "mode",
  "priority",
  "window",
];

const isRequestInitLike = (value: unknown): value is RequestInit => {
  if (!value || typeof value !== "object") {
    return false;
  }
  const candidate = value as Record<string, unknown>;
  return requestInitIndicators.some((key) => key in candidate);
};

export const http = {
  // GET helper. Second argument can be either a RequestInit or a JSON body for uncommon GET-with-body endpoints.
  async get<T = any>(url: string, optionsOrBody?: RequestInit | unknown): Promise<T> {
    let init: RequestInit = { method: "GET", body: null };

    if (optionsOrBody !== undefined && optionsOrBody !== null) {
      if (isRequestInitLike(optionsOrBody)) {
        const candidate = optionsOrBody as RequestInit;
        init = {
          ...candidate,
          method: "GET",
          body: candidate.body ?? null,
        };
      } else {
        init = {
          method: "GET",
          headers: {
            "Content-Type": "application/json",
          },
          body: JSON.stringify(optionsOrBody),
        };
      }
    }

    const response = await fetch(url, {
      ...init,
    });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    return response.json();
  },

  async post<T = any>(url: string, body?: any, options: RequestInit = {}): Promise<T> {
    const response = await fetch(url, {
      ...options,
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        ...(options.headers || {}),
      },
      body: body !== undefined ? JSON.stringify(body) : (options.body ?? null),
    });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    return response.json();
  },

  async put<T = any>(url: string, body?: any, options: RequestInit = {}): Promise<T> {
    const response = await fetch(url, {
      ...options,
      method: "PUT",
      headers: {
        "Content-Type": "application/json",
        ...(options.headers || {}),
      },
      body: body !== undefined ? JSON.stringify(body) : (options.body ?? null),
    });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    return response.json();
  },

  async delete<T = any>(url: string, options: RequestInit = {}): Promise<T> {
    const response = await fetch(url, {
      ...options,
      method: "DELETE",
      body: options.body ?? null,
    });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    return response.json();
  },

  async patch<T = any>(url: string, body?: any, options: RequestInit = {}): Promise<T> {
    const response = await fetch(url, {
      ...options,
      method: "PATCH",
      headers: {
        "Content-Type": "application/json",
        ...(options.headers || {}),
      },
      body: body !== undefined ? JSON.stringify(body) : (options.body ?? null),
    });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    return response.json();
  },

  async head(url: string, options: RequestInit = {}): Promise<Response> {
    const response = await fetch(url, {
      ...options,
      method: "HEAD",
      body: options.body ?? null,
    });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    return response;
  },

  async options<T = any>(url: string, options: RequestInit = {}): Promise<T> {
    const response = await fetch(url, {
      ...options,
      method: "OPTIONS",
      body: options.body ?? null,
    });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    return response.json();
  },
};
"#;

    write_file_safe(output_path, http_client_content)?;

    Ok(())
}

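/// Minimal formatter for generated TypeScript: trims each line (which also
/// strips indentation), collapses runs of blank lines into one, and drops
/// leading and trailing blank lines. Output is presumably intended to be
/// re-indented by downstream tooling if needed.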
fn format_typescript_code(code: &str) -> String {
    let lines: Vec<&str> = code.lines().collect();
    let mut formatted = Vec::new();
    let mut last_was_empty = false;

    for line in lines {
        let trimmed = line.trim();
        if trimmed.is_empty() {
            if !last_was_empty && !formatted.is_empty() {
                formatted.push(String::new());
                last_was_empty = true;
            }
            continue;
        }
        last_was_empty = false;
        formatted.push(trimmed.to_string());
    }

    while formatted.last().map(|s| s.is_empty()).unwrap_or(false) {
        formatted.pop();
    }

    formatted.join("\n")
}

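/// Writes `content` to `path` with change detection but without creating a
/// backup or forcing the write; see [`write_file_with_backup`].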
pub fn write_file_safe(path: &Path, content: &str) -> Result<()> {
    write_file_with_backup(path, content, false, false)
}

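/// Writes `content` to `path`, skipping the write entirely when the file
/// already holds identical content. With `backup`, the existing file is first
/// copied into `.vika-backup/<timestamp>/`. Unless `force` is set, the hash
/// stored in `.vika-cache` is compared against both the new content and the
/// current file so that files edited by hand are not silently overwritten.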
pub fn write_file_with_backup(path: &Path, content: &str, backup: bool, force: bool) -> Result<()> {
    let file_exists = path.exists();
    let should_write = if file_exists {
        if let Ok(existing_content) = std::fs::read_to_string(path) {
            existing_content != content
        } else {
            true
        }
    } else {
        true
    };

    if !should_write {
        return Ok(());
    }

    if backup && file_exists {
        create_backup(path)?;
    }

    if !force && file_exists {
        if let Ok(metadata) = load_file_metadata(path) {
            let current_hash = compute_content_hash(content);
            let file_hash = compute_file_hash(path)?;
            if metadata.hash != current_hash && metadata.hash != file_hash {
                return Err(FileSystemError::FileModifiedByUser {
                    path: path.display().to_string(),
                }
                .into());
            }
        }
    }

    std::fs::write(path, content).map_err(|e| FileSystemError::WriteFileFailed {
        path: path.display().to_string(),
        source: e,
    })?;

    save_file_metadata(path, content)?;

    Ok(())
}

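/// Copies `path` into `.vika-backup/<unix timestamp>/`. Absolute paths are
/// flattened to `<path hash>_<file name>` so the full directory tree does not
/// have to be recreated; relative paths keep their structure inside the
/// backup directory.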
fn create_backup(path: &Path) -> Result<()> {
    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};
    use std::time::{SystemTime, UNIX_EPOCH};

    let timestamp = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .unwrap()
        .as_secs();

    let backup_dir = PathBuf::from(format!(".vika-backup/{}", timestamp));
    std::fs::create_dir_all(&backup_dir).map_err(|e| FileSystemError::CreateDirectoryFailed {
        path: backup_dir.display().to_string(),
        source: e,
    })?;

    let backup_path = if path.is_absolute() {
        let path_str = path.display().to_string();
        let mut hasher = DefaultHasher::new();
        path_str.hash(&mut hasher);
        let hash = format!("{:x}", hasher.finish());
        let filename = path.file_name().and_then(|n| n.to_str()).unwrap_or("file");
        backup_dir.join(format!("{}_{}", hash, filename))
    } else {
        let relative_path = path.strip_prefix(".").unwrap_or(path);
        backup_dir.join(relative_path)
    };

    if let Some(parent) = backup_path.parent() {
        std::fs::create_dir_all(parent).map_err(|e| FileSystemError::CreateDirectoryFailed {
            path: parent.display().to_string(),
            source: e,
        })?;
    }

    std::fs::copy(path, &backup_path).map_err(|e| FileSystemError::WriteFileFailed {
        path: backup_path.display().to_string(),
        source: e,
    })?;

    Ok(())
}

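/// Per-file entry persisted in `.vika-cache/file-metadata.json`; used to
/// detect whether a generated file was modified outside the generator.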
#[derive(Clone, serde::Serialize, serde::Deserialize)]
struct FileMetadata {
    hash: String,
    generated_at: u64,
    generated_by: String,
}

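/// Hashes content with `DefaultHasher`. Note that `DefaultHasher` does not
/// guarantee a stable algorithm across Rust releases, so persisted hashes may
/// stop matching after a toolchain upgrade.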
fn compute_content_hash(content: &str) -> String {
    let mut hasher = DefaultHasher::new();
    content.hash(&mut hasher);
    format!("{:x}", hasher.finish())
}

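/// Reads `path` and hashes its contents with [`compute_content_hash`].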
fn compute_file_hash(path: &Path) -> Result<String> {
    let content = std::fs::read_to_string(path).map_err(|e| FileSystemError::ReadFileFailed {
        path: path.display().to_string(),
        source: e,
    })?;
    Ok(compute_content_hash(&content))
}

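/// Records the content hash, generation timestamp, and generator name for
/// `path` in `.vika-cache/file-metadata.json`, creating the cache directory
/// and metadata file on first use.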
fn save_file_metadata(path: &Path, content: &str) -> Result<()> {
    let metadata_dir = PathBuf::from(".vika-cache");
    std::fs::create_dir_all(&metadata_dir).map_err(|e| FileSystemError::CreateDirectoryFailed {
        path: metadata_dir.display().to_string(),
        source: e,
    })?;

    let metadata_file = metadata_dir.join("file-metadata.json");
    let mut metadata_map: std::collections::HashMap<String, FileMetadata> =
        if metadata_file.exists() {
            let content = std::fs::read_to_string(&metadata_file).map_err(|e| {
                FileSystemError::ReadFileFailed {
                    path: metadata_file.display().to_string(),
                    source: e,
                }
            })?;
            serde_json::from_str(&content).unwrap_or_default()
        } else {
            std::collections::HashMap::new()
        };

    let hash = compute_content_hash(content);
    let generated_at = SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .unwrap()
        .as_secs();

    metadata_map.insert(
        path.display().to_string(),
        FileMetadata {
            hash,
            generated_at,
            generated_by: "vika-cli".to_string(),
        },
    );

    let json = serde_json::to_string_pretty(&metadata_map).map_err(|e| {
        FileSystemError::WriteFileFailed {
            path: metadata_file.display().to_string(),
            source: std::io::Error::new(std::io::ErrorKind::InvalidData, format!("{}", e)),
        }
    })?;

    std::fs::write(&metadata_file, json).map_err(|e| FileSystemError::WriteFileFailed {
        path: metadata_file.display().to_string(),
        source: e,
    })?;

    Ok(())
}

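/// Looks up the stored [`FileMetadata`] for `path`, returning a
/// `FileNotFound` error when either the metadata file or the entry for this
/// path is missing.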
fn load_file_metadata(path: &Path) -> Result<FileMetadata> {
    let metadata_file = PathBuf::from(".vika-cache/file-metadata.json");
    if !metadata_file.exists() {
        return Err(FileSystemError::FileNotFound {
            path: metadata_file.display().to_string(),
        }
        .into());
    }

    let content =
        std::fs::read_to_string(&metadata_file).map_err(|e| FileSystemError::ReadFileFailed {
            path: metadata_file.display().to_string(),
            source: e,
        })?;

    let metadata_map: std::collections::HashMap<String, FileMetadata> =
        serde_json::from_str(&content).map_err(|e| FileSystemError::ReadFileFailed {
            path: metadata_file.display().to_string(),
            source: std::io::Error::new(std::io::ErrorKind::InvalidData, format!("{}", e)),
        })?;

    metadata_map
        .get(&path.display().to_string())
        .cloned()
        .ok_or_else(|| {
            FileSystemError::FileNotFound {
                path: path.display().to_string(),
            }
            .into()
        })
}