// ralph/cli/queue/import.rs
1//! Queue import subcommand for importing tasks from CSV, TSV, or JSON.
2//!
3//! Responsibilities:
4//! - Parse input formats (CSV, TSV, JSON) into task structures.
5//! - Normalize and backfill imported tasks (timestamps, IDs, list fields).
6//! - Apply duplicate handling policy (fail, skip, rename).
7//! - Merge imported tasks into the active queue with proper positioning.
8//!
9//! Not handled here:
10//! - Export functionality (see `crate::cli::queue::export`).
11//! - GUI-specific import workflows (this is a CLI command).
12//! - Complex schema migration between versions.
13//!
14//! Invariants/assumptions:
15//! - Always acquire queue lock before modifying queue files.
16//! - Never write to disk on parse/validation failures.
17//! - Undo snapshots are only created AFTER all validation succeeds (no orphaned snapshots on error).
18//! - Always backfill required timestamps (created_at, updated_at, completed_at for terminal statuses).
19//! - List fields are trimmed and empty items are dropped.
20
21use std::collections::{HashMap, HashSet};
22use std::io::Read;
23use std::path::PathBuf;
24
25use anyhow::{Context, Result, bail};
26use clap::Args;
27
28use crate::config::Resolved;
29use crate::contracts::{QueueFile, Task, TaskPriority, TaskStatus};
30use crate::queue;
31
32use super::QueueImportFormat;
33
/// Arguments for `ralph queue import`.
// NOTE: the `///` doc comments on fields below double as the CLI help text
// rendered by clap — do not edit them casually; changing them changes the
// program's --help output.
#[derive(Args)]
#[command(
    after_long_help = "Examples:\n  ralph queue export --format json | ralph queue import --format json --dry-run\n  ralph queue import --format csv --input tasks.csv\n  ralph queue import --format tsv --input - --on-duplicate rename < tasks.tsv\n  ralph queue import --format json --input tasks.json --on-duplicate skip"
)]
pub struct QueueImportArgs {
    /// Input format.
    #[arg(long, value_enum)]
    pub format: QueueImportFormat,

    /// Input file path (default: stdin). Use '-' for stdin.
    // None and Some("-") are treated identically (see read_input).
    #[arg(long, short)]
    pub input: Option<PathBuf>,

    /// Show what would change without writing to disk.
    // Parsing, merging, and validation still run on dry-run, so a dry run
    // fails on invalid input exactly like a real import would (see handle).
    #[arg(long)]
    pub dry_run: bool,

    /// What to do if an imported task ID already exists.
    #[arg(long, value_enum, default_value_t = OnDuplicate::Fail)]
    pub on_duplicate: OnDuplicate,
}
56
/// Policy for handling duplicate task IDs during import.
// The variant doc comments are surfaced as CLI help by clap; with
// `rename_all = "snake_case"` the accepted CLI values are
// `fail`, `skip`, and `rename`.
#[derive(Clone, Copy, Debug, clap::ValueEnum)]
#[clap(rename_all = "snake_case")]
pub enum OnDuplicate {
    /// Fail with an error if a duplicate ID is found.
    Fail,
    /// Skip duplicate tasks and continue importing others.
    Skip,
    /// Generate a new ID for duplicate tasks.
    Rename,
}
68
/// Summary of an import operation for logging.
struct ImportReport {
    /// Number of tasks successfully parsed from the input.
    parsed: usize,
    /// Number of tasks actually added to the queue.
    imported: usize,
    /// Number of tasks dropped because their ID already existed
    /// (`--on-duplicate skip`).
    skipped_duplicates: usize,
    /// Number of tasks given a fresh ID (`--on-duplicate rename`).
    renamed: usize,
    /// `(old_id, new_id)` pairs for every renamed task.
    rename_mappings: Vec<(String, String)>,
}

impl ImportReport {
    /// Maximum number of individual rename mappings listed in `summary()`;
    /// the remainder is collapsed into a "... and N more" line.
    const MAX_SHOWN_RENAMES: usize = 50;

    /// Render a single-line, `"; "`-separated summary suitable for log
    /// output. The parsed count is always shown; other counters are
    /// omitted when zero.
    fn summary(&self) -> String {
        let mut parts = vec![format!("parsed {} task(s)", self.parsed)];
        if self.imported > 0 {
            parts.push(format!("imported {}", self.imported));
        }
        if self.skipped_duplicates > 0 {
            parts.push(format!("skipped {} duplicate(s)", self.skipped_duplicates));
        }
        if self.renamed > 0 {
            parts.push(format!("renamed {} task(s)", self.renamed));
            // Cap the per-task mapping lines so huge imports stay readable.
            let show_count = self.rename_mappings.len().min(Self::MAX_SHOWN_RENAMES);
            for (old, new) in &self.rename_mappings[..show_count] {
                parts.push(format!("  {} -> {}", old, new));
            }
            if self.rename_mappings.len() > Self::MAX_SHOWN_RENAMES {
                parts.push(format!(
                    "  ... and {} more",
                    self.rename_mappings.len() - Self::MAX_SHOWN_RENAMES
                ));
            }
        }
        parts.join("; ")
    }
}
104
/// Entry point for `ralph queue import`.
///
/// Pipeline: acquire queue lock -> read input -> parse (per `--format`) ->
/// normalize/backfill -> merge with the `--on-duplicate` policy ->
/// validate -> undo snapshot + save (both skipped on `--dry-run`).
///
/// Nothing is written to disk until every fallible step has succeeded, so
/// parse, merge, or validation errors leave the queue files untouched.
pub(crate) fn handle(resolved: &Resolved, force: bool, args: QueueImportArgs) -> Result<()> {
    // Hold the lock for the whole operation; `force` presumably bypasses a
    // stale lock — TODO confirm against acquire_queue_lock.
    let _queue_lock = queue::acquire_queue_lock(&resolved.repo_root, "queue import", force)?;

    let input = read_input(args.input.as_ref()).context("read import input")?;

    // Parse the input based on format
    let mut imported = match args.format {
        QueueImportFormat::Json => parse_json_tasks(&input)?,
        QueueImportFormat::Csv => parse_csv_tasks(&input, b',')?,
        QueueImportFormat::Tsv => parse_csv_tasks(&input, b'\t')?,
    };

    // Single timestamp for the whole import so all backfilled tasks share
    // one "now".
    let now = crate::timeutil::now_utc_rfc3339_or_fallback();

    // Load existing queue + done for uniqueness checks
    let (mut queue_file, done_file) = crate::queue::load_and_validate_queues(resolved, true)?;
    // NOTE(review): this looks like it keeps the done file only when it
    // genuinely exists (has tasks, or is present on disk), ignoring a
    // synthesized empty default — confirm against load_and_validate_queues.
    let done_ref = done_file
        .as_ref()
        .filter(|d| !d.tasks.is_empty() || resolved.done_path.exists());

    // Normalize and backfill imported tasks
    for task in &mut imported {
        normalize_task(task, &now);
    }

    // Merge imported tasks
    let report = merge_imported_tasks(
        &mut queue_file,
        done_ref,
        imported,
        &resolved.id_prefix,
        resolved.id_width,
        resolved.config.queue.max_dependency_depth.unwrap_or(10),
        &now,
        args.on_duplicate,
    )?;

    // Validate (including for dry-run). Dry-run should fail if the resulting queue would be invalid.
    let warnings = queue::validate_queue_set(
        &queue_file,
        done_ref,
        &resolved.id_prefix,
        resolved.id_width,
        resolved.config.queue.max_dependency_depth.unwrap_or(10),
    )?;
    queue::log_warnings(&warnings);

    // Create undo snapshot before mutation (only if not dry-run and validation passed).
    // This must happen AFTER all fallible operations (parsing, merging, validation) to avoid
    // leaving orphaned snapshots when the import operation itself fails.
    if !args.dry_run {
        crate::undo::create_undo_snapshot(resolved, "queue import")?;
    }

    if args.dry_run {
        log::info!("Dry run: no changes written. {}", report.summary());
        return Ok(());
    }

    queue::save_queue(&resolved.queue_path, &queue_file)?;
    log::info!("Imported tasks. {}", report.summary());

    Ok(())
}
169
170/// Read input from file or stdin.
171fn read_input(path: Option<&PathBuf>) -> Result<String> {
172    let use_stdin = path.is_none() || path.is_some_and(|p| p.as_os_str() == "-");
173
174    if use_stdin {
175        let mut buffer = String::new();
176        std::io::stdin()
177            .read_to_string(&mut buffer)
178            .context("read from stdin")?;
179        Ok(buffer)
180    } else {
181        let path = path.unwrap();
182        std::fs::read_to_string(path)
183            .with_context(|| format!("read import file {}", path.display()))
184    }
185}
186
187/// Parse JSON tasks from input.
188/// Accepts either a JSON array of tasks or a wrapper object { "version": 1, "tasks": [...] }.
189fn parse_json_tasks(input: &str) -> Result<Vec<Task>> {
190    let trimmed = input.trim();
191    if trimmed.is_empty() {
192        return Ok(Vec::new());
193    }
194
195    // First try parsing as Vec<Task>
196    match serde_json::from_str::<Vec<Task>>(trimmed) {
197        Ok(tasks) => Ok(tasks),
198        Err(arr_err) => {
199            // Try parsing as wrapper object
200            #[derive(serde::Deserialize)]
201            #[serde(deny_unknown_fields)]
202            struct TasksWrapper {
203                #[serde(default)]
204                version: Option<u32>,
205                tasks: Vec<Task>,
206            }
207
208            match serde_json::from_str::<TasksWrapper>(trimmed) {
209                Ok(wrapper) => {
210                    if let Some(ver) = wrapper.version
211                        && ver != 1
212                    {
213                        bail!(
214                            "Unsupported wrapper version: {}. Only version 1 is supported.",
215                            ver
216                        );
217                    }
218                    Ok(wrapper.tasks)
219                }
220                Err(_) => {
221                    // Return the original array parse error for clearer diagnostics
222                    bail!(
223                        "Invalid JSON format: {}. Expected array of tasks or {{\"version\": 1, \"tasks\": [...]}} wrapper.",
224                        arr_err
225                    )
226                }
227            }
228        }
229    }
230}
231
232/// Parse CSV/TSV tasks from input.
233fn parse_csv_tasks(input: &str, delimiter: u8) -> Result<Vec<Task>> {
234    let mut tasks = Vec::new();
235
236    if input.trim().is_empty() {
237        return Ok(tasks);
238    }
239
240    let mut reader = csv::ReaderBuilder::new()
241        .delimiter(delimiter)
242        .has_headers(true)
243        .flexible(true)
244        .from_reader(input.as_bytes());
245
246    let headers = reader
247        .headers()?
248        .iter()
249        .map(|h| h.to_lowercase())
250        .collect::<Vec<_>>();
251    let header_map: HashMap<String, usize> = headers
252        .iter()
253        .enumerate()
254        .map(|(i, h)| (h.clone(), i))
255        .collect();
256
257    // Check for required 'title' column
258    if !header_map.contains_key("title") {
259        bail!("CSV/TSV import requires a 'title' column");
260    }
261
262    for (row_idx, result) in reader.records().enumerate() {
263        let record = result.with_context(|| format!("parse CSV row {}", row_idx + 1))?;
264
265        let mut task = Task::default();
266
267        // Required: title
268        let title_idx = header_map["title"];
269        task.title = record
270            .get(title_idx)
271            .map(|s| s.trim().to_string())
272            .unwrap_or_default();
273        if task.title.is_empty() {
274            bail!("Row {}: title is required and cannot be empty", row_idx + 1);
275        }
276
277        // Optional: id
278        if let Some(&idx) = header_map.get("id") {
279            task.id = record
280                .get(idx)
281                .map(|s| s.trim().to_string())
282                .unwrap_or_default();
283        }
284
285        // Optional: status
286        if let Some(&idx) = header_map.get("status") {
287            let status_str = record.get(idx).unwrap_or("").trim().to_lowercase();
288            if !status_str.is_empty() {
289                task.status = parse_status(&status_str)?;
290            }
291        }
292
293        // Optional: priority
294        if let Some(&idx) = header_map.get("priority") {
295            let raw = record.get(idx).unwrap_or("");
296            let trimmed = raw.trim();
297            if !trimmed.is_empty() {
298                task.priority = trimmed.parse()?;
299            }
300        }
301
302        // Optional: tags (comma-separated)
303        if let Some(&idx) = header_map.get("tags") {
304            task.tags = parse_list_field(record.get(idx).unwrap_or(""), ',');
305        }
306
307        // Optional: scope (comma-separated)
308        if let Some(&idx) = header_map.get("scope") {
309            task.scope = parse_list_field(record.get(idx).unwrap_or(""), ',');
310        }
311
312        // Optional: evidence (semicolon-separated)
313        if let Some(&idx) = header_map.get("evidence") {
314            task.evidence = parse_list_field(record.get(idx).unwrap_or(""), ';');
315        }
316
317        // Optional: plan (semicolon-separated)
318        if let Some(&idx) = header_map.get("plan") {
319            task.plan = parse_list_field(record.get(idx).unwrap_or(""), ';');
320        }
321
322        // Optional: notes (semicolon-separated)
323        if let Some(&idx) = header_map.get("notes") {
324            task.notes = parse_list_field(record.get(idx).unwrap_or(""), ';');
325        }
326
327        // Optional: request
328        if let Some(&idx) = header_map.get("request") {
329            let req = record.get(idx).unwrap_or("").trim().to_string();
330            task.request = if req.is_empty() { None } else { Some(req) };
331        }
332
333        // Optional: created_at
334        if let Some(&idx) = header_map.get("created_at") {
335            let ts = record.get(idx).unwrap_or("").trim().to_string();
336            task.created_at = if ts.is_empty() { None } else { Some(ts) };
337        }
338
339        // Optional: updated_at
340        if let Some(&idx) = header_map.get("updated_at") {
341            let ts = record.get(idx).unwrap_or("").trim().to_string();
342            task.updated_at = if ts.is_empty() { None } else { Some(ts) };
343        }
344
345        // Optional: completed_at
346        if let Some(&idx) = header_map.get("completed_at") {
347            let ts = record.get(idx).unwrap_or("").trim().to_string();
348            task.completed_at = if ts.is_empty() { None } else { Some(ts) };
349        }
350
351        // Optional: depends_on (comma-separated)
352        if let Some(&idx) = header_map.get("depends_on") {
353            task.depends_on = parse_list_field(record.get(idx).unwrap_or(""), ',');
354        }
355
356        // Optional: blocks (comma-separated)
357        if let Some(&idx) = header_map.get("blocks") {
358            task.blocks = parse_list_field(record.get(idx).unwrap_or(""), ',');
359        }
360
361        // Optional: relates_to (comma-separated)
362        if let Some(&idx) = header_map.get("relates_to") {
363            task.relates_to = parse_list_field(record.get(idx).unwrap_or(""), ',');
364        }
365
366        // Optional: duplicates
367        if let Some(&idx) = header_map.get("duplicates") {
368            let dup = record.get(idx).unwrap_or("").trim().to_string();
369            task.duplicates = if dup.is_empty() { None } else { Some(dup) };
370        }
371
372        // Optional: custom_fields (k=v comma-separated)
373        if let Some(&idx) = header_map.get("custom_fields") {
374            task.custom_fields = parse_custom_fields(record.get(idx).unwrap_or(""))?;
375        }
376
377        // Optional: parent_id
378        if let Some(&idx) = header_map.get("parent_id") {
379            let pid = record.get(idx).unwrap_or("").trim().to_string();
380            task.parent_id = if pid.is_empty() { None } else { Some(pid) };
381        }
382
383        tasks.push(task);
384    }
385
386    Ok(tasks)
387}
388
/// Parse a list field by splitting on delimiter and trimming/dropping empty items.
fn parse_list_field(value: &str, delimiter: char) -> Vec<String> {
    let mut items = Vec::new();
    for piece in value.split(delimiter) {
        let piece = piece.trim();
        if !piece.is_empty() {
            items.push(piece.to_string());
        }
    }
    items
}
397
398/// Parse custom fields from "k=v,k2=v2" format.
399fn parse_custom_fields(value: &str) -> Result<HashMap<String, String>> {
400    let mut fields = HashMap::new();
401    if value.trim().is_empty() {
402        return Ok(fields);
403    }
404
405    for pair in value.split(',') {
406        let pair = pair.trim();
407        if pair.is_empty() {
408            continue;
409        }
410
411        let parts: Vec<&str> = pair.splitn(2, '=').collect();
412        if parts.len() != 2 {
413            bail!(
414                "Invalid custom field format: '{}'. Expected 'key=value'.",
415                pair
416            );
417        }
418
419        let key = parts[0].trim();
420        let val = parts[1].trim();
421
422        if key.is_empty() {
423            bail!("Empty custom field key in '{}'", pair);
424        }
425        if key.chars().any(|c| c.is_whitespace()) {
426            bail!("Custom field key cannot contain whitespace: '{}'", key);
427        }
428
429        fields.insert(key.to_string(), val.to_string());
430    }
431
432    Ok(fields)
433}
434
435/// Parse status case-insensitively.
436fn parse_status(s: &str) -> Result<TaskStatus> {
437    match s.to_lowercase().as_str() {
438        "draft" => Ok(TaskStatus::Draft),
439        "todo" => Ok(TaskStatus::Todo),
440        "doing" => Ok(TaskStatus::Doing),
441        "done" => Ok(TaskStatus::Done),
442        "rejected" => Ok(TaskStatus::Rejected),
443        _ => bail!(
444            "Invalid status: '{}'. Expected: draft, todo, doing, done, rejected",
445            s
446        ),
447    }
448}
449
450/// Normalize a task: trim fields, drop empty list items, backfill timestamps.
451fn normalize_task(task: &mut Task, now: &str) {
452    // Trim ID and title
453    task.id = task.id.trim().to_string();
454    task.title = task.title.trim().to_string();
455
456    // Normalize list fields: trim and drop empty
457    task.tags = normalize_list(&task.tags);
458    task.scope = normalize_list(&task.scope);
459    task.evidence = normalize_list(&task.evidence);
460    task.plan = normalize_list(&task.plan);
461    task.notes = normalize_list(&task.notes);
462    task.depends_on = normalize_list(&task.depends_on);
463    task.blocks = normalize_list(&task.blocks);
464    task.relates_to = normalize_list(&task.relates_to);
465
466    // Normalize custom field keys
467    let mut normalized_fields = HashMap::new();
468    for (k, v) in &task.custom_fields {
469        let key = k.trim();
470        if !key.is_empty() {
471            normalized_fields.insert(key.to_string(), v.trim().to_string());
472        }
473    }
474    task.custom_fields = normalized_fields;
475
476    // Backfill timestamps
477    if task.created_at.as_ref().is_none_or(|t| t.trim().is_empty()) {
478        task.created_at = Some(now.to_string());
479    }
480    if task.updated_at.as_ref().is_none_or(|t| t.trim().is_empty()) {
481        task.updated_at = Some(now.to_string());
482    }
483    if matches!(task.status, TaskStatus::Done | TaskStatus::Rejected)
484        && task
485            .completed_at
486            .as_ref()
487            .is_none_or(|t| t.trim().is_empty())
488    {
489        task.completed_at = Some(now.to_string());
490    }
491}
492
/// Normalize a list: trim items and drop empty strings.
fn normalize_list(items: &[String]) -> Vec<String> {
    let mut out = Vec::with_capacity(items.len());
    for item in items {
        let trimmed = item.trim();
        if !trimmed.is_empty() {
            out.push(trimmed.to_string());
        }
    }
    out
}
501
/// Merge imported tasks into the queue with duplicate handling.
///
/// Two passes: (1) apply the duplicate policy against existing queue+done
/// IDs AND the batch itself; (2) mint fresh IDs for tasks that need one
/// (no ID, or renamed). Mutates `queue` in place; `done` is consulted only
/// for uniqueness and ID-sequence computation. Returns a report of what
/// was added/skipped/renamed.
///
/// NOTE(review): renaming generates a fresh ID but does NOT rewrite
/// `depends_on`/`blocks`/`relates_to` references to the old ID elsewhere
/// in the batch — presumably validate_queue_set flags dangling refs;
/// confirm.
#[allow(clippy::too_many_arguments)]
fn merge_imported_tasks(
    queue: &mut QueueFile,
    done: Option<&QueueFile>,
    imported: Vec<Task>,
    id_prefix: &str,
    id_width: usize,
    max_depth: u8,
    now: &str,
    on_duplicate: OnDuplicate,
) -> Result<ImportReport> {
    // Build set of existing IDs
    let mut existing_ids: HashSet<String> = queue.tasks.iter().map(|t| t.id.clone()).collect();
    if let Some(d) = done {
        existing_ids.extend(d.tasks.iter().map(|t| t.id.clone()));
    }

    let mut report = ImportReport {
        parsed: imported.len(),
        imported: 0,
        skipped_duplicates: 0,
        renamed: 0,
        rename_mappings: Vec::new(),
    };

    let mut tasks_to_add: Vec<Task> = Vec::new();
    // Deferred ID assignment: which slot in tasks_to_add needs a fresh ID,
    // and (for renames) which ID it used to have.
    struct NeedsId {
        idx: usize, // index into tasks_to_add
        old_id: Option<String>,
    }
    let mut needs_new_id: Vec<NeedsId> = Vec::new();

    // First pass: handle duplicates and collect tasks
    for mut task in imported {
        // Skip empty/whitespace IDs for duplicate check - they'll get new IDs
        // (normalize_task already trimmed the ID, so empty means "no ID").
        let has_id = !task.id.is_empty();

        if has_id {
            // Duplicates are checked against the existing queues AND the
            // tasks accepted so far in this batch.
            let is_duplicate = existing_ids.contains(&task.id)
                || tasks_to_add.iter().any(|t: &Task| t.id == task.id);

            if is_duplicate {
                match on_duplicate {
                    OnDuplicate::Fail => {
                        bail!(
                            "Duplicate task ID detected: '{}'. Use --on-duplicate skip or rename to handle duplicates.",
                            task.id
                        );
                    }
                    OnDuplicate::Skip => {
                        report.skipped_duplicates += 1;
                        continue;
                    }
                    OnDuplicate::Rename => {
                        let old_id = task.id.clone();
                        task.id.clear(); // Will generate new ID
                        needs_new_id.push(NeedsId {
                            idx: tasks_to_add.len(),
                            old_id: Some(old_id),
                        });
                        tasks_to_add.push(task);
                        continue;
                    }
                }
            }
        } else {
            // No ID provided, needs new ID
            needs_new_id.push(NeedsId {
                idx: tasks_to_add.len(),
                old_id: None,
            });
        }

        tasks_to_add.push(task);
    }

    // Generate new IDs for tasks that need them
    if !needs_new_id.is_empty() {
        // Create a temporary queue for ID generation
        // (placeholders make each successive next_id_across call see the
        // IDs already handed out in this loop).
        let mut temp_queue = queue.clone();

        for need in &needs_new_id {
            // Reserve a new ID by adding a placeholder task
            let new_id = queue::next_id_across(&temp_queue, done, id_prefix, id_width, max_depth)?;

            // Update the task with the new ID
            // (defensive bound check: idx was taken from tasks_to_add.len()
            // immediately before the push, so it is always in range).
            if need.idx < tasks_to_add.len() {
                let task = &mut tasks_to_add[need.idx];
                if let Some(old_id) = need.old_id.as_ref() {
                    report
                        .rename_mappings
                        .push((old_id.clone(), new_id.clone()));
                }
                task.id = new_id.clone();
            }

            // Add placeholder to temp_queue for next ID calculation
            temp_queue.tasks.push(create_placeholder_task(new_id, now));
        }
    }
    report.renamed = report.rename_mappings.len();

    // Collect IDs of tasks being added
    let new_task_ids: Vec<String> = tasks_to_add.iter().map(|t| t.id.clone()).collect();

    // Add tasks to queue
    queue.tasks.extend(tasks_to_add);
    report.imported = new_task_ids.len();

    // Reposition new tasks
    if !new_task_ids.is_empty() {
        let insert_at = crate::queue::operations::suggest_new_task_insert_index(queue);
        crate::queue::operations::reposition_new_tasks(queue, &new_task_ids, insert_at);
    }

    Ok(report)
}
620
621/// Create a minimal placeholder task for ID reservation.
622fn create_placeholder_task(id: String, now: &str) -> Task {
623    Task {
624        id,
625        title: "__import_id_reservation__".to_string(),
626        description: None,
627        status: TaskStatus::Todo,
628        priority: TaskPriority::Medium,
629        created_at: Some(now.to_string()),
630        updated_at: Some(now.to_string()),
631        ..Default::default()
632    }
633}
634
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn parse_json_array_succeeds() {
        let json = r#"[{"id": "RQ-0001", "title": "Test task", "status": "todo"}]"#;
        let tasks = parse_json_tasks(json).unwrap();
        assert_eq!(tasks.len(), 1);
        assert_eq!(tasks[0].id, "RQ-0001");
        assert_eq!(tasks[0].title, "Test task");
    }

    #[test]
    fn parse_json_wrapper_succeeds() {
        let json = r#"{"version": 1, "tasks": [{"id": "RQ-0001", "title": "Test"}]}"#;
        let tasks = parse_json_tasks(json).unwrap();
        assert_eq!(tasks.len(), 1);
        assert_eq!(tasks[0].id, "RQ-0001");
    }

    #[test]
    fn parse_json_wrapper_wrong_version_fails() {
        let json = r#"{"version": 2, "tasks": [{"id": "RQ-0001", "title": "Test"}]}"#;
        let result = parse_json_tasks(json);
        assert!(result.is_err());
        assert!(result.unwrap_err().to_string().contains("version"));
    }

    #[test]
    fn parse_json_empty_input_returns_empty() {
        let tasks = parse_json_tasks("").unwrap();
        assert!(tasks.is_empty());
        let tasks = parse_json_tasks("   ").unwrap();
        assert!(tasks.is_empty());
    }

    #[test]
    fn parse_csv_basic_succeeds() {
        let csv = "id,title,status\nRQ-0001,Test task,todo\nRQ-0002,Another task,done";
        let tasks = parse_csv_tasks(csv, b',').unwrap();
        assert_eq!(tasks.len(), 2);
        assert_eq!(tasks[0].id, "RQ-0001");
        assert_eq!(tasks[0].title, "Test task");
        assert_eq!(tasks[0].status, TaskStatus::Todo);
        assert_eq!(tasks[1].status, TaskStatus::Done);
    }

    #[test]
    fn parse_csv_missing_title_fails() {
        let csv = "id,status\nRQ-0001,todo";
        let result = parse_csv_tasks(csv, b',');
        assert!(result.is_err());
        assert!(result.unwrap_err().to_string().contains("title"));
    }

    #[test]
    fn parse_csv_empty_title_fails() {
        let csv = "id,title\nRQ-0001,";
        let result = parse_csv_tasks(csv, b',');
        assert!(result.is_err());
    }

    #[test]
    fn parse_csv_list_fields_parsed() {
        let csv = "title,tags,scope,evidence,plan,notes\nTest,a,b,c,d,e";
        let tasks = parse_csv_tasks(csv, b',').unwrap();
        assert_eq!(tasks[0].tags, vec!["a"]);
        assert_eq!(tasks[0].scope, vec!["b"]);
        assert_eq!(tasks[0].evidence, vec!["c"]);
        assert_eq!(tasks[0].plan, vec!["d"]);
        assert_eq!(tasks[0].notes, vec!["e"]);
    }

    #[test]
    fn parse_csv_list_fields_drop_empty() {
        // Use the semicolon-separated `evidence` field to test empty-item
        // handling without CSV quoting issues.
        let csv = "title,evidence\nTest,a;;b;";
        let tasks = parse_csv_tasks(csv, b',').unwrap();
        assert_eq!(tasks[0].evidence, vec!["a", "b"]);
    }

    #[test]
    fn parse_csv_semicolon_fields_parsed() {
        let csv = "title,evidence,plan,notes\nTest,a;b,c;d,e;f;";
        let tasks = parse_csv_tasks(csv, b',').unwrap();
        assert_eq!(tasks[0].evidence, vec!["a", "b"]);
        assert_eq!(tasks[0].plan, vec!["c", "d"]);
        assert_eq!(tasks[0].notes, vec!["e", "f"]);
    }

    #[test]
    fn parse_csv_custom_fields_parsed() {
        // Quoted custom_fields value to handle comma within field
        let csv = "title,custom_fields\nTest,\"a=1,b=two\"";
        let tasks = parse_csv_tasks(csv, b',').unwrap();
        assert_eq!(tasks[0].custom_fields.get("a"), Some(&"1".to_string()));
        assert_eq!(tasks[0].custom_fields.get("b"), Some(&"two".to_string()));
    }

    #[test]
    fn parse_csv_custom_fields_invalid_fails() {
        let csv = "title,custom_fields\nTest,invalid_no_equals";
        let result = parse_csv_tasks(csv, b',');
        assert!(result.is_err());
    }

    #[test]
    fn parse_csv_empty_custom_fields_ok() {
        let csv = "title,custom_fields\nTest,";
        let tasks = parse_csv_tasks(csv, b',').unwrap();
        assert!(tasks[0].custom_fields.is_empty());
    }

    #[test]
    fn parse_csv_unknown_columns_ignored() {
        let csv = "id,title,unknown_col\nRQ-0001,Test,foo";
        let tasks = parse_csv_tasks(csv, b',').unwrap();
        assert_eq!(tasks[0].id, "RQ-0001");
        assert_eq!(tasks[0].title, "Test");
    }

    #[test]
    fn parse_tsv_succeeds() {
        let tsv = "id\ttitle\tstatus\nRQ-0001\tTest\ttodo";
        let tasks = parse_csv_tasks(tsv, b'\t').unwrap();
        assert_eq!(tasks.len(), 1);
        assert_eq!(tasks[0].id, "RQ-0001");
    }

    #[test]
    fn parse_list_field_handles_delimiters() {
        let result = parse_list_field("a, b, , c", ',');
        assert_eq!(result, vec!["a", "b", "c"]);

        let result = parse_list_field("x; y; ; z", ';');
        assert_eq!(result, vec!["x", "y", "z"]);
    }

    #[test]
    fn parse_status_case_insensitive() {
        assert_eq!(parse_status("TODO").unwrap(), TaskStatus::Todo);
        assert_eq!(parse_status("Todo").unwrap(), TaskStatus::Todo);
        assert_eq!(parse_status("todo").unwrap(), TaskStatus::Todo);
        assert_eq!(parse_status("DONE").unwrap(), TaskStatus::Done);
        assert_eq!(parse_status("Rejected").unwrap(), TaskStatus::Rejected);
    }

    #[test]
    fn parse_csv_invalid_priority_uses_canonical_parser_error() {
        let csv = "title,priority\nTest,nope";
        let err = parse_csv_tasks(csv, b',').unwrap_err();

        // The CSV parser must surface TaskPriority's own FromStr error
        // rather than wrapping it in a bespoke message.
        let expected = "nope".parse::<TaskPriority>().unwrap_err().to_string();
        let msg = err.to_string();

        assert!(msg.contains(&expected), "err was: {msg}");
    }

    #[test]
    fn normalize_task_trims_fields() {
        let mut task = Task {
            id: "  RQ-0001  ".to_string(),
            title: "  Test  ".to_string(),
            description: None,
            tags: vec!["  a  ".to_string(), "".to_string(), "  b  ".to_string()],
            ..Default::default()
        };
        normalize_task(&mut task, "2026-01-01T00:00:00.000000000Z");
        assert_eq!(task.id, "RQ-0001");
        assert_eq!(task.title, "Test");
        assert_eq!(task.tags, vec!["a", "b"]);
    }

    #[test]
    fn normalize_task_backfills_timestamps() {
        let mut task = Task {
            id: "RQ-0001".to_string(),
            title: "Test".to_string(),
            description: None,
            status: TaskStatus::Todo,
            ..Default::default()
        };
        let now = "2026-01-01T00:00:00.000000000Z";
        normalize_task(&mut task, now);
        assert_eq!(task.created_at, Some(now.to_string()));
        assert_eq!(task.updated_at, Some(now.to_string()));
        // Non-terminal status: completed_at must stay unset.
        assert_eq!(task.completed_at, None);
    }

    #[test]
    fn normalize_task_backfills_completed_at_for_terminal() {
        let mut task = Task {
            id: "RQ-0001".to_string(),
            title: "Test".to_string(),
            description: None,
            status: TaskStatus::Done,
            ..Default::default()
        };
        let now = "2026-01-01T00:00:00.000000000Z";
        normalize_task(&mut task, now);
        assert_eq!(task.completed_at, Some(now.to_string()));

        let mut task2 = Task {
            id: "RQ-0002".to_string(),
            title: "Test".to_string(),
            description: None,
            status: TaskStatus::Rejected,
            ..Default::default()
        };
        normalize_task(&mut task2, now);
        assert_eq!(task2.completed_at, Some(now.to_string()));
    }

    #[test]
    fn import_report_summary_format() {
        let report = ImportReport {
            parsed: 5,
            imported: 3,
            skipped_duplicates: 1,
            renamed: 1,
            rename_mappings: vec![("OLD-001".to_string(), "RQ-0001".to_string())],
        };
        let summary = report.summary();
        assert!(summary.contains("parsed 5"));
        assert!(summary.contains("imported 3"));
        assert!(summary.contains("skipped 1"));
        assert!(summary.contains("renamed 1"));
        assert!(summary.contains("OLD-001 -> RQ-0001"));
    }

    #[test]
    fn merge_imported_tasks_rename_records_mapping() {
        // The queue already owns RQ-0001, so the imported duplicate must be
        // renamed and the (old, new) mapping recorded.
        let mut queue = QueueFile {
            version: 1,
            tasks: vec![Task {
                id: "RQ-0001".to_string(),
                title: "Existing".to_string(),
                description: None,
                status: TaskStatus::Todo,
                created_at: Some("2026-01-01T00:00:00Z".to_string()),
                updated_at: Some("2026-01-01T00:00:00Z".to_string()),
                ..Default::default()
            }],
        };

        let imported = vec![Task {
            id: "RQ-0001".to_string(),
            title: "Duplicate".to_string(),
            description: None,
            status: TaskStatus::Todo,
            created_at: Some("2026-01-02T00:00:00Z".to_string()),
            updated_at: Some("2026-01-02T00:00:00Z".to_string()),
            ..Default::default()
        }];

        let report = merge_imported_tasks(
            &mut queue,
            None,
            imported,
            "RQ",
            4,
            10,
            "2026-01-03T00:00:00Z",
            OnDuplicate::Rename,
        )
        .unwrap();

        assert_eq!(report.renamed, 1);
        assert_eq!(report.rename_mappings.len(), 1);
        assert_eq!(report.rename_mappings[0].0, "RQ-0001");
        assert!(report.rename_mappings[0].1.starts_with("RQ-"));
        assert_eq!(queue.tasks.len(), 2);
        assert!(queue.tasks.iter().any(|t| t.id == "RQ-0001"));
        let dup = queue
            .tasks
            .iter()
            .find(|t| t.title == "Duplicate")
            .expect("imported duplicate task");
        assert_ne!(dup.id, "RQ-0001");
    }
}