1use std::collections::BTreeMap;
2use std::fs;
3use std::path::{Component, Path, PathBuf};
4
5use serde::{Deserialize, Serialize};
6use sqlx::SqliteConnection;
7use time::OffsetDateTime;
8
9use crate::backend_config::{RemoteAgentConfigService, REMOTE_AGENT_SETTING_KEY};
10use crate::config::{ConfigService, RemoteAgentConfigFile, TrackConfigFile};
11use crate::database::DatabaseContext;
12use crate::dispatch_repository::DispatchRepository;
13use crate::errors::{ErrorCode, TrackError};
14use crate::migration::{
15 CleanupCandidate, LegacyScanSummary, MigrationImportSummary, MigrationState, MigrationStatus,
16 SkippedLegacyRecord,
17};
18use crate::paths::{
19 collapse_home_path, get_backend_managed_remote_agent_key_path,
20 get_backend_managed_remote_agent_known_hosts_path, get_legacy_config_path, get_legacy_root_dir,
21 path_to_string,
22};
23use crate::project_discovery::discover_projects_from_roots;
24use crate::project_repository::{infer_project_metadata, ProjectMetadata, ProjectRepository};
25use crate::review_dispatch_repository::ReviewDispatchRepository;
26use crate::review_repository::ReviewRepository;
27use crate::task_repository::FileTaskRepository;
28use crate::time_utils::parse_iso_8601_millis;
29use crate::types::{
30 Priority, ReviewRecord, ReviewRunRecord, Status, Task, TaskDispatchRecord, TaskSource,
31};
32
// Names of the directories/files that make up the legacy on-disk layout
// underneath the legacy root directory.
const LEGACY_ISSUES_DIR_NAME: &str = "issues";
const LEGACY_REVIEWS_DIR_NAME: &str = "reviews";
const LEGACY_REMOTE_AGENT_DIR_NAME: &str = "remote-agent";
// Per-project metadata file inside each legacy project directory.
const LEGACY_PROJECT_METADATA_FILE_NAME: &str = "PROJECT.md";
37
/// Migrates legacy on-disk track data (Markdown tasks, JSON dispatch and
/// review records, config, and managed remote-agent secret files) into the
/// SQLite-backed repositories.
pub struct MigrationService {
    // Shared database context; the whole import runs in one transaction on it.
    database: DatabaseContext,
    remote_agent_config_service: RemoteAgentConfigService,
    // Reads the legacy config file (best-effort) during scans.
    legacy_config_service: ConfigService,
    // Root directory of the legacy on-disk layout.
    legacy_root: PathBuf,
    project_repository: ProjectRepository,
    task_repository: FileTaskRepository,
    dispatch_repository: DispatchRepository,
    review_repository: ReviewRepository,
    review_dispatch_repository: ReviewDispatchRepository,
}
49
impl MigrationService {
    /// Builds the migration service around the repositories that own the
    /// backend state, resolving the legacy config path and legacy root
    /// directory up front.
    ///
    /// # Errors
    /// Fails when the legacy config path or legacy root directory cannot be
    /// resolved, or the legacy `ConfigService` cannot be constructed.
    pub fn new(
        remote_agent_config_service: RemoteAgentConfigService,
        project_repository: ProjectRepository,
        task_repository: FileTaskRepository,
        dispatch_repository: DispatchRepository,
        review_repository: ReviewRepository,
        review_dispatch_repository: ReviewDispatchRepository,
    ) -> Result<Self, TrackError> {
        Ok(Self {
            // Grab the shared database context before the repository is moved
            // into the struct below.
            database: project_repository.database_context(),
            remote_agent_config_service,
            legacy_config_service: ConfigService::new(Some(get_legacy_config_path()?))?,
            legacy_root: get_legacy_root_dir()?,
            project_repository,
            task_repository,
            dispatch_repository,
            review_repository,
            review_dispatch_repository,
        })
    }

    /// Reports whether a legacy import is finished, required, or unnecessary.
    ///
    /// Decision order:
    /// 1. A persisted `Imported` status is final and returned as-is.
    /// 2. A non-empty database means no migration applies (`ready`).
    /// 3. Otherwise a fresh legacy scan decides: nothing detected -> `ready`;
    ///    legacy data present -> `ImportRequired` carrying the scan results.
    pub fn status(&self) -> Result<MigrationStatus, TrackError> {
        let saved = self.remote_agent_config_service.load_migration_status()?;
        if saved.state == MigrationState::Imported {
            return Ok(saved);
        }

        if !self.database_is_empty()? {
            return Ok(MigrationStatus::ready());
        }

        let snapshot = self.scan_legacy()?;
        if !snapshot.legacy_detected {
            return Ok(MigrationStatus::ready());
        }

        Ok(MigrationStatus {
            state: MigrationState::ImportRequired,
            requires_migration: true,
            can_import: true,
            legacy_detected: true,
            summary: snapshot.summary,
            skipped_records: snapshot.skipped_records,
            cleanup_candidates: snapshot.cleanup_candidates,
        })
    }

    /// Imports all detected legacy data into the (required-to-be-empty)
    /// SQLite backend inside a single transaction, copies the managed
    /// remote-agent secret files, and persists an `Imported` migration status.
    ///
    /// On any failure the transaction rolls back and any secret files that
    /// were already copied are removed again.
    ///
    /// # Errors
    /// Fails when the database already contains data, when no legacy data is
    /// found, or when any individual import/copy step fails.
    pub fn import_legacy(&self) -> Result<MigrationImportSummary, TrackError> {
        if !self.database_is_empty()? {
            return Err(TrackError::new(
                ErrorCode::MigrationFailed,
                "The backend already contains data, so legacy import is only allowed into an empty SQLite database.",
            ));
        }

        let snapshot = self.scan_legacy()?;
        if !snapshot.legacy_detected {
            return Err(TrackError::new(
                ErrorCode::MigrationFailed,
                "No legacy track data was found to import.",
            ));
        }

        // Clone everything the transaction closure needs: it is `move` and
        // must own its captures.
        let imported_projects = snapshot.projects.clone();
        let imported_aliases = snapshot.aliases_by_project.clone();
        let imported_tasks = snapshot.tasks.clone();
        let imported_reviews = snapshot.reviews.clone();
        let imported_task_dispatches = snapshot.task_dispatches.clone();
        let imported_review_runs = snapshot.review_runs.clone();
        let imported_remote_agent_config = snapshot.remote_agent_config.clone();
        let skipped_records = snapshot.skipped_records.clone();
        let cleanup_candidates = snapshot.cleanup_candidates.clone();
        let summary = snapshot.summary.clone();
        let legacy_root = self.legacy_root.clone();

        self.database.transaction(move |connection| {
            Box::pin(async move {
                let mut copied_secret_files = Vec::new();
                // Inner async block so any failure falls through to the
                // secret-file cleanup below before the error is returned
                // (the database changes themselves roll back with the
                // transaction).
                let import_result = async {
                    // Projects first: tasks/dispatches reference them.
                    for project in &imported_projects {
                        let aliases = imported_aliases
                            .get(&project.canonical_name)
                            .cloned()
                            .unwrap_or_default();
                        import_project(connection, project, aliases).await?;
                    }

                    for task in &imported_tasks {
                        import_task(connection, task).await?;
                    }

                    for review in &imported_reviews {
                        import_review(connection, review).await?;
                    }

                    for dispatch in &imported_task_dispatches {
                        import_task_dispatch(connection, dispatch).await?;
                    }

                    for review_run in &imported_review_runs {
                        import_review_run(connection, review_run).await?;
                    }

                    if let Some(remote_agent) = imported_remote_agent_config.as_ref() {
                        save_backend_setting_json(
                            connection,
                            REMOTE_AGENT_SETTING_KEY,
                            remote_agent,
                        )
                        .await?;
                    }

                    // Copy secrets while still inside the transaction so a
                    // copy failure aborts the whole import.
                    copied_secret_files = copy_remote_agent_secret_files(&legacy_root)?;

                    let imported_summary = MigrationImportSummary {
                        imported_projects: imported_projects.len(),
                        imported_aliases: imported_aliases.values().map(Vec::len).sum(),
                        imported_tasks: imported_tasks.len(),
                        imported_task_dispatches: imported_task_dispatches.len(),
                        imported_reviews: imported_reviews.len(),
                        imported_review_runs: imported_review_runs.len(),
                        remote_agent_config_imported: imported_remote_agent_config.is_some(),
                        copied_secret_files: copied_secret_files
                            .iter()
                            .map(|path| path_to_string(path))
                            .collect(),
                        skipped_records: skipped_records.clone(),
                        cleanup_candidates: cleanup_candidates.clone(),
                    };

                    // Persist the final status so later `status()` calls
                    // short-circuit to `Imported`.
                    save_backend_setting_json(
                        connection,
                        crate::migration::MIGRATION_STATUS_SETTING_KEY,
                        &MigrationStatus {
                            state: MigrationState::Imported,
                            requires_migration: false,
                            can_import: false,
                            legacy_detected: true,
                            summary,
                            skipped_records: imported_summary.skipped_records.clone(),
                            cleanup_candidates: imported_summary.cleanup_candidates.clone(),
                        },
                    )
                    .await?;

                    Ok(imported_summary)
                }
                .await;

                // Secret files live outside the database, so roll them back
                // manually when the import failed.
                if import_result.is_err() {
                    cleanup_copied_secret_files(&copied_secret_files);
                }

                import_result
            })
        })
    }

    /// True when no projects, tasks, reviews, task dispatches, review
    /// dispatches, or remote-agent config exist in the backend yet.
    fn database_is_empty(&self) -> Result<bool, TrackError> {
        Ok(self.project_repository.list_projects()?.is_empty()
            && self.task_repository.list_tasks(true, None)?.is_empty()
            && self.review_repository.list_reviews()?.is_empty()
            && self
                .dispatch_repository
                .list_dispatches(Some(1))?
                .is_empty()
            && self
                .review_dispatch_repository
                .list_dispatches(Some(1))?
                .is_empty()
            && self
                .remote_agent_config_service
                .load_remote_agent_config()?
                .is_none())
    }

    /// Scans the legacy layout and builds an in-memory snapshot of everything
    /// importable, recording unreadable/invalid records in `skipped_records`
    /// instead of failing the whole scan (directory-level I/O errors still
    /// fail hard).
    fn scan_legacy(&self) -> Result<LegacyImportSnapshot, TrackError> {
        let issues_dir = self.legacy_root.join(LEGACY_ISSUES_DIR_NAME);
        let reviews_dir = self.legacy_root.join(LEGACY_REVIEWS_DIR_NAME);
        let task_dispatches_dir = issues_dir.join(".dispatches");
        let review_dispatches_dir = reviews_dir.join(".dispatches");
        // Best-effort: a missing/unreadable legacy config is simply `None`.
        let legacy_config = load_legacy_config(&self.legacy_config_service);
        let mut snapshot = LegacyImportSnapshot {
            // Any one artifact is enough to consider legacy data "present".
            legacy_detected: issues_dir.exists()
                || reviews_dir.exists()
                || self.legacy_root.join(LEGACY_REMOTE_AGENT_DIR_NAME).exists()
                || legacy_config.is_some(),
            aliases_by_project: BTreeMap::new(),
            projects: Vec::new(),
            tasks: Vec::new(),
            task_dispatches: Vec::new(),
            reviews: Vec::new(),
            review_runs: Vec::new(),
            remote_agent_config: legacy_config
                .as_ref()
                .and_then(|config| config.remote_agent.clone()),
            skipped_records: Vec::new(),
            cleanup_candidates: build_cleanup_candidates(
                &self.legacy_root,
                self.legacy_config_service.resolved_path(),
            ),
            summary: LegacyScanSummary::default(),
        };

        if issues_dir.is_dir() {
            // Each subdirectory of `issues/` is one legacy project.
            for entry in fs::read_dir(&issues_dir).map_err(|error| {
                TrackError::new(
                    ErrorCode::MigrationFailed,
                    format!(
                        "Could not read the legacy issues directory at {}: {error}",
                        path_to_string(&issues_dir)
                    ),
                )
            })? {
                let entry = entry.map_err(|error| {
                    TrackError::new(
                        ErrorCode::MigrationFailed,
                        format!(
                            "Could not read a legacy project entry under {}: {error}",
                            path_to_string(&issues_dir)
                        ),
                    )
                })?;
                let path = entry.path();
                if !path.is_dir() {
                    continue;
                }
                let Some(project_name) = path.file_name().and_then(|value| value.to_str()) else {
                    continue;
                };
                // Hidden entries (e.g. `.dispatches`) are not projects.
                if project_name.starts_with('.') {
                    continue;
                }

                // Reject directory names that are not a single safe path
                // component; skip (with a record) rather than abort.
                let canonical_name =
                    match crate::path_component::validate_single_normal_path_component(
                        project_name,
                        "Legacy project name",
                        ErrorCode::InvalidPathComponent,
                    ) {
                        Ok(project_name) => project_name,
                        Err(error) => {
                            snapshot.skipped_records.push(SkippedLegacyRecord {
                                kind: "project".to_owned(),
                                path: path_to_string(&path),
                                error: error.to_string(),
                            });
                            continue;
                        }
                    };

                // Unreadable metadata degrades to blank metadata, with the
                // failure noted in skipped_records.
                let metadata = read_legacy_project_metadata(&path).unwrap_or_else(|error| {
                    snapshot.skipped_records.push(SkippedLegacyRecord {
                        kind: "project_metadata".to_owned(),
                        path: path_to_string(&path.join(LEGACY_PROJECT_METADATA_FILE_NAME)),
                        error: error.to_string(),
                    });
                    blank_project_metadata()
                });

                snapshot.projects.push(LegacyProjectImport {
                    canonical_name: canonical_name.clone(),
                    metadata,
                });

                // Tasks are stored per status subdirectory (open/closed).
                for status in [Status::Open, Status::Closed] {
                    let status_dir = path.join(status.as_str());
                    if !status_dir.is_dir() {
                        continue;
                    }
                    for task_entry in fs::read_dir(&status_dir).map_err(|error| {
                        TrackError::new(
                            ErrorCode::MigrationFailed,
                            format!(
                                "Could not read the legacy task directory at {}: {error}",
                                path_to_string(&status_dir)
                            ),
                        )
                    })? {
                        let task_entry = task_entry.map_err(|error| {
                            TrackError::new(
                                ErrorCode::MigrationFailed,
                                format!(
                                    "Could not read a legacy task entry under {}: {error}",
                                    path_to_string(&status_dir)
                                ),
                            )
                        })?;
                        let task_path = task_entry.path();
                        if !task_path.is_file() {
                            continue;
                        }
                        match read_legacy_task_file(&issues_dir, &task_path) {
                            Ok(task) => snapshot.tasks.push(task),
                            Err(error) => snapshot.skipped_records.push(SkippedLegacyRecord {
                                kind: "task".to_owned(),
                                path: path_to_string(&task_path),
                                error: error.to_string(),
                            }),
                        }
                    }
                }
            }
        }

        if task_dispatches_dir.is_dir() {
            snapshot.task_dispatches = read_json_directory_tree::<TaskDispatchRecord>(
                &task_dispatches_dir,
                "task_dispatch",
                &mut snapshot.skipped_records,
            )?;
        }

        if reviews_dir.is_dir() {
            snapshot.reviews = read_json_directory_flat::<ReviewRecord>(
                &reviews_dir,
                "review",
                &mut snapshot.skipped_records,
            )?;
        }

        if review_dispatches_dir.is_dir() {
            snapshot.review_runs = read_json_directory_tree::<ReviewRunRecord>(
                &review_dispatches_dir,
                "review_run",
                &mut snapshot.skipped_records,
            )?;
        }

        // Projects can also come from configured project roots; aliases only
        // attach to projects that actually made it into the snapshot.
        if let Some(config) = legacy_config.as_ref() {
            merge_discovered_legacy_projects(
                &mut snapshot,
                config,
                self.legacy_config_service.resolved_path(),
                legacy_home_dir(&self.legacy_root),
            )?;
            attach_legacy_project_aliases(&mut snapshot, &config.project_aliases);
        }

        for aliases in snapshot.aliases_by_project.values_mut() {
            aliases.sort();
            aliases.dedup();
        }

        // Drop dispatches/review runs whose parent record is missing, then
        // finalize ordering and counts.
        filter_orphaned_history(&mut snapshot);
        snapshot
            .projects
            .sort_by(|left, right| left.canonical_name.cmp(&right.canonical_name));
        snapshot.summary.projects_found = snapshot.projects.len();
        snapshot.summary.aliases_found = snapshot.aliases_by_project.values().map(Vec::len).sum();
        snapshot.summary.tasks_found = snapshot.tasks.len();
        snapshot.summary.task_dispatches_found = snapshot.task_dispatches.len();
        snapshot.summary.reviews_found = snapshot.reviews.len();
        snapshot.summary.review_runs_found = snapshot.review_runs.len();
        snapshot.summary.remote_agent_configured = snapshot.remote_agent_config.is_some();

        Ok(snapshot)
    }
}
414
415fn filter_orphaned_history(snapshot: &mut LegacyImportSnapshot) {
416 let imported_task_ids = snapshot
417 .tasks
418 .iter()
419 .map(|task| task.id.clone())
420 .collect::<std::collections::BTreeSet<_>>();
421 snapshot.task_dispatches.retain(|dispatch| {
422 if imported_task_ids.contains(&dispatch.task_id) {
423 return true;
424 }
425
426 snapshot.skipped_records.push(SkippedLegacyRecord {
427 kind: "task_dispatch".to_owned(),
428 path: dispatch.dispatch_id.clone(),
429 error: format!(
430 "Task dispatch references missing task {} and cannot be imported.",
431 dispatch.task_id
432 ),
433 });
434 false
435 });
436
437 let imported_review_ids = snapshot
438 .reviews
439 .iter()
440 .map(|review| review.id.clone())
441 .collect::<std::collections::BTreeSet<_>>();
442 snapshot.review_runs.retain(|review_run| {
443 if imported_review_ids.contains(&review_run.review_id) {
444 return true;
445 }
446
447 snapshot.skipped_records.push(SkippedLegacyRecord {
448 kind: "review_run".to_owned(),
449 path: review_run.dispatch_id.clone(),
450 error: format!(
451 "Review run references missing review {} and cannot be imported.",
452 review_run.review_id
453 ),
454 });
455 false
456 });
457}
458
459fn merge_discovered_legacy_projects(
460 snapshot: &mut LegacyImportSnapshot,
461 config: &TrackConfigFile,
462 legacy_config_path: &Path,
463 legacy_home_dir: &Path,
464) -> Result<(), TrackError> {
465 let project_roots = config
471 .project_roots
472 .iter()
473 .map(|value| {
474 resolve_legacy_path_from_config_file(value, legacy_config_path, legacy_home_dir)
475 })
476 .collect::<Result<Vec<_>, _>>()?;
477 let discovered_projects =
478 discover_projects_from_roots(&project_roots, &config.project_aliases)?;
479
480 let mut imported_project_names = snapshot
481 .projects
482 .iter()
483 .map(|project| project.canonical_name.to_lowercase())
484 .collect::<std::collections::BTreeSet<_>>();
485
486 for project in discovered_projects.into_projects() {
487 let project_key = project.canonical_name.to_lowercase();
488 if !imported_project_names.insert(project_key) {
489 continue;
490 }
491
492 snapshot.projects.push(LegacyProjectImport {
493 canonical_name: project.canonical_name.clone(),
494 metadata: infer_project_metadata(&project),
495 });
496 }
497
498 Ok(())
499}
500
501fn attach_legacy_project_aliases(
502 snapshot: &mut LegacyImportSnapshot,
503 configured_aliases: &BTreeMap<String, String>,
504) {
505 let imported_project_names = snapshot
509 .projects
510 .iter()
511 .map(|project| {
512 (
513 project.canonical_name.to_lowercase(),
514 project.canonical_name.clone(),
515 )
516 })
517 .collect::<BTreeMap<_, _>>();
518
519 for (alias, configured_canonical_name) in configured_aliases {
520 if let Some(imported_canonical_name) =
521 imported_project_names.get(&configured_canonical_name.to_lowercase())
522 {
523 snapshot
524 .aliases_by_project
525 .entry(imported_canonical_name.clone())
526 .or_default()
527 .push(alias.clone());
528 continue;
529 }
530
531 snapshot.skipped_records.push(SkippedLegacyRecord {
532 kind: "project_alias".to_owned(),
533 path: format!("{alias} -> {configured_canonical_name}"),
534 error: format!(
535 "Legacy alias {alias} points to {configured_canonical_name}, but that project was not present in legacy issues or discovered from configured project roots and will not be imported."
536 ),
537 });
538 }
539}
540
/// Returns the directory containing the legacy root (treated as the legacy
/// home directory), falling back to the root itself when it has no parent
/// (e.g. `/`).
fn legacy_home_dir(legacy_root: &Path) -> &Path {
    match legacy_root.parent() {
        Some(parent) => parent,
        None => legacy_root,
    }
}
544
545fn resolve_legacy_path_from_config_file(
546 path_value: &str,
547 file_path: &Path,
548 legacy_home_dir: &Path,
549) -> Result<PathBuf, TrackError> {
550 let base_dir = file_path.parent().ok_or_else(|| {
551 TrackError::new(
552 ErrorCode::InvalidConfig,
553 format!(
554 "Could not resolve a configured path relative to legacy config file {}.",
555 collapse_home_path(file_path)
556 ),
557 )
558 })?;
559
560 let expanded = match path_value {
561 "~" => legacy_home_dir.to_path_buf(),
562 value if value.starts_with("~/") => legacy_home_dir.join(&value[2..]),
563 value => PathBuf::from(value),
564 };
565
566 if expanded.is_absolute() {
567 return Ok(expanded);
568 }
569
570 Ok(base_dir.join(expanded))
571}
572
/// Everything a legacy scan found: the records to import, the records that
/// had to be skipped (with reasons), and display/cleanup metadata.
#[derive(Debug, Clone)]
struct LegacyImportSnapshot {
    // True when any legacy artifact (issues/, reviews/, remote-agent/, or the
    // legacy config file) exists.
    legacy_detected: bool,
    // Canonical project name -> aliases configured for it.
    aliases_by_project: BTreeMap<String, Vec<String>>,
    projects: Vec<LegacyProjectImport>,
    tasks: Vec<Task>,
    task_dispatches: Vec<TaskDispatchRecord>,
    reviews: Vec<ReviewRecord>,
    review_runs: Vec<ReviewRunRecord>,
    // Remote-agent settings lifted from the legacy config file, if present.
    remote_agent_config: Option<RemoteAgentConfigFile>,
    // Records that could not be imported, with human-readable reasons.
    skipped_records: Vec<SkippedLegacyRecord>,
    // Legacy paths that become obsolete after a successful import.
    cleanup_candidates: Vec<CleanupCandidate>,
    summary: LegacyScanSummary,
}
587
/// One project to create during import: its validated canonical name plus
/// metadata read from the legacy project directory (or blank/inferred
/// defaults when that read failed).
#[derive(Debug, Clone)]
struct LegacyProjectImport {
    canonical_name: String,
    metadata: ProjectMetadata,
}
593
594fn load_legacy_config(config_service: &ConfigService) -> Option<TrackConfigFile> {
595 config_service.load_config_file().ok()
596}
597
598fn build_cleanup_candidates(
599 legacy_root: &Path,
600 legacy_config_path: &Path,
601) -> Vec<CleanupCandidate> {
602 let cleanup_targets = [
603 (
604 legacy_config_path.to_path_buf(),
605 "Legacy shared config replaced by the CLI-only config file.",
606 ),
607 (
608 legacy_root.join(LEGACY_ISSUES_DIR_NAME),
609 "Legacy Markdown tasks were imported into the SQLite backend.",
610 ),
611 (
612 legacy_root.join(LEGACY_REVIEWS_DIR_NAME),
613 "Legacy review records were imported into the SQLite backend.",
614 ),
615 (
616 legacy_root.join(LEGACY_REMOTE_AGENT_DIR_NAME),
617 "Legacy managed remote-agent secrets were copied into backend state.",
618 ),
619 ];
620
621 let mut candidates = Vec::new();
622 for (path, reason) in cleanup_targets {
623 if !path.exists() {
624 continue;
625 }
626
627 candidates.push(CleanupCandidate {
628 path: display_cleanup_candidate_path(&path),
629 reason: reason.to_owned(),
630 });
631 }
632
633 candidates
634}
635
636fn display_cleanup_candidate_path(path: &Path) -> String {
637 let legacy_home_mount = Path::new("/home/track/legacy-home");
638 if let Ok(relative) = path.strip_prefix(legacy_home_mount) {
639 if relative.as_os_str().is_empty() {
640 return "~".to_owned();
641 }
642
643 return format!("~/{}", path_to_string(relative).trim_start_matches('/'));
644 }
645
646 collapse_home_path(path)
647}
648
/// Copies the legacy managed remote-agent secret files (`id_ed25519`,
/// `known_hosts`) into the backend-managed locations, returning the
/// destination paths that were actually written.
///
/// Missing sources are skipped silently. On any error, the files copied so
/// far are removed before the error is returned, so this function never
/// leaves a partial copy behind.
fn copy_remote_agent_secret_files(legacy_root: &Path) -> Result<Vec<PathBuf>, TrackError> {
    let legacy_remote_agent_dir = legacy_root.join(LEGACY_REMOTE_AGENT_DIR_NAME);
    if !legacy_remote_agent_dir.is_dir() {
        return Ok(Vec::new());
    }

    let mut copied = Vec::new();
    // (legacy source, backend destination) pairs.
    let targets = [
        (
            legacy_remote_agent_dir.join("id_ed25519"),
            get_backend_managed_remote_agent_key_path()?,
        ),
        (
            legacy_remote_agent_dir.join("known_hosts"),
            get_backend_managed_remote_agent_known_hosts_path()?,
        ),
    ];

    for (source, destination) in targets {
        if !source.exists() {
            continue;
        }

        if let Some(parent) = destination.parent() {
            fs::create_dir_all(parent).map_err(|error| {
                // Roll back earlier copies before surfacing the error.
                cleanup_copied_secret_files(&copied);
                TrackError::new(
                    ErrorCode::MigrationFailed,
                    format!(
                        "Could not create the backend secrets directory at {}: {error}",
                        path_to_string(parent)
                    ),
                )
            })?;
        }

        fs::copy(&source, &destination).map_err(|error| {
            // Roll back earlier copies before surfacing the error.
            cleanup_copied_secret_files(&copied);
            TrackError::new(
                ErrorCode::MigrationFailed,
                format!(
                    "Could not copy the legacy secret file from {} to {}: {error}",
                    path_to_string(&source),
                    path_to_string(&destination)
                ),
            )
        })?;
        copied.push(destination);
    }

    Ok(copied)
}
701
/// Best-effort removal of secret files copied before a failed import;
/// individual removal errors are deliberately ignored.
fn cleanup_copied_secret_files(paths: &[PathBuf]) {
    paths.iter().for_each(|path| {
        let _ = fs::remove_file(path);
    });
}
707
/// Upserts one legacy project row and replaces its alias set.
///
/// The canonical name is re-validated as a single normal path component so a
/// hostile legacy directory name can never reach the database. Aliases are
/// replaced wholesale: existing alias rows for the project are deleted first,
/// then the provided aliases are inserted.
async fn import_project(
    connection: &mut SqliteConnection,
    project: &LegacyProjectImport,
    aliases: Vec<String>,
) -> Result<(), TrackError> {
    let canonical_name = crate::path_component::validate_single_normal_path_component(
        &project.canonical_name,
        "Project canonical name",
        ErrorCode::InvalidPathComponent,
    )?;

    // Upsert keyed on canonical_name; a re-import overwrites metadata.
    sqlx::query(
        r#"
        INSERT INTO projects (canonical_name, repo_url, git_url, base_branch, description)
        VALUES (?1, ?2, ?3, ?4, ?5)
        ON CONFLICT(canonical_name) DO UPDATE SET
            repo_url = excluded.repo_url,
            git_url = excluded.git_url,
            base_branch = excluded.base_branch,
            description = excluded.description
        "#,
    )
    .bind(&canonical_name)
    .bind(&project.metadata.repo_url)
    .bind(&project.metadata.git_url)
    .bind(&project.metadata.base_branch)
    .bind(project.metadata.description.as_deref())
    .execute(&mut *connection)
    .await
    .map_err(|error| {
        TrackError::new(
            ErrorCode::ProjectWriteFailed,
            format!("Could not import project {canonical_name}: {error}"),
        )
    })?;

    // Clear any existing aliases so the imported set is authoritative.
    sqlx::query("DELETE FROM project_aliases WHERE canonical_name = ?1")
        .bind(&canonical_name)
        .execute(&mut *connection)
        .await
        .map_err(|error| {
            TrackError::new(
                ErrorCode::ProjectWriteFailed,
                format!("Could not replace project aliases for {canonical_name}: {error}"),
            )
        })?;

    for alias in aliases {
        sqlx::query(
            r#"
            INSERT INTO project_aliases (canonical_name, alias)
            VALUES (?1, ?2)
            "#,
        )
        .bind(&canonical_name)
        .bind(&alias)
        .execute(&mut *connection)
        .await
        .map_err(|error| {
            TrackError::new(
                ErrorCode::ProjectWriteFailed,
                format!("Could not save the alias {alias} for project {canonical_name}: {error}"),
            )
        })?;
    }

    Ok(())
}
776
/// Upserts one legacy task row (keyed on `id`), preserving its original
/// created/updated timestamps.
async fn import_task(connection: &mut SqliteConnection, task: &Task) -> Result<(), TrackError> {
    sqlx::query(
        r#"
        INSERT INTO tasks (id, project, priority, status, description, created_at, updated_at, source)
        VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8)
        ON CONFLICT(id) DO UPDATE SET
            project = excluded.project,
            priority = excluded.priority,
            status = excluded.status,
            description = excluded.description,
            created_at = excluded.created_at,
            updated_at = excluded.updated_at,
            source = excluded.source
        "#,
    )
    .bind(&task.id)
    .bind(&task.project)
    .bind(task.priority.as_str())
    .bind(task.status.as_str())
    .bind(&task.description)
    // Timestamps are stored as ISO-8601 strings with millisecond precision.
    .bind(crate::time_utils::format_iso_8601_millis(task.created_at))
    .bind(crate::time_utils::format_iso_8601_millis(task.updated_at))
    .bind(task.source.map(task_source_as_str))
    .execute(&mut *connection)
    .await
    .map_err(|error| {
        TrackError::new(
            ErrorCode::TaskWriteFailed,
            format!("Could not import task {}: {error}", task.id),
        )
    })?;

    Ok(())
}
811
/// Upserts one legacy review row (keyed on `id`), preserving its original
/// timestamps.
async fn import_review(
    connection: &mut SqliteConnection,
    review: &ReviewRecord,
) -> Result<(), TrackError> {
    sqlx::query(
        r#"
        INSERT INTO reviews (
            id, pull_request_url, pull_request_number, pull_request_title,
            repository_full_name, repo_url, git_url, base_branch, workspace_key,
            project, main_user, default_review_prompt, extra_instructions,
            created_at, updated_at
        )
        VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14, ?15)
        ON CONFLICT(id) DO UPDATE SET
            pull_request_url = excluded.pull_request_url,
            pull_request_number = excluded.pull_request_number,
            pull_request_title = excluded.pull_request_title,
            repository_full_name = excluded.repository_full_name,
            repo_url = excluded.repo_url,
            git_url = excluded.git_url,
            base_branch = excluded.base_branch,
            workspace_key = excluded.workspace_key,
            project = excluded.project,
            main_user = excluded.main_user,
            default_review_prompt = excluded.default_review_prompt,
            extra_instructions = excluded.extra_instructions,
            created_at = excluded.created_at,
            updated_at = excluded.updated_at
        "#,
    )
    .bind(&review.id)
    // Widened to i64 because SQLite integers are 64-bit.
    .bind(review.pull_request_number as i64)
    .bind(&review.pull_request_title)
    .bind(&review.repository_full_name)
    .bind(&review.repo_url)
    .bind(&review.git_url)
    .bind(&review.base_branch)
    .bind(&review.workspace_key)
    .bind(review.project.as_deref())
    .bind(&review.main_user)
    .bind(review.default_review_prompt.as_deref())
    .bind(review.extra_instructions.as_deref())
    .bind(crate::time_utils::format_iso_8601_millis(review.created_at))
    .bind(crate::time_utils::format_iso_8601_millis(review.updated_at))
    .execute(&mut *connection)
    .await
    .map_err(|error| {
        TrackError::new(
            ErrorCode::TaskWriteFailed,
            format!("Could not import review {}: {error}", review.id),
        )
    })?;

    Ok(())
}
868
/// Upserts one legacy task-dispatch row (keyed on `dispatch_id`).
///
/// Both the dispatch id and the referenced task id are validated as single
/// normal path components before touching the database.
async fn import_task_dispatch(
    connection: &mut SqliteConnection,
    dispatch: &TaskDispatchRecord,
) -> Result<(), TrackError> {
    crate::path_component::validate_single_normal_path_component(
        &dispatch.dispatch_id,
        "Dispatch id",
        ErrorCode::InvalidPathComponent,
    )?;
    crate::path_component::validate_single_normal_path_component(
        &dispatch.task_id,
        "Task id",
        ErrorCode::InvalidPathComponent,
    )?;

    sqlx::query(
        r#"
        INSERT INTO task_dispatches (
            dispatch_id, task_id, project, status, created_at, updated_at, finished_at,
            remote_host, branch_name, worktree_path, pull_request_url, follow_up_request,
            summary, notes, error_message, review_request_head_oid, review_request_user
        )
        VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14, ?15, ?16, ?17)
        ON CONFLICT(dispatch_id) DO UPDATE SET
            task_id = excluded.task_id,
            project = excluded.project,
            status = excluded.status,
            created_at = excluded.created_at,
            updated_at = excluded.updated_at,
            finished_at = excluded.finished_at,
            remote_host = excluded.remote_host,
            branch_name = excluded.branch_name,
            worktree_path = excluded.worktree_path,
            pull_request_url = excluded.pull_request_url,
            follow_up_request = excluded.follow_up_request,
            summary = excluded.summary,
            notes = excluded.notes,
            error_message = excluded.error_message,
            review_request_head_oid = excluded.review_request_head_oid,
            review_request_user = excluded.review_request_user
        "#,
    )
    .bind(&dispatch.dispatch_id)
    .bind(&dispatch.task_id)
    .bind(&dispatch.project)
    .bind(dispatch.status.as_str())
    .bind(crate::time_utils::format_iso_8601_millis(
        dispatch.created_at,
    ))
    .bind(crate::time_utils::format_iso_8601_millis(
        dispatch.updated_at,
    ))
    // finished_at is optional: NULL while the dispatch is still running.
    .bind(
        dispatch
            .finished_at
            .map(crate::time_utils::format_iso_8601_millis),
    )
    .bind(&dispatch.remote_host)
    .bind(dispatch.branch_name.as_deref())
    .bind(dispatch.worktree_path.as_deref())
    .bind(dispatch.pull_request_url.as_deref())
    .bind(dispatch.follow_up_request.as_deref())
    .bind(dispatch.summary.as_deref())
    .bind(dispatch.notes.as_deref())
    .bind(dispatch.error_message.as_deref())
    .bind(dispatch.review_request_head_oid.as_deref())
    .bind(dispatch.review_request_user.as_deref())
    .execute(&mut *connection)
    .await
    .map_err(|error| {
        TrackError::new(
            ErrorCode::DispatchWriteFailed,
            format!(
                "Could not import the dispatch record for task {}: {error}",
                dispatch.task_id
            ),
        )
    })?;

    Ok(())
}
950
/// Upserts one legacy review-run row (keyed on `dispatch_id`).
///
/// Both the review id and the dispatch id are validated as single normal
/// path components before touching the database.
async fn import_review_run(
    connection: &mut SqliteConnection,
    review_run: &ReviewRunRecord,
) -> Result<(), TrackError> {
    crate::path_component::validate_single_normal_path_component(
        &review_run.review_id,
        "Review id",
        ErrorCode::InvalidPathComponent,
    )?;
    crate::path_component::validate_single_normal_path_component(
        &review_run.dispatch_id,
        "Dispatch id",
        ErrorCode::InvalidPathComponent,
    )?;

    sqlx::query(
        r#"
        INSERT INTO review_runs (
            dispatch_id, review_id, pull_request_url, repository_full_name,
            workspace_key, status, created_at, updated_at, finished_at,
            remote_host, branch_name, worktree_path, follow_up_request,
            target_head_oid, summary, review_submitted, github_review_id,
            github_review_url, notes, error_message
        )
        VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14, ?15, ?16, ?17, ?18, ?19, ?20)
        ON CONFLICT(dispatch_id) DO UPDATE SET
            review_id = excluded.review_id,
            pull_request_url = excluded.pull_request_url,
            repository_full_name = excluded.repository_full_name,
            workspace_key = excluded.workspace_key,
            status = excluded.status,
            created_at = excluded.created_at,
            updated_at = excluded.updated_at,
            finished_at = excluded.finished_at,
            remote_host = excluded.remote_host,
            branch_name = excluded.branch_name,
            worktree_path = excluded.worktree_path,
            follow_up_request = excluded.follow_up_request,
            target_head_oid = excluded.target_head_oid,
            summary = excluded.summary,
            review_submitted = excluded.review_submitted,
            github_review_id = excluded.github_review_id,
            github_review_url = excluded.github_review_url,
            notes = excluded.notes,
            error_message = excluded.error_message
        "#,
    )
    .bind(&review_run.dispatch_id)
    .bind(&review_run.review_id)
    .bind(&review_run.pull_request_url)
    .bind(&review_run.repository_full_name)
    .bind(&review_run.workspace_key)
    .bind(review_run.status.as_str())
    .bind(crate::time_utils::format_iso_8601_millis(review_run.created_at))
    .bind(crate::time_utils::format_iso_8601_millis(review_run.updated_at))
    // finished_at is optional: NULL while the run is still in progress.
    .bind(review_run.finished_at.map(crate::time_utils::format_iso_8601_millis))
    .bind(&review_run.remote_host)
    .bind(review_run.branch_name.as_deref())
    .bind(review_run.worktree_path.as_deref())
    .bind(review_run.follow_up_request.as_deref())
    .bind(review_run.target_head_oid.as_deref())
    .bind(review_run.summary.as_deref())
    // Boolean stored as a SQLite integer (0/1).
    .bind(review_run.review_submitted as i64)
    .bind(review_run.github_review_id.as_deref())
    .bind(review_run.github_review_url.as_deref())
    .bind(review_run.notes.as_deref())
    .bind(review_run.error_message.as_deref())
    .execute(&mut *connection)
    .await
    .map_err(|error| {
        TrackError::new(
            ErrorCode::DispatchWriteFailed,
            format!(
                "Could not import the review run record for review {}: {error}",
                review_run.review_id
            ),
        )
    })?;

    Ok(())
}
1032
/// Serializes `value` as JSON and upserts it into `backend_settings` under
/// `key`.
///
/// # Errors
/// Fails when serialization fails (`InvalidConfig`) or the upsert fails
/// (`TaskWriteFailed`).
async fn save_backend_setting_json<T>(
    connection: &mut SqliteConnection,
    key: &str,
    value: &T,
) -> Result<(), TrackError>
where
    T: Serialize,
{
    let serialized = serde_json::to_string(value).map_err(|error| {
        TrackError::new(
            ErrorCode::InvalidConfig,
            format!("Could not serialize backend setting `{key}`: {error}"),
        )
    })?;

    sqlx::query(
        r#"
        INSERT INTO backend_settings (setting_key, setting_json)
        VALUES (?1, ?2)
        ON CONFLICT(setting_key) DO UPDATE SET setting_json = excluded.setting_json
        "#,
    )
    .bind(key)
    .bind(&serialized)
    .execute(&mut *connection)
    .await
    .map_err(|error| {
        TrackError::new(
            ErrorCode::TaskWriteFailed,
            format!("Could not save backend setting `{key}`: {error}"),
        )
    })?;

    Ok(())
}
1068
1069fn task_source_as_str(source: TaskSource) -> &'static str {
1070 match source {
1071 TaskSource::Cli => "cli",
1072 TaskSource::Web => "web",
1073 }
1074}
1075
1076fn read_json_directory_tree<T>(
1077 root: &Path,
1078 kind: &str,
1079 skipped_records: &mut Vec<SkippedLegacyRecord>,
1080) -> Result<Vec<T>, TrackError>
1081where
1082 T: for<'de> Deserialize<'de>,
1083{
1084 let mut records = Vec::new();
1085 for parent in fs::read_dir(root).map_err(|error| {
1086 TrackError::new(
1087 ErrorCode::MigrationFailed,
1088 format!(
1089 "Could not read the legacy directory at {}: {error}",
1090 path_to_string(root)
1091 ),
1092 )
1093 })? {
1094 let parent = parent.map_err(|error| {
1095 TrackError::new(
1096 ErrorCode::MigrationFailed,
1097 format!(
1098 "Could not read a legacy entry under {}: {error}",
1099 path_to_string(root)
1100 ),
1101 )
1102 })?;
1103 let parent_path = parent.path();
1104 if !parent_path.is_dir() {
1105 continue;
1106 }
1107
1108 for child in fs::read_dir(&parent_path).map_err(|error| {
1109 TrackError::new(
1110 ErrorCode::MigrationFailed,
1111 format!(
1112 "Could not read the legacy directory at {}: {error}",
1113 path_to_string(&parent_path)
1114 ),
1115 )
1116 })? {
1117 let child = child.map_err(|error| {
1118 TrackError::new(
1119 ErrorCode::MigrationFailed,
1120 format!(
1121 "Could not read a legacy entry under {}: {error}",
1122 path_to_string(&parent_path)
1123 ),
1124 )
1125 })?;
1126 let child_path = child.path();
1127 if !child_path.is_file() {
1128 continue;
1129 }
1130 match fs::read_to_string(&child_path)
1131 .map_err(|error| {
1132 TrackError::new(
1133 ErrorCode::MigrationFailed,
1134 format!(
1135 "Could not read the legacy file at {}: {error}",
1136 path_to_string(&child_path)
1137 ),
1138 )
1139 })
1140 .and_then(|raw| {
1141 serde_json::from_str::<T>(&raw).map_err(|error| {
1142 TrackError::new(
1143 ErrorCode::MigrationFailed,
1144 format!(
1145 "Legacy JSON at {} is malformed: {error}",
1146 path_to_string(&child_path)
1147 ),
1148 )
1149 })
1150 }) {
1151 Ok(record) => records.push(record),
1152 Err(error) => skipped_records.push(SkippedLegacyRecord {
1153 kind: kind.to_owned(),
1154 path: path_to_string(&child_path),
1155 error: error.to_string(),
1156 }),
1157 }
1158 }
1159 }
1160
1161 Ok(records)
1162}
1163
1164fn read_json_directory_flat<T>(
1165 root: &Path,
1166 kind: &str,
1167 skipped_records: &mut Vec<SkippedLegacyRecord>,
1168) -> Result<Vec<T>, TrackError>
1169where
1170 T: for<'de> Deserialize<'de>,
1171{
1172 let mut records = Vec::new();
1173 for entry in fs::read_dir(root).map_err(|error| {
1174 TrackError::new(
1175 ErrorCode::MigrationFailed,
1176 format!(
1177 "Could not read the legacy directory at {}: {error}",
1178 path_to_string(root)
1179 ),
1180 )
1181 })? {
1182 let entry = entry.map_err(|error| {
1183 TrackError::new(
1184 ErrorCode::MigrationFailed,
1185 format!(
1186 "Could not read a legacy entry under {}: {error}",
1187 path_to_string(root)
1188 ),
1189 )
1190 })?;
1191 let path = entry.path();
1192 if !path.is_file() || path.extension().and_then(|value| value.to_str()) != Some("json") {
1193 continue;
1194 }
1195 match fs::read_to_string(&path)
1196 .map_err(|error| {
1197 TrackError::new(
1198 ErrorCode::MigrationFailed,
1199 format!(
1200 "Could not read the legacy file at {}: {error}",
1201 path_to_string(&path)
1202 ),
1203 )
1204 })
1205 .and_then(|raw| {
1206 serde_json::from_str::<T>(&raw).map_err(|error| {
1207 TrackError::new(
1208 ErrorCode::MigrationFailed,
1209 format!(
1210 "Legacy JSON at {} is malformed: {error}",
1211 path_to_string(&path)
1212 ),
1213 )
1214 })
1215 }) {
1216 Ok(record) => records.push(record),
1217 Err(error) => skipped_records.push(SkippedLegacyRecord {
1218 kind: kind.to_owned(),
1219 path: path_to_string(&path),
1220 error: error.to_string(),
1221 }),
1222 }
1223 }
1224
1225 Ok(records)
1226}
1227
1228fn read_legacy_project_metadata(project_directory: &Path) -> Result<ProjectMetadata, TrackError> {
1229 let metadata_path = project_directory.join(LEGACY_PROJECT_METADATA_FILE_NAME);
1230 if !metadata_path.exists() {
1231 return Ok(blank_project_metadata());
1232 }
1233
1234 let raw_file = fs::read_to_string(&metadata_path).map_err(|error| {
1235 TrackError::new(
1236 ErrorCode::InvalidProjectMetadata,
1237 format!(
1238 "Could not read the legacy project metadata file at {}: {error}",
1239 path_to_string(&metadata_path)
1240 ),
1241 )
1242 })?;
1243 let (frontmatter, body) = split_frontmatter_sections(&raw_file)?;
1244 let parsed =
1245 serde_yaml::from_str::<ParsedProjectMetadataFrontmatter>(frontmatter).map_err(|error| {
1246 TrackError::new(
1247 ErrorCode::InvalidProjectMetadata,
1248 format!(
1249 "Project metadata at {} has invalid YAML frontmatter: {error}",
1250 path_to_string(&metadata_path)
1251 ),
1252 )
1253 })?;
1254
1255 Ok(ProjectMetadata {
1256 repo_url: required_string(parsed.repo_url, "repoUrl", &metadata_path)?,
1257 git_url: required_string(parsed.git_url, "gitUrl", &metadata_path)?,
1258 base_branch: required_string(parsed.base_branch, "baseBranch", &metadata_path)?,
1259 description: (!body.trim().is_empty()).then_some(body.trim().to_owned()),
1260 })
1261}
1262
1263fn read_legacy_task_file(issues_dir: &Path, file_path: &Path) -> Result<Task, TrackError> {
1264 let raw_file = fs::read_to_string(file_path).map_err(|error| {
1265 TrackError::new(
1266 ErrorCode::TaskWriteFailed,
1267 format!(
1268 "Could not read task file at {}: {error}",
1269 path_to_string(file_path)
1270 ),
1271 )
1272 })?;
1273 let path_metadata = parse_legacy_task_path(issues_dir, file_path)?;
1274 let (frontmatter, body) = split_frontmatter_sections(&raw_file)?;
1275 let parsed = serde_yaml::from_str::<ParsedTaskFrontmatter>(frontmatter).map_err(|error| {
1276 TrackError::new(
1277 ErrorCode::InvalidConfigInput,
1278 format!("Could not parse task frontmatter: {error}"),
1279 )
1280 })?;
1281
1282 let created_at = required_timestamp(parsed.created_at, "createdAt")?;
1283 let updated_at = required_timestamp(parsed.updated_at, "updatedAt")?;
1284 let description = body.trim().to_owned();
1285 if description.is_empty() {
1286 return Err(TrackError::new(
1287 ErrorCode::InvalidConfigInput,
1288 "Task Markdown body is empty.",
1289 ));
1290 }
1291
1292 Ok(Task {
1293 id: path_metadata.id,
1294 project: path_metadata.project,
1295 priority: parsed.priority.ok_or_else(|| {
1296 TrackError::new(
1297 ErrorCode::InvalidConfigInput,
1298 "Task frontmatter is missing required field priority.",
1299 )
1300 })?,
1301 status: path_metadata.status,
1302 description,
1303 created_at,
1304 updated_at,
1305 source: parsed.source,
1306 })
1307}
1308
1309fn parse_legacy_task_path(
1310 issues_dir: &Path,
1311 file_path: &Path,
1312) -> Result<LegacyTaskPathMetadata, TrackError> {
1313 let relative_path = file_path.strip_prefix(issues_dir).map_err(|_| {
1314 TrackError::new(
1315 ErrorCode::InvalidConfigInput,
1316 format!(
1317 "Task file path {} is outside the configured data directory.",
1318 path_to_string(file_path)
1319 ),
1320 )
1321 })?;
1322 let mut components = relative_path.components();
1323 let project = component_as_string(components.next(), "project", file_path)?;
1324 let status = parse_status_component(components.next(), file_path)?;
1325 let file_name = component_as_string(components.next(), "task filename", file_path)?;
1326 if components.next().is_some() {
1327 return Err(TrackError::new(
1328 ErrorCode::InvalidConfigInput,
1329 format!(
1330 "Task file path {} does not match the expected project/status/id.md layout.",
1331 path_to_string(file_path)
1332 ),
1333 ));
1334 }
1335 let id = file_name
1336 .strip_suffix(".md")
1337 .map(str::to_owned)
1338 .filter(|value| !value.trim().is_empty())
1339 .ok_or_else(|| {
1340 TrackError::new(
1341 ErrorCode::InvalidConfigInput,
1342 format!(
1343 "Task file path {} is missing the task identifier in its filename.",
1344 path_to_string(file_path)
1345 ),
1346 )
1347 })?;
1348
1349 Ok(LegacyTaskPathMetadata {
1350 id,
1351 project,
1352 status,
1353 })
1354}
1355
1356fn blank_project_metadata() -> ProjectMetadata {
1357 ProjectMetadata {
1358 repo_url: String::new(),
1359 git_url: String::new(),
1360 base_branch: "main".to_owned(),
1361 description: None,
1362 }
1363}
1364
1365#[derive(Debug, Deserialize)]
1366struct ParsedTaskFrontmatter {
1367 priority: Option<Priority>,
1368 #[serde(rename = "createdAt")]
1369 created_at: Option<String>,
1370 #[serde(rename = "updatedAt")]
1371 updated_at: Option<String>,
1372 source: Option<TaskSource>,
1373}
1374
1375#[derive(Debug, Deserialize)]
1376struct ParsedProjectMetadataFrontmatter {
1377 #[serde(rename = "repoUrl")]
1378 repo_url: Option<String>,
1379 #[serde(rename = "gitUrl")]
1380 git_url: Option<String>,
1381 #[serde(rename = "baseBranch")]
1382 base_branch: Option<String>,
1383}
1384
/// Task fields derived purely from a legacy file's location on disk
/// (`<issues>/<project>/<status>/<id>.md`); produced by
/// `parse_legacy_task_path`.
#[derive(Debug)]
struct LegacyTaskPathMetadata {
    /// Task identifier: the filename without its `.md` extension.
    id: String,
    /// First path component under the issues directory.
    project: String,
    /// Parsed from the `open`/`closed` status directory name.
    status: Status,
}
1391
1392fn required_timestamp(
1393 value: Option<String>,
1394 field_name: &str,
1395) -> Result<OffsetDateTime, TrackError> {
1396 let value = value
1397 .map(|value| value.trim().to_owned())
1398 .filter(|value| !value.is_empty())
1399 .ok_or_else(|| {
1400 TrackError::new(
1401 ErrorCode::InvalidConfigInput,
1402 format!("Task frontmatter is missing required field {field_name}."),
1403 )
1404 })?;
1405
1406 parse_iso_8601_millis(&value).map_err(|error| {
1407 TrackError::new(
1408 ErrorCode::InvalidConfigInput,
1409 format!("Task frontmatter field {field_name} is not a valid timestamp: {error}"),
1410 )
1411 })
1412}
1413
1414fn required_string(
1415 value: Option<String>,
1416 field_name: &str,
1417 file_path: &Path,
1418) -> Result<String, TrackError> {
1419 value
1420 .map(|value| value.trim().to_owned())
1421 .filter(|value| !value.is_empty())
1422 .ok_or_else(|| {
1423 TrackError::new(
1424 ErrorCode::InvalidProjectMetadata,
1425 format!(
1426 "Project metadata at {} is missing required field {field_name}.",
1427 path_to_string(file_path)
1428 ),
1429 )
1430 })
1431}
1432
/// Splits a legacy Markdown file into its YAML frontmatter and Markdown body.
///
/// Expects `---` on the first line, YAML text, a closing `---` delimiter
/// line, then the body. Returns borrowed slices `(frontmatter, body)` into
/// `raw_file`.
///
/// # Errors
/// Returns `InvalidConfigInput` when the opening delimiter is absent or the
/// closing delimiter is missing or malformed.
fn split_frontmatter_sections(raw_file: &str) -> Result<(&str, &str), TrackError> {
    // The file must open with a `---` line; `after_start` is the byte offset
    // just past that delimiter (start of the YAML text).
    let Some(after_start) = consume_frontmatter_delimiter(raw_file, 0) else {
        return Err(TrackError::new(
            ErrorCode::InvalidConfigInput,
            "Legacy Markdown file must start with YAML frontmatter.",
        ));
    };
    // Find the first subsequent line that begins with `---`.
    let Some(end_start) = find_frontmatter_end(raw_file, after_start) else {
        return Err(TrackError::new(
            ErrorCode::InvalidConfigInput,
            "Legacy Markdown file is missing the closing YAML frontmatter delimiter.",
        ));
    };
    let frontmatter = &raw_file[after_start..end_start];
    // The closing marker must itself be a complete delimiter line
    // (`---` + newline); e.g. a trailing `---` with no newline is rejected.
    let body_start = consume_frontmatter_delimiter(raw_file, end_start).ok_or_else(|| {
        TrackError::new(
            ErrorCode::InvalidConfigInput,
            "Legacy Markdown file is missing the closing YAML frontmatter delimiter.",
        )
    })?;
    Ok((frontmatter, &raw_file[body_start..]))
}
1455
/// If a `---` frontmatter delimiter line starts at byte `offset`, returns
/// the offset just past its newline; accepts both Unix (`\n`) and Windows
/// (`\r\n`) line endings. Returns `None` when `offset` is out of range, not
/// a char boundary, or no delimiter is present.
fn consume_frontmatter_delimiter(raw_file: &str, offset: usize) -> Option<usize> {
    let rest = raw_file.get(offset..)?;
    if rest.starts_with("---\r\n") {
        Some(offset + 5)
    } else if rest.starts_with("---\n") {
        Some(offset + 4)
    } else {
        None
    }
}
1463
/// Returns the byte offset of the first `---` that begins a line at or after
/// `start`, or `None` when no such line exists. "Begins a line" means the
/// offset is 0 or immediately follows a `\n` byte.
fn find_frontmatter_end(raw_file: &str, start: usize) -> Option<usize> {
    let bytes = raw_file.as_bytes();
    (start..bytes.len()).find(|&candidate| {
        let at_line_start = candidate == 0 || bytes[candidate - 1] == b'\n';
        // `get` is only consulted after an ASCII newline (or at 0), so the
        // candidate is always a char boundary and the lookup cannot fail.
        at_line_start
            && raw_file
                .get(candidate..)
                .map_or(false, |rest| rest.starts_with("---"))
    })
}
1477
1478fn component_as_string(
1479 component: Option<Component<'_>>,
1480 label: &str,
1481 file_path: &Path,
1482) -> Result<String, TrackError> {
1483 component
1484 .and_then(|component| component.as_os_str().to_str())
1485 .map(str::trim)
1486 .filter(|value| !value.is_empty())
1487 .map(str::to_owned)
1488 .ok_or_else(|| {
1489 TrackError::new(
1490 ErrorCode::InvalidConfigInput,
1491 format!(
1492 "Task file path {} is missing the {label} component.",
1493 path_to_string(file_path)
1494 ),
1495 )
1496 })
1497}
1498
1499fn parse_status_component(
1500 component: Option<Component<'_>>,
1501 file_path: &Path,
1502) -> Result<Status, TrackError> {
1503 let raw_status = component_as_string(component, "status", file_path)?;
1504 match raw_status.as_str() {
1505 "open" => Ok(Status::Open),
1506 "closed" => Ok(Status::Closed),
1507 _ => Err(TrackError::new(
1508 ErrorCode::InvalidConfigInput,
1509 format!(
1510 "Task file path {} uses unsupported status directory {}.",
1511 path_to_string(file_path),
1512 raw_status
1513 ),
1514 )),
1515 }
1516}
1517
#[cfg(test)]
mod tests {
    // Integration-style tests for the legacy migration flow. Each test builds
    // a throwaway legacy directory layout inside a `TempDir` and points the
    // service at it via environment variables.
    use std::collections::BTreeMap;
    use std::path::Path;

    use tempfile::TempDir;

    use super::{display_cleanup_candidate_path, MigrationService};
    use crate::backend_config::RemoteAgentConfigService;
    use crate::config::{ConfigService, TrackConfigFile};
    use crate::dispatch_repository::DispatchRepository;
    use crate::migration::MigrationState;
    use crate::paths::get_backend_database_path;
    use crate::project_repository::ProjectRepository;
    use crate::review_dispatch_repository::ReviewDispatchRepository;
    use crate::review_repository::ReviewRepository;
    use crate::task_repository::FileTaskRepository;
    use crate::test_support::{set_env_var, track_data_env_lock, EnvVarGuard};

    // RAII bundle that serializes tests touching the shared process
    // environment and redirects the backend state dir plus legacy paths into
    // the tempdir. Guards restore the prior environment when dropped.
    struct TestEnvironment {
        _env_lock: std::sync::MutexGuard<'static, ()>,
        _track_state_dir_guard: EnvVarGuard,
        _track_legacy_root_guard: EnvVarGuard,
        _track_legacy_config_guard: EnvVarGuard,
    }

    impl TestEnvironment {
        fn new(directory: &TempDir) -> Self {
            // Recover from a poisoned lock so one failed test does not
            // cascade into the rest of the suite.
            let env_lock = track_data_env_lock()
                .lock()
                .unwrap_or_else(|poisoned| poisoned.into_inner());
            let backend_state_dir = directory.path().join("backend");
            let legacy_root = directory.path().join("legacy-root");
            let legacy_config_path = directory.path().join("legacy-config/config.json");

            Self {
                _env_lock: env_lock,
                _track_state_dir_guard: set_env_var("TRACK_STATE_DIR", &backend_state_dir),
                _track_legacy_root_guard: set_env_var("TRACK_LEGACY_ROOT", &legacy_root),
                _track_legacy_config_guard: set_env_var(
                    "TRACK_LEGACY_CONFIG_PATH",
                    &legacy_config_path,
                ),
            }
        }
    }

    // Builds a MigrationService whose repositories all resolve their paths
    // from the environment prepared by `TestEnvironment`.
    fn migration_service() -> MigrationService {
        MigrationService::new(
            RemoteAgentConfigService::new(None)
                .expect("remote-agent config service should resolve"),
            ProjectRepository::new(None).expect("project repository should resolve"),
            FileTaskRepository::new(None).expect("task repository should resolve"),
            DispatchRepository::new(None).expect("dispatch repository should resolve"),
            ReviewRepository::new(None).expect("review repository should resolve"),
            ReviewDispatchRepository::new(None).expect("review dispatch repository should resolve"),
        )
        .expect("migration service should resolve")
    }

    // Dispatch/review-run records that reference a task or review that could
    // not be imported must be reported as skipped, not abort the import.
    #[test]
    fn import_skips_orphaned_history_instead_of_aborting() {
        let directory = TempDir::new().expect("tempdir should be created");
        let _environment = TestEnvironment::new(&directory);
        let legacy_root = directory.path().join("legacy-root");
        let project_dir = legacy_root.join("issues/project-a");
        let open_dir = project_dir.join("open");
        let task_dispatch_dir = legacy_root.join("issues/.dispatches/project-a");
        let review_dir = legacy_root.join("reviews");
        let review_run_dir = review_dir.join(".dispatches/project-a");
        std::fs::create_dir_all(&open_dir).expect("legacy task directory should exist");
        std::fs::create_dir_all(&task_dispatch_dir)
            .expect("legacy dispatch directory should exist");
        std::fs::create_dir_all(&review_run_dir).expect("legacy review run directory should exist");

        // One importable task and one with an unparseable `createdAt`.
        std::fs::write(
            open_dir.join("20260323-fix-queue-layout.md"),
            "---\npriority: high\ncreatedAt: 2026-03-23T12:00:00.000Z\nupdatedAt: 2026-03-23T12:00:00.000Z\nsource: cli\n---\nFix queue layout\n",
        )
        .expect("valid task should be written");
        std::fs::write(
            open_dir.join("20260323-bad-task.md"),
            "---\npriority: high\ncreatedAt: nope\n---\nBroken task\n",
        )
        .expect("invalid task should be written");

        // A dispatch for the importable task plus one pointing at the broken
        // (hence never-imported) task.
        std::fs::write(
            task_dispatch_dir.join("valid.json"),
            serde_json::json!({
                "dispatchId": "dispatch-valid",
                "taskId": "20260323-fix-queue-layout",
                "project": "project-a",
                "status": "succeeded",
                "createdAt": "2026-03-23T12:05:00.000Z",
                "updatedAt": "2026-03-23T12:06:00.000Z",
                "finishedAt": "2026-03-23T12:06:00.000Z",
                "remoteHost": "192.0.2.25",
                "branchName": "track/dispatch-valid",
                "worktreePath": "/tmp/worktree",
                "summary": "Completed."
            })
            .to_string(),
        )
        .expect("valid dispatch should be written");
        std::fs::write(
            task_dispatch_dir.join("orphan.json"),
            serde_json::json!({
                "dispatchId": "dispatch-orphan",
                "taskId": "20260323-bad-task",
                "project": "project-a",
                "status": "failed",
                "createdAt": "2026-03-23T12:07:00.000Z",
                "updatedAt": "2026-03-23T12:08:00.000Z",
                "finishedAt": "2026-03-23T12:08:00.000Z",
                "remoteHost": "192.0.2.25",
                "summary": "Failed."
            })
            .to_string(),
        )
        .expect("orphan dispatch should be written");

        // One importable review plus a run referencing a nonexistent review.
        std::fs::write(
            review_dir.join("review-1.json"),
            serde_json::json!({
                "id": "review-1",
                "pullRequestUrl": "https://github.com/acme/project-a/pull/42",
                "pullRequestNumber": 42,
                "pullRequestTitle": "Fix queue layout",
                "repositoryFullName": "acme/project-a",
                "repoUrl": "https://github.com/acme/project-a",
                "gitUrl": "git@github.com:acme/project-a.git",
                "baseBranch": "main",
                "workspaceKey": "project-a",
                "project": "project-a",
                "mainUser": "octocat",
                "createdAt": "2026-03-26T12:00:00.000Z",
                "updatedAt": "2026-03-26T12:00:00.000Z"
            })
            .to_string(),
        )
        .expect("review should be written");
        std::fs::write(
            review_run_dir.join("valid.json"),
            serde_json::json!({
                "dispatchId": "review-run-valid",
                "reviewId": "review-1",
                "pullRequestUrl": "https://github.com/acme/project-a/pull/42",
                "repositoryFullName": "acme/project-a",
                "workspaceKey": "project-a",
                "status": "succeeded",
                "createdAt": "2026-03-26T12:05:00.000Z",
                "updatedAt": "2026-03-26T12:06:00.000Z",
                "finishedAt": "2026-03-26T12:06:00.000Z",
                "remoteHost": "192.0.2.25",
                "branchName": "track-review/review-run-valid",
                "worktreePath": "/tmp/review-worktree",
                "summary": "Submitted review.",
                "reviewSubmitted": true
            })
            .to_string(),
        )
        .expect("valid review run should be written");
        std::fs::write(
            review_run_dir.join("orphan.json"),
            serde_json::json!({
                "dispatchId": "review-run-orphan",
                "reviewId": "review-missing",
                "pullRequestUrl": "https://github.com/acme/project-a/pull/43",
                "repositoryFullName": "acme/project-a",
                "workspaceKey": "project-a",
                "status": "failed",
                "createdAt": "2026-03-26T12:07:00.000Z",
                "updatedAt": "2026-03-26T12:08:00.000Z",
                "finishedAt": "2026-03-26T12:08:00.000Z",
                "remoteHost": "192.0.2.25",
                "summary": "Failed review."
            })
            .to_string(),
        )
        .expect("orphan review run should be written");

        // Pre-import status: only the valid records are counted and the
        // orphans show up as skipped with a reason.
        let service = migration_service();
        let status = service.status().expect("migration status should load");
        assert!(status.requires_migration);
        assert_eq!(status.summary.tasks_found, 1);
        assert_eq!(status.summary.task_dispatches_found, 1);
        assert_eq!(status.summary.reviews_found, 1);
        assert_eq!(status.summary.review_runs_found, 1);
        assert!(status
            .skipped_records
            .iter()
            .any(|record| record.kind == "task_dispatch"
                && record.error.contains("missing task 20260323-bad-task")));
        assert!(status
            .skipped_records
            .iter()
            .any(|record| record.kind == "review_run"
                && record.error.contains("missing review review-missing")));

        // The import itself succeeds and imports exactly the valid records.
        let summary = service
            .import_legacy()
            .expect("legacy import should succeed");
        assert_eq!(summary.imported_tasks, 1);
        assert_eq!(summary.imported_task_dispatches, 1);
        assert_eq!(summary.imported_reviews, 1);
        assert_eq!(summary.imported_review_runs, 1);
        assert!(summary
            .cleanup_candidates
            .iter()
            .any(|candidate| candidate.path.ends_with("legacy-root/issues")));

        // Verify the records landed in the backend database.
        let dispatches = DispatchRepository::new(Some(
            get_backend_database_path().expect("database path should resolve"),
        ))
        .expect("dispatch repository should resolve")
        .list_dispatches(None)
        .expect("dispatches should list");
        assert_eq!(dispatches.len(), 1);

        let review_runs = ReviewDispatchRepository::new(Some(
            get_backend_database_path().expect("database path should resolve"),
        ))
        .expect("review dispatch repository should resolve")
        .list_dispatches(None)
        .expect("review runs should list");
        assert_eq!(review_runs.len(), 1);

        let post_import_status = service.status().expect("migration status should reload");
        assert_eq!(post_import_status.state, MigrationState::Imported);
    }

    // Projects can come from the legacy config (roots + aliases) even when
    // they have no issues directory; aliases whose target does not resolve
    // to a real project are reported as skipped.
    #[test]
    fn migrates_configured_projects_without_issues_and_skips_alias_only_targets() {
        let directory = TempDir::new().expect("tempdir should be created");
        let _environment = TestEnvironment::new(&directory);
        let legacy_root = directory.path().join("legacy-root");
        let legacy_config_path = directory.path().join("legacy-config/config.json");
        let configured_repo_git_dir = directory.path().join("workspace/project-b/.git");
        std::fs::create_dir_all(legacy_root.join("issues/project-a/open"))
            .expect("legacy project directory should exist");
        std::fs::create_dir_all(&configured_repo_git_dir)
            .expect("configured project directory should exist");

        // Legacy config with one project root and three aliases; "ghost"
        // points at a project that exists nowhere.
        ConfigService::new(Some(legacy_config_path))
            .expect("legacy config service should resolve")
            .save_config_file(&TrackConfigFile {
                project_roots: vec!["~/workspace".to_owned()],
                project_aliases: BTreeMap::from([
                    ("proj-a".to_owned(), "project-a".to_owned()),
                    ("proj-b".to_owned(), "Project-B".to_owned()),
                    ("ghost".to_owned(), "project-missing".to_owned()),
                ]),
                ..TrackConfigFile::default()
            })
            .expect("legacy config should save");

        let service = migration_service();
        let status = service.status().expect("migration status should load");
        assert!(status.requires_migration);
        assert_eq!(status.summary.projects_found, 2);
        assert_eq!(status.summary.aliases_found, 2);
        assert!(status
            .skipped_records
            .iter()
            .any(|record| record.kind == "project_alias"
                && record.path == "ghost -> project-missing"));

        let summary = service
            .import_legacy()
            .expect("legacy import should succeed");
        assert_eq!(summary.imported_projects, 2);
        assert_eq!(summary.imported_aliases, 2);
        assert!(summary
            .skipped_records
            .iter()
            .any(|record| record.kind == "project_alias"
                && record.path == "ghost -> project-missing"));

        // Canonical names are lowercased ("Project-B" -> "project-b") and
        // each project carries its surviving alias.
        let imported_projects = ProjectRepository::new(Some(
            get_backend_database_path().expect("database path should resolve"),
        ))
        .expect("project repository should resolve")
        .list_projects()
        .expect("projects should list");
        assert_eq!(imported_projects.len(), 2);
        assert_eq!(imported_projects[0].canonical_name, "project-a");
        assert_eq!(imported_projects[0].aliases, vec!["proj-a".to_owned()]);
        assert_eq!(imported_projects[1].canonical_name, "project-b");
        assert_eq!(imported_projects[1].aliases, vec!["proj-b".to_owned()]);
    }

    // Paths under the compose-mounted legacy home are rendered with a `~`
    // prefix so users see host-style paths in cleanup suggestions.
    #[test]
    fn cleanup_candidates_render_compose_mount_paths_as_host_paths() {
        assert_eq!(
            display_cleanup_candidate_path(Path::new("/home/track/legacy-home/.track/issues")),
            "~/.track/issues"
        );
        assert_eq!(
            display_cleanup_candidate_path(Path::new(
                "/home/track/legacy-home/.config/track/config.json"
            )),
            "~/.config/track/config.json"
        );
    }
}