1use std::path::Path;
9
10use colored::Colorize;
11
12use crate::storage::sqlite::SqliteStorage;
13use crate::sync::file::{count_lines, file_size};
14use crate::sync::types::{ExportFileInfo, SyncError, SyncResult, SyncStatus};
15
/// The JSONL files an export can produce; `get_sync_status` reports
/// whichever of these actually exist in the export directory, in this order.
const EXPORT_FILES: [&str; 6] = [
    "sessions.jsonl",
    "issues.jsonl",
    "context_items.jsonl",
    "memories.jsonl",
    "checkpoints.jsonl",
    "deletions.jsonl",
];
25
26pub fn get_sync_status(
41 storage: &SqliteStorage,
42 export_dir: &Path,
43 project_path: &str,
44) -> SyncResult<SyncStatus> {
45 let dirty_sessions = storage
47 .get_dirty_sessions_by_project(project_path)
48 .map_err(|e| SyncError::Database(e.to_string()))?
49 .len();
50 let dirty_issues = storage
51 .get_dirty_issues_by_project(project_path)
52 .map_err(|e| SyncError::Database(e.to_string()))?
53 .len();
54 let dirty_context_items = storage
55 .get_dirty_context_items_by_project(project_path)
56 .map_err(|e| SyncError::Database(e.to_string()))?
57 .len();
58 let pending_deletions = storage
59 .count_pending_deletions(project_path)
60 .map_err(|e| SyncError::Database(e.to_string()))?;
61
62 let counts = storage
64 .get_project_counts(project_path)
65 .map_err(|e| SyncError::Database(e.to_string()))?;
66
67 let mut export_files = Vec::new();
69 for filename in EXPORT_FILES {
70 let path = export_dir.join(filename);
71 if path.exists() {
72 let size = file_size(&path);
73 let line_count = count_lines(&path).unwrap_or(0);
74 export_files.push(ExportFileInfo {
75 name: filename.to_string(),
76 size,
77 line_count,
78 });
79 }
80 }
81
82 let has_export_files = !export_files.is_empty();
83
84 let total_dirty = dirty_sessions + dirty_issues + dirty_context_items;
86 let needs_backfill = total_dirty == 0 && counts.total() > 0 && !has_export_files;
87
88 Ok(SyncStatus {
89 dirty_sessions,
90 dirty_issues,
91 dirty_context_items,
92 pending_deletions,
93 total_sessions: counts.sessions,
94 total_issues: counts.issues,
95 total_context_items: counts.context_items,
96 needs_backfill,
97 has_export_files,
98 export_files,
99 })
100}
101
102pub fn print_status(status: &SyncStatus) {
104 println!("{}", "Sync Status".bold().underline());
105 println!();
106
107 let total_data =
109 status.total_sessions + status.total_issues + status.total_context_items;
110 if total_data > 0 {
111 println!("{}", "Project Data:".blue().bold());
112 if status.total_sessions > 0 {
113 println!(" Sessions: {}", status.total_sessions);
114 }
115 if status.total_issues > 0 {
116 println!(" Issues: {}", status.total_issues);
117 }
118 if status.total_context_items > 0 {
119 println!(" Context Items: {}", status.total_context_items);
120 }
121 println!();
122 }
123
124 let total_dirty =
126 status.dirty_sessions + status.dirty_issues + status.dirty_context_items + status.pending_deletions;
127 if total_dirty > 0 {
128 println!("{}", "Pending Export:".yellow().bold());
129 if status.dirty_sessions > 0 {
130 println!(" Sessions: {}", status.dirty_sessions);
131 }
132 if status.dirty_issues > 0 {
133 println!(" Issues: {}", status.dirty_issues);
134 }
135 if status.dirty_context_items > 0 {
136 println!(" Context Items: {}", status.dirty_context_items);
137 }
138 if status.pending_deletions > 0 {
139 println!(" Deletions: {}", status.pending_deletions);
140 }
141 println!(" {}: {}", "Total".bold(), total_dirty);
142 println!();
143 println!(
144 "{}",
145 "Run 'sc sync export' to export pending changes.".dimmed()
146 );
147 } else if status.needs_backfill {
148 println!(
149 "{}",
150 "First Export Required:".yellow().bold()
151 );
152 println!(
153 " {} records exist but haven't been exported yet.",
154 total_data
155 );
156 println!();
157 println!(
158 "{}",
159 "Run 'sc sync export' to perform initial export (backfill will run automatically).".dimmed()
160 );
161 } else if total_data == 0 {
162 println!("{}", "No data for this project.".dimmed());
163 } else {
164 println!("{}", "No pending changes to export.".green());
165 }
166
167 println!();
168
169 if status.has_export_files {
171 println!("{}", "Export Files:".blue().bold());
172 for file in &status.export_files {
173 let size_str = format_size(file.size);
174 println!(
175 " {} ({}, {} records)",
176 file.name, size_str, file.line_count
177 );
178 }
179 } else {
180 println!("{}", "No export files found.".dimmed());
181 if total_data > 0 {
182 println!(
183 "{}",
184 "Run 'sc sync export' to create initial export.".dimmed()
185 );
186 }
187 }
188}
189
/// Format a byte count as a human-readable size string.
///
/// Uses binary units (1 KB = 1024 B) with one decimal place for
/// KB/MB/GB and an exact integer below 1 KB. The GB tier keeps large
/// export files from rendering as unwieldy values like "2048.0 MB".
fn format_size(bytes: u64) -> String {
    const KB: u64 = 1024;
    const MB: u64 = 1024 * KB;
    const GB: u64 = 1024 * MB;

    if bytes >= GB {
        format!("{:.1} GB", bytes as f64 / GB as f64)
    } else if bytes >= MB {
        format!("{:.1} MB", bytes as f64 / MB as f64)
    } else if bytes >= KB {
        format!("{:.1} KB", bytes as f64 / KB as f64)
    } else {
        format!("{} B", bytes)
    }
}
203
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    // Pure formatting: binary units, one decimal place for KB/MB.
    #[test]
    fn test_format_size() {
        assert_eq!(format_size(0), "0 B");
        assert_eq!(format_size(512), "512 B");
        assert_eq!(format_size(1024), "1.0 KB");
        assert_eq!(format_size(1536), "1.5 KB");
        assert_eq!(format_size(1024 * 1024), "1.0 MB");
        assert_eq!(format_size(1024 * 1024 + 512 * 1024), "1.5 MB");
    }

    // A fresh database with no records and no export files yields an
    // all-zero status, and in particular must NOT request a backfill.
    #[test]
    fn test_get_sync_status_empty() {
        let temp_dir = TempDir::new().unwrap();
        let db_path = temp_dir.path().join("test.db");
        let storage = SqliteStorage::open(&db_path).unwrap();
        let project_path = "/test/project";

        let status = get_sync_status(&storage, temp_dir.path(), project_path).unwrap();

        assert_eq!(status.dirty_sessions, 0);
        assert_eq!(status.dirty_issues, 0);
        assert_eq!(status.dirty_context_items, 0);
        assert_eq!(status.pending_deletions, 0);
        assert_eq!(status.total_sessions, 0);
        assert!(!status.has_export_files);
        assert!(status.export_files.is_empty());
        assert!(!status.needs_backfill);
    }

    // Creating a session marks it dirty, so it is reported as pending
    // export without triggering the backfill path.
    #[test]
    fn test_get_sync_status_with_dirty_records() {
        let temp_dir = TempDir::new().unwrap();
        let db_path = temp_dir.path().join("test.db");
        let mut storage = SqliteStorage::open(&db_path).unwrap();
        let project_path = "/test/project";

        storage
            .create_session("sess_1", "Test Session", None, Some(project_path), None, "test")
            .unwrap();

        let status = get_sync_status(&storage, temp_dir.path(), project_path).unwrap();

        assert_eq!(status.dirty_sessions, 1);
        assert_eq!(status.total_sessions, 1);
        assert_eq!(status.dirty_issues, 0);
        assert!(!status.needs_backfill);
    }

    // An export file present in the export dir is listed by name with
    // its JSONL record (line) count.
    #[test]
    fn test_get_sync_status_with_export_files() {
        let temp_dir = TempDir::new().unwrap();
        let db_path = temp_dir.path().join("test.db");
        let storage = SqliteStorage::open(&db_path).unwrap();
        let project_path = "/test/project";

        // Two JSONL lines -> line_count of 2.
        let sessions_path = temp_dir.path().join("sessions.jsonl");
        std::fs::write(&sessions_path, "{\"type\":\"session\"}\n{\"type\":\"session\"}\n").unwrap();

        let status = get_sync_status(&storage, temp_dir.path(), project_path).unwrap();

        assert!(status.has_export_files);
        assert_eq!(status.export_files.len(), 1);
        assert_eq!(status.export_files[0].name, "sessions.jsonl");
        assert_eq!(status.export_files[0].line_count, 2);
    }

    // Data exists, nothing is dirty (dirty flag explicitly cleared), and
    // no export files are on disk: exactly the backfill condition.
    #[test]
    fn test_needs_backfill_detection() {
        let temp_dir = TempDir::new().unwrap();
        let db_path = temp_dir.path().join("test.db");
        let mut storage = SqliteStorage::open(&db_path).unwrap();
        let project_path = "/test/project";

        storage
            .create_session("sess_1", "Test Session", None, Some(project_path), None, "test")
            .unwrap();

        // Simulate a record that was synced previously (dirty flag cleared).
        storage.clear_dirty_sessions(&["sess_1".to_string()]).unwrap();

        let status = get_sync_status(&storage, temp_dir.path(), project_path).unwrap();

        assert_eq!(status.total_sessions, 1);
        assert_eq!(status.dirty_sessions, 0);
        assert!(status.needs_backfill);
    }
}