1pub mod git;
2
use anyhow::{Context, Result};
use opensession_api::db::migrations::{LOCAL_MIGRATIONS, MIGRATIONS};
use opensession_core::session::{is_auxiliary_session, working_directory};
use opensession_core::trace::Session;
use rusqlite::{params, Connection, OptionalExtension};
use serde_json::Value;
use std::fs;
use std::io::{BufRead, BufReader};
use std::path::{Path, PathBuf};
use std::sync::Mutex;

use git::{normalize_repo_name, GitContext};
15
/// One row of the local `sessions` table joined with its `session_sync`
/// bookkeeping row and (optionally) the owning user.
#[derive(Debug, Clone)]
pub struct LocalSessionRow {
    pub id: String,
    /// Path of the source trace file on disk, when recorded in `session_sync`.
    pub source_path: Option<String>,
    /// Sync state: 'local_only', 'remote_only' or 'synced' ('unknown' when no
    /// sync row exists — see the COALESCE in the query builder).
    pub sync_status: String,
    /// Timestamp of the last successful upload (SQLite datetime format).
    pub last_synced_at: Option<String>,
    pub user_id: Option<String>,
    /// Display name, joined in from the `users` table when available.
    pub nickname: Option<String>,
    pub team_id: Option<String>,
    /// Producing tool, e.g. "codex" or "claude-code".
    pub tool: String,
    pub agent_provider: Option<String>,
    pub agent_model: Option<String>,
    pub title: Option<String>,
    pub description: Option<String>,
    /// Comma-joined tag list (see `upsert_local_session`).
    pub tags: Option<String>,
    /// Session start time, RFC 3339.
    pub created_at: String,
    pub uploaded_at: Option<String>,
    pub message_count: i64,
    pub user_message_count: i64,
    pub task_count: i64,
    pub event_count: i64,
    pub duration_seconds: i64,
    pub total_input_tokens: i64,
    pub total_output_tokens: i64,
    pub git_remote: Option<String>,
    pub git_branch: Option<String>,
    pub git_commit: Option<String>,
    pub git_repo_name: Option<String>,
    pub pr_number: Option<i64>,
    pub pr_url: Option<String>,
    pub working_directory: Option<String>,
    // files_modified / files_read appear to hold serialized lists (matched via
    // LIKE '%"path"%' in list_sessions_log) — TODO confirm exact encoding.
    pub files_modified: Option<String>,
    pub files_read: Option<String>,
    pub has_errors: bool,
    pub max_active_agents: i64,
    /// Auxiliary sessions are hidden from every listing query in this module.
    pub is_auxiliary: bool,
}
54
/// Guess the producing tool from the trace file's path, case-insensitively.
///
/// Returns `Some("codex")` / `Some("claude-code")` when the path contains the
/// tool's well-known session directory (with or without the leading dot, in
/// Unix or Windows separator style), otherwise `None`.
fn infer_tool_from_source_path(source_path: Option<&str>) -> Option<&'static str> {
    let lowered = source_path?.to_ascii_lowercase();

    const CODEX_MARKERS: [&str; 4] = [
        "/.codex/sessions/",
        "\\.codex\\sessions\\",
        "/codex/sessions/",
        "\\codex\\sessions\\",
    ];
    const CLAUDE_MARKERS: [&str; 4] = [
        "/.claude/projects/",
        "\\.claude\\projects\\",
        "/claude/projects/",
        "\\claude\\projects\\",
    ];

    if CODEX_MARKERS.iter().any(|marker| lowered.contains(marker)) {
        Some("codex")
    } else if CLAUDE_MARKERS.iter().any(|marker| lowered.contains(marker)) {
        Some("claude-code")
    } else {
        None
    }
}
76
77fn normalize_tool_for_source_path(current_tool: &str, source_path: Option<&str>) -> String {
78 infer_tool_from_source_path(source_path)
79 .unwrap_or(current_tool)
80 .to_string()
81}
82
/// Trim `value` and promote it to an owned string; whitespace-only or absent
/// input yields `None`.
fn normalize_non_empty(value: Option<&str>) -> Option<String> {
    let trimmed = value?.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed.to_owned())
    }
}
89
90fn json_object_string(value: &Value, keys: &[&str]) -> Option<String> {
91 let obj = value.as_object()?;
92 for key in keys {
93 if let Some(found) = obj.get(*key).and_then(Value::as_str) {
94 let normalized = found.trim();
95 if !normalized.is_empty() {
96 return Some(normalized.to_string());
97 }
98 }
99 }
100 None
101}
102
103fn git_context_from_session_attributes(session: &Session) -> GitContext {
104 let attrs = &session.context.attributes;
105
106 let mut remote = normalize_non_empty(attrs.get("git_remote").and_then(Value::as_str));
107 let mut branch = normalize_non_empty(attrs.get("git_branch").and_then(Value::as_str));
108 let mut commit = normalize_non_empty(attrs.get("git_commit").and_then(Value::as_str));
109 let mut repo_name = normalize_non_empty(attrs.get("git_repo_name").and_then(Value::as_str));
110
111 if let Some(git_value) = attrs.get("git") {
112 if remote.is_none() {
113 remote = json_object_string(
114 git_value,
115 &["remote", "repository_url", "repo_url", "origin", "url"],
116 );
117 }
118 if branch.is_none() {
119 branch = json_object_string(
120 git_value,
121 &["branch", "git_branch", "current_branch", "ref", "head"],
122 );
123 }
124 if commit.is_none() {
125 commit = json_object_string(git_value, &["commit", "commit_hash", "sha", "git_commit"]);
126 }
127 if repo_name.is_none() {
128 repo_name = json_object_string(git_value, &["repo_name", "repository", "repo", "name"]);
129 }
130 }
131
132 if repo_name.is_none() {
133 repo_name = remote
134 .as_deref()
135 .and_then(normalize_repo_name)
136 .map(ToOwned::to_owned);
137 }
138
139 GitContext {
140 remote,
141 branch,
142 commit,
143 repo_name,
144 }
145}
146
147fn git_context_has_any_field(git: &GitContext) -> bool {
148 git.remote.is_some() || git.branch.is_some() || git.commit.is_some() || git.repo_name.is_some()
149}
150
151fn merge_git_context(preferred: &GitContext, fallback: &GitContext) -> GitContext {
152 GitContext {
153 remote: preferred.remote.clone().or_else(|| fallback.remote.clone()),
154 branch: preferred.branch.clone().or_else(|| fallback.branch.clone()),
155 commit: preferred.commit.clone().or_else(|| fallback.commit.clone()),
156 repo_name: preferred
157 .repo_name
158 .clone()
159 .or_else(|| fallback.repo_name.clone()),
160 }
161}
162
163#[derive(Debug, Clone)]
165pub struct LocalSessionFilter {
166 pub team_id: Option<String>,
167 pub sync_status: Option<String>,
168 pub git_repo_name: Option<String>,
169 pub search: Option<String>,
170 pub exclude_low_signal: bool,
171 pub tool: Option<String>,
172 pub sort: LocalSortOrder,
173 pub time_range: LocalTimeRange,
174 pub limit: Option<u32>,
175 pub offset: Option<u32>,
176}
177
178impl Default for LocalSessionFilter {
179 fn default() -> Self {
180 Self {
181 team_id: None,
182 sync_status: None,
183 git_repo_name: None,
184 search: None,
185 exclude_low_signal: false,
186 tool: None,
187 sort: LocalSortOrder::Recent,
188 time_range: LocalTimeRange::All,
189 limit: None,
190 offset: None,
191 }
192 }
193}
194
/// Sort order for local session listings.
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub enum LocalSortOrder {
    /// Newest first by `created_at` (the default).
    #[default]
    Recent,
    /// Highest `message_count` first, ties broken by recency.
    Popular,
    /// Largest `duration_seconds` first, ties broken by recency.
    Longest,
}
203
/// Time window applied to `created_at` in local session listings.
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub enum LocalTimeRange {
    /// Last 24 hours (SQLite modifier "-1 day").
    Hours24,
    /// Last 7 days.
    Days7,
    /// Last 30 days.
    Days30,
    /// No time constraint (the default).
    #[default]
    All,
}
213
/// Session summary as fetched from the server, mirrored into the local
/// `sessions` table by [`LocalDb::upsert_remote_session`].
#[derive(Debug, Clone)]
pub struct RemoteSessionSummary {
    pub id: String,
    pub user_id: Option<String>,
    // Not written to `sessions` by upsert_remote_session — presumably served
    // from the `users` join instead; confirm with callers.
    pub nickname: Option<String>,
    /// Always present for remote sessions (unlike local rows).
    pub team_id: String,
    pub tool: String,
    pub agent_provider: Option<String>,
    pub agent_model: Option<String>,
    pub title: Option<String>,
    pub description: Option<String>,
    pub tags: Option<String>,
    pub created_at: String,
    /// Upload timestamp; mandatory for remote sessions.
    pub uploaded_at: String,
    pub message_count: i64,
    pub task_count: i64,
    pub event_count: i64,
    pub duration_seconds: i64,
    pub total_input_tokens: i64,
    pub total_output_tokens: i64,
    pub git_remote: Option<String>,
    pub git_branch: Option<String>,
    pub git_commit: Option<String>,
    pub git_repo_name: Option<String>,
    pub pr_number: Option<i64>,
    pub pr_url: Option<String>,
    pub working_directory: Option<String>,
    pub files_modified: Option<String>,
    pub files_read: Option<String>,
    pub has_errors: bool,
    pub max_active_agents: i64,
}
247
/// Filter for the `log`-style listing ([`LocalDb::list_sessions_log`]).
/// Unset fields do not constrain the query.
#[derive(Debug, Default)]
pub struct LogFilter {
    /// Exact tool name match.
    pub tool: Option<String>,
    /// Model pattern; `*` wildcards are translated to SQL LIKE `%`.
    pub model: Option<String>,
    /// Inclusive lower bound on `created_at` (string-compared).
    pub since: Option<String>,
    /// Exclusive upper bound on `created_at` (string-compared).
    pub before: Option<String>,
    /// Path that must appear (quoted) inside `files_modified`.
    pub touches: Option<String>,
    /// Substring searched in title, description and tags.
    pub grep: Option<String>,
    /// `Some(true)` restricts to sessions with errors; `Some(false)`/`None`
    /// apply no constraint.
    pub has_errors: Option<bool>,
    /// Prefix match on the working directory.
    pub working_directory: Option<String>,
    /// Exact repository name match.
    pub git_repo_name: Option<String>,
    /// Max rows; `offset` is only honored when `limit` is set.
    pub limit: Option<u32>,
    pub offset: Option<u32>,
}
274
/// Shared FROM/JOIN clause for session queries: `sessions` (alias `s`) plus
/// optional sync bookkeeping (`ss`) and user info (`u`).
const FROM_CLAUSE: &str = "\
FROM sessions s \
LEFT JOIN session_sync ss ON ss.session_id = s.id \
LEFT JOIN users u ON u.id = s.user_id";
280
/// Handle to the local SQLite database. The single connection is guarded by
/// a mutex so the handle can be shared across threads.
pub struct LocalDb {
    conn: Mutex<Connection>,
}
287
288impl LocalDb {
    /// Open (or create) the local database at its default location.
    pub fn open() -> Result<Self> {
        let path = default_db_path()?;
        Self::open_path(&path)
    }
295
    /// Open (or create) the local database at `path`, creating parent
    /// directories and bringing the schema up to date.
    pub fn open_path(path: &PathBuf) -> Result<Self> {
        if let Some(parent) = path.parent() {
            std::fs::create_dir_all(parent)
                .with_context(|| format!("create dir for {}", path.display()))?;
        }
        let conn = open_connection_with_latest_schema(path)
            .with_context(|| format!("open local db {}", path.display()))?;
        Ok(Self {
            conn: Mutex::new(conn),
        })
    }
308
    /// Lock and return the shared connection.
    ///
    /// Panics if the mutex was poisoned by a panicking holder.
    fn conn(&self) -> std::sync::MutexGuard<'_, Connection> {
        self.conn.lock().expect("local db mutex poisoned")
    }
312
    /// Insert or refresh a locally-discovered session parsed from the trace
    /// file at `source_path`, plus its `session_sync` row.
    ///
    /// Stats and metadata columns are always overwritten. For git columns:
    /// attributes embedded in the session win over `git` (the caller-derived
    /// context), and on conflict existing DB values are only replaced when
    /// the session itself carried git info (`?26 = 1`); otherwise existing
    /// values are kept via COALESCE.
    pub fn upsert_local_session(
        &self,
        session: &Session,
        source_path: &str,
        git: &GitContext,
    ) -> Result<()> {
        let title = session.context.title.as_deref();
        let description = session.context.description.as_deref();
        // Tags are stored as one comma-joined column; an empty list -> NULL.
        let tags = if session.context.tags.is_empty() {
            None
        } else {
            Some(session.context.tags.join(","))
        };
        let created_at = session.context.created_at.to_rfc3339();
        let cwd = working_directory(session).map(String::from);
        let is_auxiliary = is_auxiliary_session(session);

        let (files_modified, files_read, has_errors) =
            opensession_core::extract::extract_file_metadata(session);
        let max_active_agents = opensession_core::agent_metrics::max_active_agents(session) as i64;
        // Correct the tool name when the source path clearly identifies it.
        let normalized_tool =
            normalize_tool_for_source_path(&session.agent.tool, Some(source_path));
        let git_from_session = git_context_from_session_attributes(session);
        let has_session_git = git_context_has_any_field(&git_from_session);
        let merged_git = merge_git_context(&git_from_session, git);

        let conn = self.conn();
        // Local sessions always belong to the implicit 'personal' team.
        conn.execute(
            "INSERT INTO sessions \
             (id, team_id, tool, agent_provider, agent_model, \
             title, description, tags, created_at, \
             message_count, user_message_count, task_count, event_count, duration_seconds, \
             total_input_tokens, total_output_tokens, body_storage_key, \
             git_remote, git_branch, git_commit, git_repo_name, working_directory, \
             files_modified, files_read, has_errors, max_active_agents, is_auxiliary) \
             VALUES (?1,'personal',?2,?3,?4,?5,?6,?7,?8,?9,?10,?11,?12,?13,?14,?15,'',?16,?17,?18,?19,?20,?21,?22,?23,?24,?25) \
             ON CONFLICT(id) DO UPDATE SET \
             tool=excluded.tool, agent_provider=excluded.agent_provider, \
             agent_model=excluded.agent_model, \
             title=excluded.title, description=excluded.description, \
             tags=excluded.tags, \
             message_count=excluded.message_count, user_message_count=excluded.user_message_count, \
             task_count=excluded.task_count, \
             event_count=excluded.event_count, duration_seconds=excluded.duration_seconds, \
             total_input_tokens=excluded.total_input_tokens, \
             total_output_tokens=excluded.total_output_tokens, \
             git_remote=CASE WHEN ?26=1 THEN excluded.git_remote ELSE COALESCE(git_remote, excluded.git_remote) END, \
             git_branch=CASE WHEN ?26=1 THEN excluded.git_branch ELSE COALESCE(git_branch, excluded.git_branch) END, \
             git_commit=CASE WHEN ?26=1 THEN excluded.git_commit ELSE COALESCE(git_commit, excluded.git_commit) END, \
             git_repo_name=CASE WHEN ?26=1 THEN excluded.git_repo_name ELSE COALESCE(git_repo_name, excluded.git_repo_name) END, \
             working_directory=excluded.working_directory, \
             files_modified=excluded.files_modified, files_read=excluded.files_read, \
             has_errors=excluded.has_errors, \
             max_active_agents=excluded.max_active_agents, \
             is_auxiliary=excluded.is_auxiliary",
            params![
                &session.session_id,
                &normalized_tool,
                &session.agent.provider,
                &session.agent.model,
                title,
                description,
                &tags,
                &created_at,
                session.stats.message_count as i64,
                session.stats.user_message_count as i64,
                session.stats.task_count as i64,
                session.stats.event_count as i64,
                session.stats.duration_seconds as i64,
                session.stats.total_input_tokens as i64,
                session.stats.total_output_tokens as i64,
                &merged_git.remote,
                &merged_git.branch,
                &merged_git.commit,
                &merged_git.repo_name,
                &cwd,
                &files_modified,
                &files_read,
                has_errors,
                max_active_agents,
                is_auxiliary as i64,
                has_session_git as i64,
            ],
        )?;

        // Record where the trace came from; a pre-existing sync status is
        // deliberately left untouched (only the path is refreshed).
        conn.execute(
            "INSERT INTO session_sync (session_id, source_path, sync_status) \
             VALUES (?1, ?2, 'local_only') \
             ON CONFLICT(session_id) DO UPDATE SET source_path=excluded.source_path",
            params![&session.session_id, source_path],
        )?;
        Ok(())
    }
410
    /// Insert or refresh a session summary fetched from the server.
    ///
    /// Remote rows are stored with `is_auxiliary = 0`. The `session_sync` row
    /// is created as 'remote_only'; an existing 'local_only' row is promoted
    /// to 'synced', and any other existing status is preserved.
    pub fn upsert_remote_session(&self, summary: &RemoteSessionSummary) -> Result<()> {
        let conn = self.conn();
        conn.execute(
            "INSERT INTO sessions \
             (id, user_id, team_id, tool, agent_provider, agent_model, \
             title, description, tags, created_at, uploaded_at, \
             message_count, task_count, event_count, duration_seconds, \
             total_input_tokens, total_output_tokens, body_storage_key, \
             git_remote, git_branch, git_commit, git_repo_name, \
             pr_number, pr_url, working_directory, \
             files_modified, files_read, has_errors, max_active_agents, is_auxiliary) \
             VALUES (?1,?2,?3,?4,?5,?6,?7,?8,?9,?10,?11,?12,?13,?14,?15,?16,?17,'',?18,?19,?20,?21,?22,?23,?24,?25,?26,?27,?28,0) \
             ON CONFLICT(id) DO UPDATE SET \
             title=excluded.title, description=excluded.description, \
             tags=excluded.tags, uploaded_at=excluded.uploaded_at, \
             message_count=excluded.message_count, task_count=excluded.task_count, \
             event_count=excluded.event_count, duration_seconds=excluded.duration_seconds, \
             total_input_tokens=excluded.total_input_tokens, \
             total_output_tokens=excluded.total_output_tokens, \
             git_remote=excluded.git_remote, git_branch=excluded.git_branch, \
             git_commit=excluded.git_commit, git_repo_name=excluded.git_repo_name, \
             pr_number=excluded.pr_number, pr_url=excluded.pr_url, \
             working_directory=excluded.working_directory, \
             files_modified=excluded.files_modified, files_read=excluded.files_read, \
             has_errors=excluded.has_errors, \
             max_active_agents=excluded.max_active_agents, \
             is_auxiliary=excluded.is_auxiliary",
            params![
                &summary.id,
                &summary.user_id,
                &summary.team_id,
                &summary.tool,
                &summary.agent_provider,
                &summary.agent_model,
                &summary.title,
                &summary.description,
                &summary.tags,
                &summary.created_at,
                &summary.uploaded_at,
                summary.message_count,
                summary.task_count,
                summary.event_count,
                summary.duration_seconds,
                summary.total_input_tokens,
                summary.total_output_tokens,
                &summary.git_remote,
                &summary.git_branch,
                &summary.git_commit,
                &summary.git_repo_name,
                summary.pr_number,
                &summary.pr_url,
                &summary.working_directory,
                &summary.files_modified,
                &summary.files_read,
                summary.has_errors,
                summary.max_active_agents,
            ],
        )?;

        // 'local_only' + seen remotely => 'synced'; other statuses unchanged.
        conn.execute(
            "INSERT INTO session_sync (session_id, sync_status) \
             VALUES (?1, 'remote_only') \
             ON CONFLICT(session_id) DO UPDATE SET \
             sync_status = CASE WHEN session_sync.sync_status = 'local_only' THEN 'synced' ELSE session_sync.sync_status END",
            params![&summary.id],
        )?;
        Ok(())
    }
482
    /// Build the WHERE clause (and its boxed parameters) shared by the
    /// local-session listing, counting and tool queries.
    ///
    /// Placeholders are numbered `?1..`; callers may append unnumbered `?`
    /// placeholders (e.g. LIMIT/OFFSET) after these — SQLite numbers them
    /// after the highest explicit index. Auxiliary sessions are always
    /// excluded.
    fn build_local_session_where_clause(
        filter: &LocalSessionFilter,
    ) -> (String, Vec<Box<dyn rusqlite::types::ToSql>>) {
        let mut where_clauses = vec![
            "1=1".to_string(),
            "COALESCE(s.is_auxiliary, 0) = 0".to_string(),
        ];
        let mut param_values: Vec<Box<dyn rusqlite::types::ToSql>> = Vec::new();
        // Next SQL placeholder number.
        let mut idx = 1u32;

        if let Some(ref team_id) = filter.team_id {
            where_clauses.push(format!("s.team_id = ?{idx}"));
            param_values.push(Box::new(team_id.clone()));
            idx += 1;
        }

        if let Some(ref sync_status) = filter.sync_status {
            // Sessions without a sync row are matched as 'unknown'.
            where_clauses.push(format!("COALESCE(ss.sync_status, 'unknown') = ?{idx}"));
            param_values.push(Box::new(sync_status.clone()));
            idx += 1;
        }

        if let Some(ref repo) = filter.git_repo_name {
            where_clauses.push(format!("s.git_repo_name = ?{idx}"));
            param_values.push(Box::new(repo.clone()));
            idx += 1;
        }

        if let Some(ref tool) = filter.tool {
            where_clauses.push(format!("s.tool = ?{idx}"));
            param_values.push(Box::new(tool.clone()));
            idx += 1;
        }

        if let Some(ref search) = filter.search {
            // One substring pattern, bound three times for the three columns.
            let like = format!("%{search}%");
            where_clauses.push(format!(
                "(s.title LIKE ?{i1} OR s.description LIKE ?{i2} OR s.tags LIKE ?{i3})",
                i1 = idx,
                i2 = idx + 1,
                i3 = idx + 2,
            ));
            param_values.push(Box::new(like.clone()));
            param_values.push(Box::new(like.clone()));
            param_values.push(Box::new(like));
            idx += 3;
        }

        if filter.exclude_low_signal {
            // "Low signal": no messages, no tasks, at most 2 events and no
            // title — typically an empty or aborted trace.
            where_clauses.push(
                "NOT (COALESCE(s.message_count, 0) = 0 \
                 AND COALESCE(s.user_message_count, 0) = 0 \
                 AND COALESCE(s.task_count, 0) = 0 \
                 AND COALESCE(s.event_count, 0) <= 2 \
                 AND (s.title IS NULL OR TRIM(s.title) = ''))"
                    .to_string(),
            );
        }

        let interval = match filter.time_range {
            LocalTimeRange::Hours24 => Some("-1 day"),
            LocalTimeRange::Days7 => Some("-7 days"),
            LocalTimeRange::Days30 => Some("-30 days"),
            LocalTimeRange::All => None,
        };
        if let Some(interval) = interval {
            // Last possible parameter, so idx needs no further bump here.
            where_clauses.push(format!("datetime(s.created_at) >= datetime('now', ?{idx})"));
            param_values.push(Box::new(interval.to_string()));
        }

        (where_clauses.join(" AND "), param_values)
    }
557
    /// List non-auxiliary sessions matching `filter`, sorted per
    /// `filter.sort`. `offset` is only applied when `limit` is set.
    pub fn list_sessions(&self, filter: &LocalSessionFilter) -> Result<Vec<LocalSessionRow>> {
        let (where_str, mut param_values) = Self::build_local_session_where_clause(filter);
        let order_clause = match filter.sort {
            LocalSortOrder::Popular => "s.message_count DESC, s.created_at DESC",
            LocalSortOrder::Longest => "s.duration_seconds DESC, s.created_at DESC",
            LocalSortOrder::Recent => "s.created_at DESC",
        };

        let mut sql = format!(
            "SELECT {LOCAL_SESSION_COLUMNS} \
             {FROM_CLAUSE} WHERE {where_str} \
             ORDER BY {order_clause}"
        );

        if let Some(limit) = filter.limit {
            // Unnumbered `?` placeholders bind after the numbered ones from
            // the WHERE builder, matching the push order below.
            sql.push_str(" LIMIT ?");
            param_values.push(Box::new(limit));
            if let Some(offset) = filter.offset {
                sql.push_str(" OFFSET ?");
                param_values.push(Box::new(offset));
            }
        }

        let param_refs: Vec<&dyn rusqlite::types::ToSql> =
            param_values.iter().map(|p| p.as_ref()).collect();
        let conn = self.conn();
        let mut stmt = conn.prepare(&sql)?;
        let rows = stmt.query_map(param_refs.as_slice(), row_to_local_session)?;

        let mut result = Vec::new();
        for row in rows {
            result.push(row?);
        }

        Ok(result)
    }
594
    /// Count sessions matching `filter`, ignoring its paging fields so the
    /// total reflects the whole result set.
    pub fn count_sessions_filtered(&self, filter: &LocalSessionFilter) -> Result<i64> {
        let mut count_filter = filter.clone();
        count_filter.limit = None;
        count_filter.offset = None;
        let (where_str, param_values) = Self::build_local_session_where_clause(&count_filter);
        let sql = format!("SELECT COUNT(*) {FROM_CLAUSE} WHERE {where_str}");
        let param_refs: Vec<&dyn rusqlite::types::ToSql> =
            param_values.iter().map(|p| p.as_ref()).collect();
        let conn = self.conn();
        let count = conn.query_row(&sql, param_refs.as_slice(), |row| row.get(0))?;
        Ok(count)
    }
608
    /// Distinct non-empty tool names among sessions matching `filter`,
    /// sorted ascending. The filter's own `tool`, `limit` and `offset`
    /// fields are ignored so the full choice list is returned.
    pub fn list_session_tools(&self, filter: &LocalSessionFilter) -> Result<Vec<String>> {
        let mut tool_filter = filter.clone();
        tool_filter.tool = None;
        tool_filter.limit = None;
        tool_filter.offset = None;
        let (where_str, param_values) = Self::build_local_session_where_clause(&tool_filter);
        let sql = format!(
            "SELECT DISTINCT s.tool \
             {FROM_CLAUSE} WHERE {where_str} \
             ORDER BY s.tool ASC"
        );
        let param_refs: Vec<&dyn rusqlite::types::ToSql> =
            param_values.iter().map(|p| p.as_ref()).collect();
        let conn = self.conn();
        let mut stmt = conn.prepare(&sql)?;
        let rows = stmt.query_map(param_refs.as_slice(), |row| row.get::<_, String>(0))?;

        let mut tools = Vec::new();
        for row in rows {
            let tool = row?;
            // Blank tool names are noise; drop them client-side.
            if !tool.trim().is_empty() {
                tools.push(tool);
            }
        }
        Ok(tools)
    }
636
    /// List non-auxiliary sessions for the `log`-style view, newest first,
    /// applying every populated field of `filter`.
    pub fn list_sessions_log(&self, filter: &LogFilter) -> Result<Vec<LocalSessionRow>> {
        let mut where_clauses = vec![
            "1=1".to_string(),
            "COALESCE(s.is_auxiliary, 0) = 0".to_string(),
        ];
        let mut param_values: Vec<Box<dyn rusqlite::types::ToSql>> = Vec::new();
        // Next SQL placeholder number.
        let mut idx = 1u32;

        if let Some(ref tool) = filter.tool {
            where_clauses.push(format!("s.tool = ?{idx}"));
            param_values.push(Box::new(tool.clone()));
            idx += 1;
        }

        if let Some(ref model) = filter.model {
            // Translate shell-style `*` wildcards into SQL LIKE `%`.
            let like = model.replace('*', "%");
            where_clauses.push(format!("s.agent_model LIKE ?{idx}"));
            param_values.push(Box::new(like));
            idx += 1;
        }

        if let Some(ref since) = filter.since {
            where_clauses.push(format!("s.created_at >= ?{idx}"));
            param_values.push(Box::new(since.clone()));
            idx += 1;
        }

        if let Some(ref before) = filter.before {
            where_clauses.push(format!("s.created_at < ?{idx}"));
            param_values.push(Box::new(before.clone()));
            idx += 1;
        }

        if let Some(ref touches) = filter.touches {
            // Match the quoted path inside files_modified — presumably a
            // JSON-encoded list of strings; confirm against the writer.
            let like = format!("%\"{touches}\"%");
            where_clauses.push(format!("s.files_modified LIKE ?{idx}"));
            param_values.push(Box::new(like));
            idx += 1;
        }

        if let Some(ref grep) = filter.grep {
            let like = format!("%{grep}%");
            where_clauses.push(format!(
                "(s.title LIKE ?{i1} OR s.description LIKE ?{i2} OR s.tags LIKE ?{i3})",
                i1 = idx,
                i2 = idx + 1,
                i3 = idx + 2,
            ));
            param_values.push(Box::new(like.clone()));
            param_values.push(Box::new(like.clone()));
            param_values.push(Box::new(like));
            idx += 3;
        }

        // Only Some(true) constrains the query; Some(false) means "any".
        if let Some(true) = filter.has_errors {
            where_clauses.push("s.has_errors = 1".to_string());
        }

        if let Some(ref wd) = filter.working_directory {
            // Prefix match on the working directory.
            where_clauses.push(format!("s.working_directory LIKE ?{idx}"));
            param_values.push(Box::new(format!("{wd}%")));
            idx += 1;
        }

        if let Some(ref repo) = filter.git_repo_name {
            where_clauses.push(format!("s.git_repo_name = ?{idx}"));
            param_values.push(Box::new(repo.clone()));
            idx += 1;
        }

        // The final idx bump is never read; silence the unused warning.
        let _ = idx;
        let where_str = where_clauses.join(" AND ");
        let mut sql = format!(
            "SELECT {LOCAL_SESSION_COLUMNS} \
             {FROM_CLAUSE} WHERE {where_str} \
             ORDER BY s.created_at DESC"
        );

        if let Some(limit) = filter.limit {
            // Unnumbered `?` binds after the numbered placeholders above.
            sql.push_str(" LIMIT ?");
            param_values.push(Box::new(limit));
            if let Some(offset) = filter.offset {
                sql.push_str(" OFFSET ?");
                param_values.push(Box::new(offset));
            }
        }

        let param_refs: Vec<&dyn rusqlite::types::ToSql> =
            param_values.iter().map(|p| p.as_ref()).collect();
        let conn = self.conn();
        let mut stmt = conn.prepare(&sql)?;
        let rows = stmt.query_map(param_refs.as_slice(), row_to_local_session)?;

        let mut result = Vec::new();
        for row in rows {
            result.push(row?);
        }
        Ok(result)
    }
740
741 pub fn get_sessions_by_tool_latest(
743 &self,
744 tool: &str,
745 count: u32,
746 ) -> Result<Vec<LocalSessionRow>> {
747 let sql = format!(
748 "SELECT {LOCAL_SESSION_COLUMNS} \
749 {FROM_CLAUSE} WHERE s.tool = ?1 AND COALESCE(s.is_auxiliary, 0) = 0 \
750 ORDER BY s.created_at DESC"
751 );
752 let conn = self.conn();
753 let mut stmt = conn.prepare(&sql)?;
754 let rows = stmt.query_map(params![tool], row_to_local_session)?;
755 let mut result = Vec::new();
756 for row in rows {
757 result.push(row?);
758 }
759
760 result.truncate(count as usize);
761 Ok(result)
762 }
763
764 pub fn get_sessions_latest(&self, count: u32) -> Result<Vec<LocalSessionRow>> {
766 let sql = format!(
767 "SELECT {LOCAL_SESSION_COLUMNS} \
768 {FROM_CLAUSE} WHERE COALESCE(s.is_auxiliary, 0) = 0 \
769 ORDER BY s.created_at DESC"
770 );
771 let conn = self.conn();
772 let mut stmt = conn.prepare(&sql)?;
773 let rows = stmt.query_map([], row_to_local_session)?;
774 let mut result = Vec::new();
775 for row in rows {
776 result.push(row?);
777 }
778
779 result.truncate(count as usize);
780 Ok(result)
781 }
782
783 pub fn get_session_by_tool_offset(
785 &self,
786 tool: &str,
787 offset: u32,
788 ) -> Result<Option<LocalSessionRow>> {
789 let sql = format!(
790 "SELECT {LOCAL_SESSION_COLUMNS} \
791 {FROM_CLAUSE} WHERE s.tool = ?1 AND COALESCE(s.is_auxiliary, 0) = 0 \
792 ORDER BY s.created_at DESC"
793 );
794 let conn = self.conn();
795 let mut stmt = conn.prepare(&sql)?;
796 let rows = stmt.query_map(params![tool], row_to_local_session)?;
797 let result = rows.collect::<Result<Vec<_>, _>>()?;
798 Ok(result.into_iter().nth(offset as usize))
799 }
800
801 pub fn get_session_by_offset(&self, offset: u32) -> Result<Option<LocalSessionRow>> {
803 let sql = format!(
804 "SELECT {LOCAL_SESSION_COLUMNS} \
805 {FROM_CLAUSE} WHERE COALESCE(s.is_auxiliary, 0) = 0 \
806 ORDER BY s.created_at DESC"
807 );
808 let conn = self.conn();
809 let mut stmt = conn.prepare(&sql)?;
810 let rows = stmt.query_map([], row_to_local_session)?;
811 let result = rows.collect::<Result<Vec<_>, _>>()?;
812 Ok(result.into_iter().nth(offset as usize))
813 }
814
815 pub fn get_session_source_path(&self, session_id: &str) -> Result<Option<String>> {
817 let conn = self.conn();
818 let result = conn
819 .query_row(
820 "SELECT source_path FROM session_sync WHERE session_id = ?1",
821 params![session_id],
822 |row| row.get(0),
823 )
824 .optional()?;
825
826 Ok(result)
827 }
828
829 pub fn session_count(&self) -> Result<i64> {
831 let count = self
832 .conn()
833 .query_row("SELECT COUNT(*) FROM sessions", [], |row| row.get(0))?;
834 Ok(count)
835 }
836
837 pub fn delete_session(&self, session_id: &str) -> Result<()> {
840 let conn = self.conn();
841 conn.execute(
842 "DELETE FROM body_cache WHERE session_id = ?1",
843 params![session_id],
844 )?;
845 conn.execute(
846 "DELETE FROM session_sync WHERE session_id = ?1",
847 params![session_id],
848 )?;
849 conn.execute("DELETE FROM sessions WHERE id = ?1", params![session_id])?;
850 Ok(())
851 }
852
853 pub fn get_sync_cursor(&self, team_id: &str) -> Result<Option<String>> {
856 let cursor = self
857 .conn()
858 .query_row(
859 "SELECT cursor FROM sync_cursors WHERE team_id = ?1",
860 params![team_id],
861 |row| row.get(0),
862 )
863 .optional()?;
864 Ok(cursor)
865 }
866
    /// Store (or overwrite) the sync cursor for `team_id`, stamping the
    /// update time with SQLite's datetime('now').
    pub fn set_sync_cursor(&self, team_id: &str, cursor: &str) -> Result<()> {
        self.conn().execute(
            "INSERT INTO sync_cursors (team_id, cursor, updated_at) \
             VALUES (?1, ?2, datetime('now')) \
             ON CONFLICT(team_id) DO UPDATE SET cursor=excluded.cursor, updated_at=datetime('now')",
            params![team_id, cursor],
        )?;
        Ok(())
    }
876
    /// Non-auxiliary sessions of `team_id` still marked 'local_only',
    /// oldest first — i.e. the upload queue.
    ///
    /// Uses an INNER JOIN on `session_sync` (unlike the shared FROM_CLAUSE)
    /// so sessions without any sync row are skipped.
    pub fn pending_uploads(&self, team_id: &str) -> Result<Vec<LocalSessionRow>> {
        let sql = format!(
            "SELECT {LOCAL_SESSION_COLUMNS} \
             FROM sessions s \
             INNER JOIN session_sync ss ON ss.session_id = s.id \
             LEFT JOIN users u ON u.id = s.user_id \
             WHERE ss.sync_status = 'local_only' AND s.team_id = ?1 AND COALESCE(s.is_auxiliary, 0) = 0 \
             ORDER BY s.created_at ASC"
        );
        let conn = self.conn();
        let mut stmt = conn.prepare(&sql)?;
        let rows = stmt.query_map(params![team_id], row_to_local_session)?;
        let mut result = Vec::new();
        for row in rows {
            result.push(row?);
        }
        Ok(result)
    }
898
    /// Mark `session_id` as synced now. Note `last_synced_at` is stored in
    /// SQLite's datetime('now') format ("YYYY-MM-DD HH:MM:SS", UTC), not
    /// RFC 3339.
    pub fn mark_synced(&self, session_id: &str) -> Result<()> {
        self.conn().execute(
            "UPDATE session_sync SET sync_status = 'synced', last_synced_at = datetime('now') \
             WHERE session_id = ?1",
            params![session_id],
        )?;
        Ok(())
    }
907
908 pub fn was_uploaded_after(
910 &self,
911 source_path: &str,
912 modified: &chrono::DateTime<chrono::Utc>,
913 ) -> Result<bool> {
914 let result: Option<String> = self
915 .conn()
916 .query_row(
917 "SELECT last_synced_at FROM session_sync \
918 WHERE source_path = ?1 AND sync_status = 'synced' AND last_synced_at IS NOT NULL",
919 params![source_path],
920 |row| row.get(0),
921 )
922 .optional()?;
923
924 if let Some(synced_at) = result {
925 if let Ok(dt) = chrono::DateTime::parse_from_rfc3339(&synced_at) {
926 return Ok(dt >= *modified);
927 }
928 }
929 Ok(false)
930 }
931
    /// Store (or refresh) the serialized session body in `body_cache`.
    pub fn cache_body(&self, session_id: &str, body: &[u8]) -> Result<()> {
        self.conn().execute(
            "INSERT INTO body_cache (session_id, body, cached_at) \
             VALUES (?1, ?2, datetime('now')) \
             ON CONFLICT(session_id) DO UPDATE SET body=excluded.body, cached_at=datetime('now')",
            params![session_id, body],
        )?;
        Ok(())
    }
943
944 pub fn get_cached_body(&self, session_id: &str) -> Result<Option<Vec<u8>>> {
945 let body = self
946 .conn()
947 .query_row(
948 "SELECT body FROM body_cache WHERE session_id = ?1",
949 params![session_id],
950 |row| row.get(0),
951 )
952 .optional()?;
953 Ok(body)
954 }
955
    /// The most recent non-auxiliary session whose working directory is
    /// under `repo_path` and whose `created_at` falls within the last
    /// `since_minutes` minutes, if any.
    pub fn find_active_session_for_repo(
        &self,
        repo_path: &str,
        since_minutes: u32,
    ) -> Result<Option<LocalSessionRow>> {
        let sql = format!(
            "SELECT {LOCAL_SESSION_COLUMNS} \
             {FROM_CLAUSE} \
             WHERE s.working_directory LIKE ?1 \
             AND COALESCE(s.is_auxiliary, 0) = 0 \
             AND s.created_at >= datetime('now', ?2) \
             ORDER BY s.created_at DESC LIMIT 1"
        );
        // SQLite datetime modifier, e.g. "-30 minutes".
        let since = format!("-{since_minutes} minutes");
        // Prefix match: the session ran inside (or below) repo_path.
        let like = format!("{repo_path}%");
        let conn = self.conn();
        let mut stmt = conn.prepare(&sql)?;
        let row = stmt
            .query_map(params![like, since], row_to_local_session)?
            .next()
            .transpose()?;
        Ok(row)
    }
982
983 pub fn existing_session_ids(&self) -> std::collections::HashSet<String> {
985 let conn = self.conn();
986 let mut stmt = conn
987 .prepare("SELECT id FROM sessions")
988 .unwrap_or_else(|_| panic!("failed to prepare existing_session_ids query"));
989 let rows = stmt.query_map([], |row| row.get::<_, String>(0));
990 let mut set = std::collections::HashSet::new();
991 if let Ok(rows) = rows {
992 for row in rows.flatten() {
993 set.insert(row);
994 }
995 }
996 set
997 }
998
    /// Refresh the derived columns (title/description, counters, file
    /// metadata, flags) of an existing session row from a re-parsed trace.
    /// Rows whose id doesn't exist are silently unaffected.
    pub fn update_session_stats(&self, session: &Session) -> Result<()> {
        let title = session.context.title.as_deref();
        let description = session.context.description.as_deref();
        let (files_modified, files_read, has_errors) =
            opensession_core::extract::extract_file_metadata(session);
        let max_active_agents = opensession_core::agent_metrics::max_active_agents(session) as i64;
        let is_auxiliary = is_auxiliary_session(session);

        self.conn().execute(
            "UPDATE sessions SET \
             title=?2, description=?3, \
             message_count=?4, user_message_count=?5, task_count=?6, \
             event_count=?7, duration_seconds=?8, \
             total_input_tokens=?9, total_output_tokens=?10, \
             files_modified=?11, files_read=?12, has_errors=?13, \
             max_active_agents=?14, is_auxiliary=?15 \
             WHERE id=?1",
            params![
                &session.session_id,
                title,
                description,
                session.stats.message_count as i64,
                session.stats.user_message_count as i64,
                session.stats.task_count as i64,
                session.stats.event_count as i64,
                session.stats.duration_seconds as i64,
                session.stats.total_input_tokens as i64,
                session.stats.total_output_tokens as i64,
                &files_modified,
                &files_read,
                has_errors,
                max_active_agents,
                is_auxiliary as i64,
            ],
        )?;
        Ok(())
    }
1037
    /// Record (or overwrite) the source file path for `session_id` without
    /// touching any existing sync status.
    pub fn set_session_sync_path(&self, session_id: &str, source_path: &str) -> Result<()> {
        self.conn().execute(
            "INSERT INTO session_sync (session_id, source_path) \
             VALUES (?1, ?2) \
             ON CONFLICT(session_id) DO UPDATE SET source_path = excluded.source_path",
            params![session_id, source_path],
        )?;
        Ok(())
    }
1048
1049 pub fn list_repos(&self) -> Result<Vec<String>> {
1051 let conn = self.conn();
1052 let mut stmt = conn.prepare(
1053 "SELECT DISTINCT git_repo_name FROM sessions \
1054 WHERE git_repo_name IS NOT NULL AND COALESCE(is_auxiliary, 0) = 0 \
1055 ORDER BY git_repo_name ASC",
1056 )?;
1057 let rows = stmt.query_map([], |row| row.get(0))?;
1058 let mut result = Vec::new();
1059 for row in rows {
1060 result.push(row?);
1061 }
1062 Ok(result)
1063 }
1064
1065 pub fn list_working_directories(&self) -> Result<Vec<String>> {
1067 let conn = self.conn();
1068 let mut stmt = conn.prepare(
1069 "SELECT DISTINCT working_directory FROM sessions \
1070 WHERE working_directory IS NOT NULL AND TRIM(working_directory) <> '' \
1071 AND COALESCE(is_auxiliary, 0) = 0 \
1072 ORDER BY working_directory ASC",
1073 )?;
1074 let rows = stmt.query_map([], |row| row.get(0))?;
1075 let mut result = Vec::new();
1076 for row in rows {
1077 result.push(row?);
1078 }
1079 Ok(result)
1080 }
1081}
1082
1083fn open_connection_with_latest_schema(path: &PathBuf) -> Result<Connection> {
1086 let conn = Connection::open(path).with_context(|| format!("open db {}", path.display()))?;
1087 conn.execute_batch("PRAGMA journal_mode=WAL;")?;
1088
1089 conn.execute_batch("PRAGMA foreign_keys=OFF;")?;
1091
1092 apply_local_migrations(&conn)?;
1093 repair_session_tools_from_source_path(&conn)?;
1094 repair_auxiliary_flags_from_source_path(&conn)?;
1095 validate_local_schema(&conn)?;
1096
1097 Ok(conn)
1098}
1099
/// Apply the shared (`MIGRATIONS`) and local-only (`LOCAL_MIGRATIONS`) schema
/// migrations that are not yet recorded in the `_migrations` bookkeeping table.
fn apply_local_migrations(conn: &Connection) -> Result<()> {
    // The bookkeeping table must exist before we can ask which migrations ran.
    conn.execute_batch(
        "CREATE TABLE IF NOT EXISTS _migrations (
            id INTEGER PRIMARY KEY,
            name TEXT NOT NULL UNIQUE,
            applied_at TEXT NOT NULL DEFAULT (datetime('now'))
        );",
    )
    .context("create _migrations table for local db")?;

    // Shared migrations run first, then the local-only ones, in declared order.
    for (name, sql) in MIGRATIONS.iter().chain(LOCAL_MIGRATIONS.iter()) {
        // A failed lookup is treated the same as "not applied" rather than
        // aborting startup.
        let already_applied: bool = conn
            .query_row(
                "SELECT COUNT(*) > 0 FROM _migrations WHERE name = ?1",
                [name],
                |row| row.get(0),
            )
            .unwrap_or(false);

        if already_applied {
            continue;
        }

        conn.execute_batch(sql)
            .with_context(|| format!("apply local migration {name}"))?;

        // OR IGNORE: re-recording a name that slipped in concurrently is harmless.
        conn.execute(
            "INSERT OR IGNORE INTO _migrations (name) VALUES (?1)",
            [name],
        )
        .with_context(|| format!("record local migration {name}"))?;
    }

    Ok(())
}
1135
1136fn validate_local_schema(conn: &Connection) -> Result<()> {
1137 let sql = format!("SELECT {LOCAL_SESSION_COLUMNS} {FROM_CLAUSE} WHERE 1=0");
1138 conn.prepare(&sql)
1139 .map(|_| ())
1140 .context("validate local session schema")
1141}
1142
1143fn repair_session_tools_from_source_path(conn: &Connection) -> Result<()> {
1144 let mut stmt = conn.prepare(
1145 "SELECT s.id, s.tool, ss.source_path \
1146 FROM sessions s \
1147 LEFT JOIN session_sync ss ON ss.session_id = s.id \
1148 WHERE ss.source_path IS NOT NULL",
1149 )?;
1150 let rows = stmt.query_map([], |row| {
1151 Ok((
1152 row.get::<_, String>(0)?,
1153 row.get::<_, String>(1)?,
1154 row.get::<_, Option<String>>(2)?,
1155 ))
1156 })?;
1157
1158 let mut updates: Vec<(String, String)> = Vec::new();
1159 for row in rows {
1160 let (id, current_tool, source_path) = row?;
1161 let normalized = normalize_tool_for_source_path(¤t_tool, source_path.as_deref());
1162 if normalized != current_tool {
1163 updates.push((id, normalized));
1164 }
1165 }
1166 drop(stmt);
1167
1168 for (id, tool) in updates {
1169 conn.execute(
1170 "UPDATE sessions SET tool = ?1 WHERE id = ?2",
1171 params![tool, id],
1172 )?;
1173 }
1174
1175 Ok(())
1176}
1177
1178fn repair_auxiliary_flags_from_source_path(conn: &Connection) -> Result<()> {
1179 let mut stmt = conn.prepare(
1180 "SELECT s.id, ss.source_path \
1181 FROM sessions s \
1182 LEFT JOIN session_sync ss ON ss.session_id = s.id \
1183 WHERE ss.source_path IS NOT NULL \
1184 AND COALESCE(s.is_auxiliary, 0) = 0",
1185 )?;
1186 let rows = stmt.query_map([], |row| {
1187 Ok((row.get::<_, String>(0)?, row.get::<_, Option<String>>(1)?))
1188 })?;
1189
1190 let mut updates: Vec<String> = Vec::new();
1191 for row in rows {
1192 let (id, source_path) = row?;
1193 let Some(source_path) = source_path else {
1194 continue;
1195 };
1196 if infer_tool_from_source_path(Some(&source_path)) != Some("codex") {
1197 continue;
1198 }
1199 if is_codex_auxiliary_source_file(&source_path) {
1200 updates.push(id);
1201 }
1202 }
1203 drop(stmt);
1204
1205 for id in updates {
1206 conn.execute(
1207 "UPDATE sessions SET is_auxiliary = 1 WHERE id = ?1",
1208 params![id],
1209 )?;
1210 }
1211
1212 Ok(())
1213}
1214
/// Heuristic: peek at the first line of a Codex rollout JSONL file and report
/// whether it records an auxiliary (subagent) session. Any I/O failure or an
/// empty first line is treated as "not auxiliary".
fn is_codex_auxiliary_source_file(source_path: &str) -> bool {
    // Substring markers that identify a subagent rollout in the session_meta
    // line, covering both compact and space-after-colon JSON for "source".
    const MARKERS: [&str; 6] = [
        "\"source\":{\"subagent\"",
        "\"source\": {\"subagent\"",
        "\"agent_role\":\"awaiter\"",
        "\"agent_role\":\"worker\"",
        "\"agent_role\":\"explorer\"",
        "\"agent_role\":\"subagent\"",
    ];

    let file = match fs::File::open(source_path) {
        Ok(file) => file,
        Err(_) => return false,
    };

    let mut first_line = String::new();
    if BufReader::new(file).read_line(&mut first_line).is_err() {
        return false;
    }

    let line = first_line.trim();
    !line.is_empty() && MARKERS.iter().any(|marker| line.contains(marker))
}
1237
/// Column projection shared by the local-session queries (`s` = sessions,
/// `ss` = session_sync, `u` = the joined user row supplying `nickname`).
///
/// `row_to_local_session` reads these columns positionally, so the order here
/// is load-bearing: any change must be mirrored there and is guarded by
/// `validate_local_schema`. COALESCE supplies defaults for nullable columns.
pub const LOCAL_SESSION_COLUMNS: &str = "\
s.id, ss.source_path, COALESCE(ss.sync_status, 'unknown') AS sync_status, ss.last_synced_at, \
s.user_id, u.nickname, s.team_id, s.tool, s.agent_provider, s.agent_model, \
s.title, s.description, s.tags, s.created_at, s.uploaded_at, \
s.message_count, COALESCE(s.user_message_count, 0), s.task_count, s.event_count, s.duration_seconds, \
s.total_input_tokens, s.total_output_tokens, \
s.git_remote, s.git_branch, s.git_commit, s.git_repo_name, \
s.pr_number, s.pr_url, s.working_directory, \
s.files_modified, s.files_read, s.has_errors, COALESCE(s.max_active_agents, 1), COALESCE(s.is_auxiliary, 0)";
1248
/// Map one row of the `LOCAL_SESSION_COLUMNS` projection into a
/// `LocalSessionRow`. The numeric indices below must stay in lockstep with
/// the column order declared in `LOCAL_SESSION_COLUMNS`.
fn row_to_local_session(row: &rusqlite::Row) -> rusqlite::Result<LocalSessionRow> {
    let source_path: Option<String> = row.get(1)?;
    let tool: String = row.get(7)?;
    // The stored tool may be a stale hint; re-derive it from the source path.
    let normalized_tool = normalize_tool_for_source_path(&tool, source_path.as_deref());

    Ok(LocalSessionRow {
        id: row.get(0)?,
        source_path,
        sync_status: row.get(2)?,
        last_synced_at: row.get(3)?,
        user_id: row.get(4)?,
        nickname: row.get(5)?,
        team_id: row.get(6)?,
        tool: normalized_tool,
        agent_provider: row.get(8)?,
        agent_model: row.get(9)?,
        title: row.get(10)?,
        description: row.get(11)?,
        tags: row.get(12)?,
        created_at: row.get(13)?,
        uploaded_at: row.get(14)?,
        message_count: row.get(15)?,
        user_message_count: row.get(16)?,
        task_count: row.get(17)?,
        event_count: row.get(18)?,
        duration_seconds: row.get(19)?,
        total_input_tokens: row.get(20)?,
        total_output_tokens: row.get(21)?,
        git_remote: row.get(22)?,
        git_branch: row.get(23)?,
        git_commit: row.get(24)?,
        git_repo_name: row.get(25)?,
        pr_number: row.get(26)?,
        pr_url: row.get(27)?,
        working_directory: row.get(28)?,
        files_modified: row.get(29)?,
        files_read: row.get(30)?,
        // Booleans are stored as 0/1 integers. `unwrap_or` is defensive: the
        // projection already COALESCEs these, so mapping stays infallible even
        // if a value comes back NULL or mistyped.
        has_errors: row.get::<_, i64>(31).unwrap_or(0) != 0,
        max_active_agents: row.get(32).unwrap_or(1),
        is_auxiliary: row.get::<_, i64>(33).unwrap_or(0) != 0,
    })
}
1291
1292fn default_db_path() -> Result<PathBuf> {
1293 let home = std::env::var("HOME")
1294 .or_else(|_| std::env::var("USERPROFILE"))
1295 .context("Could not determine home directory")?;
1296 Ok(PathBuf::from(home)
1297 .join(".local")
1298 .join("share")
1299 .join("opensession")
1300 .join("local.db"))
1301}
1302
#[cfg(test)]
mod tests {
    // End-to-end unit tests against a real temp-file SQLite database.
    //
    // Fix: several statements were jammed onto single lines (some with a
    // trailing `}` appended); they are split back onto their own lines per
    // rustfmt convention. No behavior changes.
    use super::*;

    use std::fs::{create_dir_all, write};
    use tempfile::tempdir;

    fn test_db() -> LocalDb {
        let dir = tempdir().unwrap();
        // keep() leaks the temp dir so the DB file outlives this function.
        let path = dir.keep().join("test.db");
        LocalDb::open_path(&path).unwrap()
    }

    #[test]
    fn test_open_and_schema() {
        let _db = test_db();
    }

    #[test]
    fn test_open_repairs_codex_tool_hint_from_source_path() {
        let dir = tempfile::tempdir().unwrap();
        let path = dir.path().join("repair.db");

        // First open bootstraps the schema.
        {
            let _ = LocalDb::open_path(&path).unwrap();
        }

        // Seed a row with a wrong tool hint but a codex-style source path.
        {
            let conn = Connection::open(&path).unwrap();
            conn.execute(
                "INSERT INTO sessions (id, team_id, tool, created_at, body_storage_key) VALUES (?1, 'personal', 'claude-code', ?2, '')",
                params!["rollout-repair", "2026-02-20T00:00:00Z"],
            )
            .unwrap();
            conn.execute(
                "INSERT INTO session_sync (session_id, source_path, sync_status) VALUES (?1, ?2, 'local_only')",
                params!["rollout-repair", "/Users/test/.codex/sessions/2026/02/20/rollout-repair.jsonl"],
            )
            .unwrap();
        }

        // Re-opening runs the repair pass.
        let db = LocalDb::open_path(&path).unwrap();
        let rows = db.list_sessions(&LocalSessionFilter::default()).unwrap();
        let row = rows
            .iter()
            .find(|row| row.id == "rollout-repair")
            .expect("repaired row");
        assert_eq!(row.tool, "codex");
    }

    #[test]
    fn test_open_repairs_codex_auxiliary_flag_from_source_path() {
        let dir = tempfile::tempdir().unwrap();
        let path = dir.path().join("repair-auxiliary.db");
        let codex_dir = dir
            .path()
            .join(".codex")
            .join("sessions")
            .join("2026")
            .join("02")
            .join("20");
        create_dir_all(&codex_dir).unwrap();
        let source_path = codex_dir.join("rollout-subagent.jsonl");
        write(
            &source_path,
            r#"{"timestamp":"2026-02-20T00:00:00.000Z","type":"session_meta","payload":{"id":"rollout-subagent","timestamp":"2026-02-20T00:00:00.000Z","cwd":"/tmp","originator":"Codex Desktop","cli_version":"0.105.0","source":{"subagent":{"thread_spawn":{"parent_thread_id":"parent-session-id","depth":1,"agent_role":"awaiter"}}},"agent_role":"awaiter"}}\n"#,
        )
        .unwrap();

        {
            let _ = LocalDb::open_path(&path).unwrap();
        }

        {
            let conn = Connection::open(&path).unwrap();
            conn.execute(
                "INSERT INTO sessions (id, team_id, tool, created_at, body_storage_key, is_auxiliary) VALUES (?1, 'personal', 'codex', ?2, '', 0)",
                params!["rollout-subagent", "2026-02-20T00:00:00Z"],
            )
            .unwrap();
            conn.execute(
                "INSERT INTO session_sync (session_id, source_path, sync_status) VALUES (?1, ?2, 'local_only')",
                params!["rollout-subagent", source_path.to_string_lossy().to_string()],
            )
            .unwrap();
        }

        let db = LocalDb::open_path(&path).unwrap();
        let rows = db.list_sessions(&LocalSessionFilter::default()).unwrap();
        assert!(
            rows.iter().all(|row| row.id != "rollout-subagent"),
            "auxiliary codex session should be hidden after repair"
        );
    }

    #[test]
    fn test_upsert_local_session_normalizes_tool_from_source_path() {
        let db = test_db();
        let mut session = Session::new(
            "rollout-upsert".to_string(),
            opensession_core::trace::Agent {
                provider: "openai".to_string(),
                model: "gpt-5".to_string(),
                tool: "claude-code".to_string(),
                tool_version: None,
            },
        );
        session.stats.event_count = 1;

        db.upsert_local_session(
            &session,
            "/Users/test/.codex/sessions/2026/02/20/rollout-upsert.jsonl",
            &crate::git::GitContext::default(),
        )
        .unwrap();

        let rows = db.list_sessions(&LocalSessionFilter::default()).unwrap();
        let row = rows
            .iter()
            .find(|row| row.id == "rollout-upsert")
            .expect("upserted row");
        assert_eq!(row.tool, "codex");
    }

    #[test]
    fn test_upsert_local_session_preserves_existing_git_when_session_has_no_git_metadata() {
        let db = test_db();
        let mut session = Session::new(
            "preserve-git".to_string(),
            opensession_core::trace::Agent {
                provider: "openai".to_string(),
                model: "gpt-5".to_string(),
                tool: "codex".to_string(),
                tool_version: None,
            },
        );
        session.stats.event_count = 1;

        let first_git = crate::git::GitContext {
            remote: Some("https://github.com/acme/repo.git".to_string()),
            branch: Some("feature/original".to_string()),
            commit: Some("1111111".to_string()),
            repo_name: Some("acme/repo".to_string()),
        };
        db.upsert_local_session(
            &session,
            "/Users/test/.codex/sessions/2026/02/20/preserve-git.jsonl",
            &first_git,
        )
        .unwrap();

        // Second upsert carries different fallback git info; the stored values
        // from the first upsert should win.
        let second_git = crate::git::GitContext {
            remote: Some("https://github.com/acme/repo.git".to_string()),
            branch: Some("feature/current-head".to_string()),
            commit: Some("2222222".to_string()),
            repo_name: Some("acme/repo".to_string()),
        };
        db.upsert_local_session(
            &session,
            "/Users/test/.codex/sessions/2026/02/20/preserve-git.jsonl",
            &second_git,
        )
        .unwrap();

        let rows = db.list_sessions(&LocalSessionFilter::default()).unwrap();
        let row = rows
            .iter()
            .find(|row| row.id == "preserve-git")
            .expect("row exists");
        assert_eq!(row.git_branch.as_deref(), Some("feature/original"));
        assert_eq!(row.git_commit.as_deref(), Some("1111111"));
    }

    #[test]
    fn test_upsert_local_session_prefers_git_branch_from_session_attributes() {
        let db = test_db();
        let mut session = Session::new(
            "session-git-branch".to_string(),
            opensession_core::trace::Agent {
                provider: "anthropic".to_string(),
                model: "claude-opus-4-6".to_string(),
                tool: "claude-code".to_string(),
                tool_version: None,
            },
        );
        session.stats.event_count = 1;
        session.context.attributes.insert(
            "git_branch".to_string(),
            serde_json::Value::String("from-session".to_string()),
        );

        let fallback_git = crate::git::GitContext {
            remote: Some("https://github.com/acme/repo.git".to_string()),
            branch: Some("fallback-branch".to_string()),
            commit: Some("aaaaaaaa".to_string()),
            repo_name: Some("acme/repo".to_string()),
        };
        db.upsert_local_session(
            &session,
            "/Users/test/.claude/projects/foo/session-git-branch.jsonl",
            &fallback_git,
        )
        .unwrap();

        // Updating the session attribute and re-upserting should overwrite the
        // stored branch with the new session-provided value.
        session.context.attributes.insert(
            "git_branch".to_string(),
            serde_json::Value::String("from-session-updated".to_string()),
        );
        db.upsert_local_session(
            &session,
            "/Users/test/.claude/projects/foo/session-git-branch.jsonl",
            &fallback_git,
        )
        .unwrap();

        let rows = db.list_sessions(&LocalSessionFilter::default()).unwrap();
        let row = rows
            .iter()
            .find(|row| row.id == "session-git-branch")
            .expect("row exists");
        assert_eq!(row.git_branch.as_deref(), Some("from-session-updated"));
    }

    #[test]
    fn test_upsert_local_session_marks_parented_sessions_auxiliary() {
        let db = test_db();
        let mut session = Session::new(
            "aux-upsert".to_string(),
            opensession_core::trace::Agent {
                provider: "openai".to_string(),
                model: "gpt-5".to_string(),
                tool: "opencode".to_string(),
                tool_version: None,
            },
        );
        session.stats.event_count = 1;
        session.context.attributes.insert(
            opensession_core::session::ATTR_PARENT_SESSION_ID.to_string(),
            serde_json::Value::String("parent-session".to_string()),
        );

        db.upsert_local_session(
            &session,
            "/Users/test/.opencode/storage/session/project/aux-upsert.json",
            &crate::git::GitContext::default(),
        )
        .unwrap();

        let is_auxiliary: i64 = db
            .conn()
            .query_row(
                "SELECT is_auxiliary FROM sessions WHERE id = ?1",
                params!["aux-upsert"],
                |row| row.get(0),
            )
            .unwrap();
        assert_eq!(is_auxiliary, 1);

        let rows = db.list_sessions(&LocalSessionFilter::default()).unwrap();
        assert!(
            rows.iter().all(|row| row.id != "aux-upsert"),
            "auxiliary sessions should be hidden from default listing"
        );
    }

    #[test]
    fn test_upsert_local_session_primary_role_overrides_parent_link() {
        let db = test_db();
        let mut session = Session::new(
            "primary-override".to_string(),
            opensession_core::trace::Agent {
                provider: "openai".to_string(),
                model: "gpt-5".to_string(),
                tool: "opencode".to_string(),
                tool_version: None,
            },
        );
        session.stats.event_count = 1;
        session.context.attributes.insert(
            opensession_core::session::ATTR_PARENT_SESSION_ID.to_string(),
            serde_json::Value::String("parent-session".to_string()),
        );
        session.context.attributes.insert(
            opensession_core::session::ATTR_SESSION_ROLE.to_string(),
            serde_json::Value::String("primary".to_string()),
        );

        db.upsert_local_session(
            &session,
            "/Users/test/.opencode/storage/session/project/primary-override.json",
            &crate::git::GitContext::default(),
        )
        .unwrap();

        let rows = db.list_sessions(&LocalSessionFilter::default()).unwrap();
        let row = rows
            .iter()
            .find(|row| row.id == "primary-override")
            .expect("session with explicit primary role should stay visible");
        assert!(!row.is_auxiliary);
    }

    #[test]
    fn test_sync_cursor() {
        let db = test_db();
        assert_eq!(db.get_sync_cursor("team1").unwrap(), None);
        db.set_sync_cursor("team1", "2024-01-01T00:00:00Z").unwrap();
        assert_eq!(
            db.get_sync_cursor("team1").unwrap(),
            Some("2024-01-01T00:00:00Z".to_string())
        );
        // Setting again overwrites the stored cursor.
        db.set_sync_cursor("team1", "2024-06-01T00:00:00Z").unwrap();
        assert_eq!(
            db.get_sync_cursor("team1").unwrap(),
            Some("2024-06-01T00:00:00Z".to_string())
        );
    }

    #[test]
    fn test_body_cache() {
        let db = test_db();
        assert_eq!(db.get_cached_body("s1").unwrap(), None);
        db.cache_body("s1", b"hello world").unwrap();
        assert_eq!(
            db.get_cached_body("s1").unwrap(),
            Some(b"hello world".to_vec())
        );
    }

    #[test]
    fn test_local_migrations_are_loaded_from_api_crate() {
        let migration_names: Vec<&str> = super::LOCAL_MIGRATIONS
            .iter()
            .map(|(name, _)| *name)
            .collect();
        assert!(
            migration_names.contains(&"local_0001_schema"),
            "expected local_0001_schema migration from opensession-api"
        );
        assert_eq!(
            migration_names.len(),
            1,
            "local schema should be single-step"
        );

        // Guard against re-introducing migration SQL files in this crate.
        let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
        let migrations_dir = manifest_dir.join("migrations");
        if migrations_dir.exists() {
            let sql_files = std::fs::read_dir(migrations_dir)
                .expect("read local-db migrations directory")
                .filter_map(Result::ok)
                .map(|entry| entry.file_name().to_string_lossy().to_string())
                .filter(|name| name.ends_with(".sql"))
                .collect::<Vec<_>>();
            assert!(
                sql_files.is_empty(),
                "local-db must not ship duplicated migration SQL files"
            );
        }
    }

    #[test]
    fn test_local_schema_bootstrap_includes_is_auxiliary_column() {
        let dir = tempdir().unwrap();
        let path = dir.path().join("local.db");
        let db = LocalDb::open_path(&path).unwrap();
        let conn = db.conn();
        let mut stmt = conn.prepare("PRAGMA table_info(sessions)").unwrap();
        let columns = stmt
            .query_map([], |row| row.get::<_, String>(1))
            .unwrap()
            .collect::<std::result::Result<Vec<_>, _>>()
            .unwrap();
        assert!(
            columns.iter().any(|name| name == "is_auxiliary"),
            "sessions schema must include is_auxiliary column in bootstrap migration"
        );
    }

    #[test]
    fn test_upsert_remote_session() {
        let db = test_db();
        let summary = RemoteSessionSummary {
            id: "remote-1".to_string(),
            user_id: Some("u1".to_string()),
            nickname: Some("alice".to_string()),
            team_id: "t1".to_string(),
            tool: "claude-code".to_string(),
            agent_provider: None,
            agent_model: None,
            title: Some("Test session".to_string()),
            description: None,
            tags: None,
            created_at: "2024-01-01T00:00:00Z".to_string(),
            uploaded_at: "2024-01-01T01:00:00Z".to_string(),
            message_count: 10,
            task_count: 2,
            event_count: 20,
            duration_seconds: 300,
            total_input_tokens: 1000,
            total_output_tokens: 500,
            git_remote: None,
            git_branch: None,
            git_commit: None,
            git_repo_name: None,
            pr_number: None,
            pr_url: None,
            working_directory: None,
            files_modified: None,
            files_read: None,
            has_errors: false,
            max_active_agents: 1,
        };
        db.upsert_remote_session(&summary).unwrap();

        let sessions = db.list_sessions(&LocalSessionFilter::default()).unwrap();
        assert_eq!(sessions.len(), 1);
        assert_eq!(sessions[0].id, "remote-1");
        assert_eq!(sessions[0].sync_status, "remote_only");
        assert_eq!(sessions[0].nickname, None);
        assert!(!sessions[0].is_auxiliary);
    }

    #[test]
    fn test_list_filter_by_repo() {
        let db = test_db();
        let summary1 = RemoteSessionSummary {
            id: "s1".to_string(),
            user_id: None,
            nickname: None,
            team_id: "t1".to_string(),
            tool: "claude-code".to_string(),
            agent_provider: None,
            agent_model: None,
            title: Some("Session 1".to_string()),
            description: None,
            tags: None,
            created_at: "2024-01-01T00:00:00Z".to_string(),
            uploaded_at: "2024-01-01T01:00:00Z".to_string(),
            message_count: 5,
            task_count: 0,
            event_count: 10,
            duration_seconds: 60,
            total_input_tokens: 100,
            total_output_tokens: 50,
            git_remote: None,
            git_branch: None,
            git_commit: None,
            git_repo_name: None,
            pr_number: None,
            pr_url: None,
            working_directory: None,
            files_modified: None,
            files_read: None,
            has_errors: false,
            max_active_agents: 1,
        };
        db.upsert_remote_session(&summary1).unwrap();

        let filter = LocalSessionFilter {
            team_id: Some("t1".to_string()),
            ..Default::default()
        };
        assert_eq!(db.list_sessions(&filter).unwrap().len(), 1);

        let filter = LocalSessionFilter {
            team_id: Some("t999".to_string()),
            ..Default::default()
        };
        assert_eq!(db.list_sessions(&filter).unwrap().len(), 0);
    }

    fn make_summary(id: &str, tool: &str, title: &str, created_at: &str) -> RemoteSessionSummary {
        RemoteSessionSummary {
            id: id.to_string(),
            user_id: None,
            nickname: None,
            team_id: "t1".to_string(),
            tool: tool.to_string(),
            agent_provider: Some("anthropic".to_string()),
            agent_model: Some("claude-opus-4-6".to_string()),
            title: Some(title.to_string()),
            description: None,
            tags: None,
            created_at: created_at.to_string(),
            uploaded_at: created_at.to_string(),
            message_count: 5,
            task_count: 1,
            event_count: 10,
            duration_seconds: 300,
            total_input_tokens: 1000,
            total_output_tokens: 500,
            git_remote: None,
            git_branch: None,
            git_commit: None,
            git_repo_name: None,
            pr_number: None,
            pr_url: None,
            working_directory: None,
            files_modified: None,
            files_read: None,
            has_errors: false,
            max_active_agents: 1,
        }
    }

    // Seeds s1..s5 with ascending created_at; listings return newest first.
    fn seed_sessions(db: &LocalDb) {
        db.upsert_remote_session(&make_summary(
            "s1",
            "claude-code",
            "First session",
            "2024-01-01T00:00:00Z",
        ))
        .unwrap();
        db.upsert_remote_session(&make_summary(
            "s2",
            "claude-code",
            "JWT auth work",
            "2024-01-02T00:00:00Z",
        ))
        .unwrap();
        db.upsert_remote_session(&make_summary(
            "s3",
            "gemini",
            "Gemini test",
            "2024-01-03T00:00:00Z",
        ))
        .unwrap();
        db.upsert_remote_session(&make_summary(
            "s4",
            "claude-code",
            "Error handling",
            "2024-01-04T00:00:00Z",
        ))
        .unwrap();
        db.upsert_remote_session(&make_summary(
            "s5",
            "claude-code",
            "Final polish",
            "2024-01-05T00:00:00Z",
        ))
        .unwrap();
    }

    #[test]
    fn test_log_no_filters() {
        let db = test_db();
        seed_sessions(&db);
        let filter = LogFilter::default();
        let results = db.list_sessions_log(&filter).unwrap();
        assert_eq!(results.len(), 5);
        // Newest first.
        assert_eq!(results[0].id, "s5");
        assert_eq!(results[4].id, "s1");
    }

    #[test]
    fn test_log_filter_by_tool() {
        let db = test_db();
        seed_sessions(&db);
        let filter = LogFilter {
            tool: Some("claude-code".to_string()),
            ..Default::default()
        };
        let results = db.list_sessions_log(&filter).unwrap();
        assert_eq!(results.len(), 4);
        assert!(results.iter().all(|s| s.tool == "claude-code"));
    }

    #[test]
    fn test_log_filter_by_model_wildcard() {
        let db = test_db();
        seed_sessions(&db);
        let filter = LogFilter {
            model: Some("claude*".to_string()),
            ..Default::default()
        };
        let results = db.list_sessions_log(&filter).unwrap();
        // All seeded sessions use a claude-* model.
        assert_eq!(results.len(), 5);
    }

    #[test]
    fn test_log_filter_since() {
        let db = test_db();
        seed_sessions(&db);
        let filter = LogFilter {
            since: Some("2024-01-03T00:00:00Z".to_string()),
            ..Default::default()
        };
        let results = db.list_sessions_log(&filter).unwrap();
        assert_eq!(results.len(), 3);
    }

    #[test]
    fn test_log_filter_before() {
        let db = test_db();
        seed_sessions(&db);
        let filter = LogFilter {
            before: Some("2024-01-03T00:00:00Z".to_string()),
            ..Default::default()
        };
        let results = db.list_sessions_log(&filter).unwrap();
        assert_eq!(results.len(), 2);
    }

    #[test]
    fn test_log_filter_since_and_before() {
        let db = test_db();
        seed_sessions(&db);
        let filter = LogFilter {
            since: Some("2024-01-02T00:00:00Z".to_string()),
            before: Some("2024-01-04T00:00:00Z".to_string()),
            ..Default::default()
        };
        let results = db.list_sessions_log(&filter).unwrap();
        assert_eq!(results.len(), 2);
    }

    #[test]
    fn test_log_filter_grep() {
        let db = test_db();
        seed_sessions(&db);
        let filter = LogFilter {
            grep: Some("JWT".to_string()),
            ..Default::default()
        };
        let results = db.list_sessions_log(&filter).unwrap();
        assert_eq!(results.len(), 1);
        assert_eq!(results[0].id, "s2");
    }

    #[test]
    fn test_log_limit_and_offset() {
        let db = test_db();
        seed_sessions(&db);
        let filter = LogFilter {
            limit: Some(2),
            offset: Some(1),
            ..Default::default()
        };
        let results = db.list_sessions_log(&filter).unwrap();
        assert_eq!(results.len(), 2);
        // Offset 1 skips s5 (the newest).
        assert_eq!(results[0].id, "s4");
        assert_eq!(results[1].id, "s3");
    }

    #[test]
    fn test_log_limit_only() {
        let db = test_db();
        seed_sessions(&db);
        let filter = LogFilter {
            limit: Some(3),
            ..Default::default()
        };
        let results = db.list_sessions_log(&filter).unwrap();
        assert_eq!(results.len(), 3);
    }

    #[test]
    fn test_list_sessions_limit_offset() {
        let db = test_db();
        seed_sessions(&db);
        let filter = LocalSessionFilter {
            limit: Some(2),
            offset: Some(1),
            ..Default::default()
        };
        let results = db.list_sessions(&filter).unwrap();
        assert_eq!(results.len(), 2);
        assert_eq!(results[0].id, "s4");
        assert_eq!(results[1].id, "s3");
    }

    #[test]
    fn test_count_sessions_filtered() {
        let db = test_db();
        seed_sessions(&db);
        let count = db
            .count_sessions_filtered(&LocalSessionFilter::default())
            .unwrap();
        assert_eq!(count, 5);
    }

    #[test]
    fn test_list_and_count_filters_match_when_auxiliary_rows_exist() {
        let db = test_db();
        seed_sessions(&db);
        db.conn()
            .execute(
                "UPDATE sessions SET is_auxiliary = 1 WHERE id IN ('s2', 's3')",
                [],
            )
            .unwrap();

        let default_filter = LocalSessionFilter::default();
        let rows = db.list_sessions(&default_filter).unwrap();
        let count = db.count_sessions_filtered(&default_filter).unwrap();
        assert_eq!(rows.len() as i64, count);
        assert!(rows.iter().all(|row| !row.is_auxiliary));

        let gemini_filter = LocalSessionFilter {
            tool: Some("gemini".to_string()),
            ..Default::default()
        };
        let gemini_rows = db.list_sessions(&gemini_filter).unwrap();
        let gemini_count = db.count_sessions_filtered(&gemini_filter).unwrap();
        assert_eq!(gemini_rows.len() as i64, gemini_count);
        assert!(gemini_rows.is_empty());
        assert_eq!(gemini_count, 0);
    }

    #[test]
    fn test_exclude_low_signal_filter_hides_metadata_only_sessions() {
        let db = test_db();

        let mut low_signal = make_summary("meta-only", "claude-code", "", "2024-01-01T00:00:00Z");
        low_signal.title = None;
        low_signal.message_count = 0;
        low_signal.task_count = 0;
        low_signal.event_count = 2;
        low_signal.git_repo_name = Some("frontend/aviss-react-front".to_string());

        let mut normal = make_summary(
            "real-work",
            "opencode",
            "Socket.IO decision",
            "2024-01-02T00:00:00Z",
        );
        normal.message_count = 14;
        normal.task_count = 2;
        normal.event_count = 38;
        normal.git_repo_name = Some("frontend/aviss-react-front".to_string());

        db.upsert_remote_session(&low_signal).unwrap();
        db.upsert_remote_session(&normal).unwrap();

        let default_filter = LocalSessionFilter {
            git_repo_name: Some("frontend/aviss-react-front".to_string()),
            ..Default::default()
        };
        assert_eq!(db.list_sessions(&default_filter).unwrap().len(), 2);
        assert_eq!(db.count_sessions_filtered(&default_filter).unwrap(), 2);

        let repo_filter = LocalSessionFilter {
            git_repo_name: Some("frontend/aviss-react-front".to_string()),
            exclude_low_signal: true,
            ..Default::default()
        };
        let rows = db.list_sessions(&repo_filter).unwrap();
        assert_eq!(rows.len(), 1);
        assert_eq!(rows[0].id, "real-work");
        assert_eq!(db.count_sessions_filtered(&repo_filter).unwrap(), 1);
    }

    #[test]
    fn test_list_working_directories_distinct_non_empty() {
        let db = test_db();

        let mut a = make_summary("wd-1", "claude-code", "One", "2024-01-01T00:00:00Z");
        a.working_directory = Some("/tmp/repo-a".to_string());
        let mut b = make_summary("wd-2", "claude-code", "Two", "2024-01-02T00:00:00Z");
        b.working_directory = Some("/tmp/repo-a".to_string());
        let mut c = make_summary("wd-3", "claude-code", "Three", "2024-01-03T00:00:00Z");
        c.working_directory = Some("/tmp/repo-b".to_string());
        let mut d = make_summary("wd-4", "claude-code", "Four", "2024-01-04T00:00:00Z");
        d.working_directory = Some("".to_string());

        db.upsert_remote_session(&a).unwrap();
        db.upsert_remote_session(&b).unwrap();
        db.upsert_remote_session(&c).unwrap();
        db.upsert_remote_session(&d).unwrap();

        let dirs = db.list_working_directories().unwrap();
        assert_eq!(
            dirs,
            vec!["/tmp/repo-a".to_string(), "/tmp/repo-b".to_string()]
        );
    }

    #[test]
    fn test_list_session_tools() {
        let db = test_db();
        seed_sessions(&db);
        let tools = db
            .list_session_tools(&LocalSessionFilter::default())
            .unwrap();
        assert_eq!(tools, vec!["claude-code".to_string(), "gemini".to_string()]);
    }

    #[test]
    fn test_log_combined_filters() {
        let db = test_db();
        seed_sessions(&db);
        let filter = LogFilter {
            tool: Some("claude-code".to_string()),
            since: Some("2024-01-03T00:00:00Z".to_string()),
            limit: Some(1),
            ..Default::default()
        };
        let results = db.list_sessions_log(&filter).unwrap();
        assert_eq!(results.len(), 1);
        assert_eq!(results[0].id, "s5");
    }

    #[test]
    fn test_get_session_by_offset() {
        let db = test_db();
        seed_sessions(&db);
        // Offset 0 is the newest session.
        let row = db.get_session_by_offset(0).unwrap().unwrap();
        assert_eq!(row.id, "s5");
        let row = db.get_session_by_offset(2).unwrap().unwrap();
        assert_eq!(row.id, "s3");
        assert!(db.get_session_by_offset(10).unwrap().is_none());
    }

    #[test]
    fn test_get_session_by_tool_offset() {
        let db = test_db();
        seed_sessions(&db);
        let row = db
            .get_session_by_tool_offset("claude-code", 0)
            .unwrap()
            .unwrap();
        assert_eq!(row.id, "s5");
        let row = db
            .get_session_by_tool_offset("claude-code", 1)
            .unwrap()
            .unwrap();
        assert_eq!(row.id, "s4");
        let row = db.get_session_by_tool_offset("gemini", 0).unwrap().unwrap();
        assert_eq!(row.id, "s3");
        assert!(db
            .get_session_by_tool_offset("gemini", 1)
            .unwrap()
            .is_none());
    }

    #[test]
    fn test_get_sessions_latest() {
        let db = test_db();
        seed_sessions(&db);
        let rows = db.get_sessions_latest(3).unwrap();
        assert_eq!(rows.len(), 3);
        assert_eq!(rows[0].id, "s5");
        assert_eq!(rows[1].id, "s4");
        assert_eq!(rows[2].id, "s3");
    }

    #[test]
    fn test_get_sessions_by_tool_latest() {
        let db = test_db();
        seed_sessions(&db);
        let rows = db.get_sessions_by_tool_latest("claude-code", 2).unwrap();
        assert_eq!(rows.len(), 2);
        assert_eq!(rows[0].id, "s5");
        assert_eq!(rows[1].id, "s4");
    }

    #[test]
    fn test_get_sessions_latest_more_than_available() {
        let db = test_db();
        seed_sessions(&db);
        // Asking for more than exist returns only what is available.
        let rows = db.get_sessions_by_tool_latest("gemini", 10).unwrap();
        assert_eq!(rows.len(), 1);
    }

    #[test]
    fn test_session_count() {
        let db = test_db();
        assert_eq!(db.session_count().unwrap(), 0);
        seed_sessions(&db);
        assert_eq!(db.session_count().unwrap(), 5);
    }
}