1pub mod git;
2
3use anyhow::{Context, Result};
4use opensession_api::db::migrations::{LOCAL_MIGRATIONS, MIGRATIONS};
5use opensession_core::session::{is_auxiliary_session, working_directory};
6use opensession_core::trace::Session;
7use rusqlite::{params, Connection, OptionalExtension};
8use serde_json::Value;
9use std::fs;
10use std::path::PathBuf;
11use std::sync::Mutex;
12
13use git::{normalize_repo_name, GitContext};
14
/// One row of the local `sessions` table joined with its sync state
/// (`session_sync`) and owning user (`users`) — the shape produced by
/// `row_to_local_session` via `FROM_CLAUSE`.
#[derive(Debug, Clone)]
pub struct LocalSessionRow {
    pub id: String,
    // Sync bookkeeping (from session_sync; may be absent for remote rows).
    pub source_path: Option<String>,
    pub sync_status: String,
    pub last_synced_at: Option<String>,
    // Ownership / origin.
    pub user_id: Option<String>,
    pub nickname: Option<String>,
    pub team_id: Option<String>,
    pub tool: String,
    pub agent_provider: Option<String>,
    pub agent_model: Option<String>,
    // Descriptive metadata.
    pub title: Option<String>,
    pub description: Option<String>,
    // Comma-joined tag list; NULL when the session had no tags
    // (see upsert_local_session).
    pub tags: Option<String>,
    pub created_at: String,
    pub uploaded_at: Option<String>,
    // Aggregated statistics.
    pub message_count: i64,
    pub user_message_count: i64,
    pub task_count: i64,
    pub event_count: i64,
    pub duration_seconds: i64,
    pub total_input_tokens: i64,
    pub total_output_tokens: i64,
    // Git / PR context captured at upsert time.
    pub git_remote: Option<String>,
    pub git_branch: Option<String>,
    pub git_commit: Option<String>,
    pub git_repo_name: Option<String>,
    pub pr_number: Option<i64>,
    pub pr_url: Option<String>,
    pub working_directory: Option<String>,
    // Serialized file lists — NOTE(review): assumed JSON-encoded, based on
    // the `%"{touches}"%` LIKE pattern in list_sessions_log; confirm.
    pub files_modified: Option<String>,
    pub files_read: Option<String>,
    pub has_errors: bool,
    pub max_active_agents: i64,
    // Child/subagent sessions; filtered out of most listings.
    pub is_auxiliary: bool,
}
53
54pub fn is_opencode_child_session(row: &LocalSessionRow) -> bool {
56 row.tool == "opencode" && row.is_auxiliary
57}
58
59#[deprecated(
61 note = "Use parser/core canonical session role attributes instead of runtime file inspection"
62)]
63pub fn parse_opencode_parent_session_id(source_path: &str) -> Option<String> {
64 let text = fs::read_to_string(source_path).ok()?;
65 let json: Value = serde_json::from_str(&text).ok()?;
66 lookup_parent_session_id(&json)
67}
68
69fn lookup_parent_session_id(value: &Value) -> Option<String> {
70 match value {
71 Value::Object(obj) => {
72 for (key, value) in obj {
73 if is_parent_id_key(key) {
74 if let Some(parent_id) = value.as_str() {
75 let parent_id = parent_id.trim();
76 if !parent_id.is_empty() {
77 return Some(parent_id.to_string());
78 }
79 }
80 }
81 if let Some(parent_id) = lookup_parent_session_id(value) {
82 return Some(parent_id);
83 }
84 }
85 None
86 }
87 Value::Array(items) => items.iter().find_map(lookup_parent_session_id),
88 _ => None,
89 }
90}
91
/// Returns true when a JSON key names a parent-session identifier.
///
/// The key is flattened to lowercase alphanumerics so "parent_id",
/// "ParentID", "parent-session-uuid", etc. all compare equal. Every form
/// the original predicate accepted ("parentid", "parentuuid",
/// "parentsessionid", "parentsessionuuid", `*parentsessionid`,
/// `parent*id`, `parent*uuid`) reduces to one check, because "uuid"
/// itself ends with "id" — the redundant clauses are dropped.
fn is_parent_id_key(key: &str) -> bool {
    let flat: String = key
        .chars()
        .filter(char::is_ascii_alphanumeric)
        .map(|c| c.to_ascii_lowercase())
        .collect();

    flat.contains("parent") && flat.ends_with("id")
}
107
108pub fn hide_opencode_child_sessions(mut rows: Vec<LocalSessionRow>) -> Vec<LocalSessionRow> {
110 rows.retain(|row| !row.is_auxiliary);
111 rows
112}
113
/// Infers the producing tool from well-known path fragments in the
/// session's source file path (case-insensitive, both path separators).
/// Returns `None` when the path is absent or matches no known layout.
fn infer_tool_from_source_path(source_path: Option<&str>) -> Option<&'static str> {
    // Checked in order: codex layouts first, then claude-code layouts.
    const MARKERS: &[(&str, &[&str])] = &[
        (
            "codex",
            &[
                "/.codex/sessions/",
                "\\.codex\\sessions\\",
                "/codex/sessions/",
                "\\codex\\sessions\\",
            ],
        ),
        (
            "claude-code",
            &[
                "/.claude/projects/",
                "\\.claude\\projects\\",
                "/claude/projects/",
                "\\claude\\projects\\",
            ],
        ),
    ];

    let lowered = source_path?.to_ascii_lowercase();
    MARKERS.iter().find_map(|(tool, needles)| {
        needles
            .iter()
            .any(|needle| lowered.contains(needle))
            .then_some(*tool)
    })
}
135
136fn normalize_tool_for_source_path(current_tool: &str, source_path: Option<&str>) -> String {
137 infer_tool_from_source_path(source_path)
138 .unwrap_or(current_tool)
139 .to_string()
140}
141
/// Trims the input and returns it as an owned string, mapping absent,
/// empty, or whitespace-only values to `None`.
fn normalize_non_empty(value: Option<&str>) -> Option<String> {
    let trimmed = value?.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed.to_string())
    }
}
148
149fn json_object_string(value: &Value, keys: &[&str]) -> Option<String> {
150 let obj = value.as_object()?;
151 for key in keys {
152 if let Some(found) = obj.get(*key).and_then(Value::as_str) {
153 let normalized = found.trim();
154 if !normalized.is_empty() {
155 return Some(normalized.to_string());
156 }
157 }
158 }
159 None
160}
161
162fn git_context_from_session_attributes(session: &Session) -> GitContext {
163 let attrs = &session.context.attributes;
164
165 let mut remote = normalize_non_empty(attrs.get("git_remote").and_then(Value::as_str));
166 let mut branch = normalize_non_empty(attrs.get("git_branch").and_then(Value::as_str));
167 let mut commit = normalize_non_empty(attrs.get("git_commit").and_then(Value::as_str));
168 let mut repo_name = normalize_non_empty(attrs.get("git_repo_name").and_then(Value::as_str));
169
170 if let Some(git_value) = attrs.get("git") {
171 if remote.is_none() {
172 remote = json_object_string(
173 git_value,
174 &["remote", "repository_url", "repo_url", "origin", "url"],
175 );
176 }
177 if branch.is_none() {
178 branch = json_object_string(
179 git_value,
180 &["branch", "git_branch", "current_branch", "ref", "head"],
181 );
182 }
183 if commit.is_none() {
184 commit = json_object_string(git_value, &["commit", "commit_hash", "sha", "git_commit"]);
185 }
186 if repo_name.is_none() {
187 repo_name = json_object_string(git_value, &["repo_name", "repository", "repo", "name"]);
188 }
189 }
190
191 if repo_name.is_none() {
192 repo_name = remote
193 .as_deref()
194 .and_then(normalize_repo_name)
195 .map(ToOwned::to_owned);
196 }
197
198 GitContext {
199 remote,
200 branch,
201 commit,
202 repo_name,
203 }
204}
205
206fn git_context_has_any_field(git: &GitContext) -> bool {
207 git.remote.is_some() || git.branch.is_some() || git.commit.is_some() || git.repo_name.is_some()
208}
209
210fn merge_git_context(preferred: &GitContext, fallback: &GitContext) -> GitContext {
211 GitContext {
212 remote: preferred.remote.clone().or_else(|| fallback.remote.clone()),
213 branch: preferred.branch.clone().or_else(|| fallback.branch.clone()),
214 commit: preferred.commit.clone().or_else(|| fallback.commit.clone()),
215 repo_name: preferred
216 .repo_name
217 .clone()
218 .or_else(|| fallback.repo_name.clone()),
219 }
220}
221
/// Facets applied when listing local sessions; see
/// `LocalDb::build_local_session_where_clause` for how each maps to SQL.
#[derive(Debug, Clone)]
pub struct LocalSessionFilter {
    pub team_id: Option<String>,
    pub sync_status: Option<String>,
    pub git_repo_name: Option<String>,
    // Substring match against title, description, and tags.
    pub search: Option<String>,
    // Hide near-empty sessions (no messages/tasks, <=2 events, no title).
    pub exclude_low_signal: bool,
    pub tool: Option<String>,
    pub sort: LocalSortOrder,
    pub time_range: LocalTimeRange,
    // Page size; `offset` only takes effect when `limit` is also set.
    pub limit: Option<u32>,
    pub offset: Option<u32>,
}
236
237impl Default for LocalSessionFilter {
238 fn default() -> Self {
239 Self {
240 team_id: None,
241 sync_status: None,
242 git_repo_name: None,
243 search: None,
244 exclude_low_signal: false,
245 tool: None,
246 sort: LocalSortOrder::Recent,
247 time_range: LocalTimeRange::All,
248 limit: None,
249 offset: None,
250 }
251 }
252}
253
/// Sort order for session listings.
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub enum LocalSortOrder {
    /// Newest first (`created_at DESC`).
    #[default]
    Recent,
    /// Most messages first, then newest.
    Popular,
    /// Longest duration first, then newest.
    Longest,
}
262
/// `created_at` window applied to session listings
/// (translated to SQLite `datetime('now', …)` offsets).
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub enum LocalTimeRange {
    /// Last 24 hours (`-1 day`).
    Hours24,
    /// Last 7 days.
    Days7,
    /// Last 30 days.
    Days30,
    /// No time restriction.
    #[default]
    All,
}
272
/// Session summary fetched from the server; mirrors the `sessions`
/// columns written by `LocalDb::upsert_remote_session`. Unlike
/// `LocalSessionRow` it carries no sync state, `team_id` and
/// `uploaded_at` are mandatory, and there is no `user_message_count`
/// or `is_auxiliary` (remote rows are stored with is_auxiliary = 0).
#[derive(Debug, Clone)]
pub struct RemoteSessionSummary {
    pub id: String,
    pub user_id: Option<String>,
    pub nickname: Option<String>,
    pub team_id: String,
    pub tool: String,
    pub agent_provider: Option<String>,
    pub agent_model: Option<String>,
    pub title: Option<String>,
    pub description: Option<String>,
    pub tags: Option<String>,
    pub created_at: String,
    pub uploaded_at: String,
    pub message_count: i64,
    pub task_count: i64,
    pub event_count: i64,
    pub duration_seconds: i64,
    pub total_input_tokens: i64,
    pub total_output_tokens: i64,
    pub git_remote: Option<String>,
    pub git_branch: Option<String>,
    pub git_commit: Option<String>,
    pub git_repo_name: Option<String>,
    pub pr_number: Option<i64>,
    pub pr_url: Option<String>,
    pub working_directory: Option<String>,
    pub files_modified: Option<String>,
    pub files_read: Option<String>,
    pub has_errors: bool,
    pub max_active_agents: i64,
}
306
/// Filters for the log-style query (`LocalDb::list_sessions_log`).
/// All fields are optional; unset fields do not constrain the query.
#[derive(Debug, Default)]
pub struct LogFilter {
    pub tool: Option<String>,
    // Model pattern; `*` wildcards are translated to SQL `%`.
    pub model: Option<String>,
    // Inclusive lower bound on created_at (lexicographic compare).
    pub since: Option<String>,
    // Exclusive upper bound on created_at.
    pub before: Option<String>,
    // Path that must appear (quoted) inside files_modified.
    pub touches: Option<String>,
    // Substring match against title/description/tags.
    pub grep: Option<String>,
    // Some(true) restricts to error sessions; Some(false)/None is a no-op.
    pub has_errors: Option<bool>,
    // Prefix match on working_directory.
    pub working_directory: Option<String>,
    pub git_repo_name: Option<String>,
    // Page size; `offset` only applies when `limit` is set.
    pub limit: Option<u32>,
    pub offset: Option<u32>,
}
333
/// Shared FROM/JOIN clause: sessions (`s`) with optional sync state
/// (`ss`) and optional owning user (`u`).
const FROM_CLAUSE: &str = "\
FROM sessions s \
LEFT JOIN session_sync ss ON ss.session_id = s.id \
LEFT JOIN users u ON u.id = s.user_id";
339
/// Handle to the local SQLite database. All access is serialized
/// through a single connection guarded by a `Mutex`.
pub struct LocalDb {
    conn: Mutex<Connection>,
}
346
347impl LocalDb {
348 pub fn open() -> Result<Self> {
351 let path = default_db_path()?;
352 Self::open_path(&path)
353 }
354
    /// Opens (or creates) the database at `path`, creating parent
    /// directories first and applying the latest schema.
    ///
    /// NOTE(review): `&Path` would be the more idiomatic parameter type;
    /// left as `&PathBuf` because `open_connection_with_latest_schema`'s
    /// signature is not visible here — confirm before changing.
    pub fn open_path(path: &PathBuf) -> Result<Self> {
        if let Some(parent) = path.parent() {
            std::fs::create_dir_all(parent)
                .with_context(|| format!("create dir for {}", path.display()))?;
        }
        let conn = open_connection_with_latest_schema(path)
            .with_context(|| format!("open local db {}", path.display()))?;
        Ok(Self {
            conn: Mutex::new(conn),
        })
    }
367
    /// Locks and returns the shared connection. Panics if a previous
    /// holder panicked while holding the lock (poisoned mutex).
    fn conn(&self) -> std::sync::MutexGuard<'_, Connection> {
        self.conn.lock().expect("local db mutex poisoned")
    }
371
    /// Inserts or refreshes a locally-parsed session, plus its sync row.
    ///
    /// `git` is the context detected from the repository on disk; git
    /// attributes embedded in the session itself take precedence when
    /// merging. On conflict, the four git_* columns only overwrite
    /// existing values when the session carried its own git attributes
    /// (parameter ?26); every other column always overwrites.
    pub fn upsert_local_session(
        &self,
        session: &Session,
        source_path: &str,
        git: &GitContext,
    ) -> Result<()> {
        let title = session.context.title.as_deref();
        let description = session.context.description.as_deref();
        // Tags are stored as one comma-joined string; empty list -> NULL.
        let tags = if session.context.tags.is_empty() {
            None
        } else {
            Some(session.context.tags.join(","))
        };
        let created_at = session.context.created_at.to_rfc3339();
        let cwd = working_directory(session).map(String::from);
        let is_auxiliary = is_auxiliary_session(session);

        let (files_modified, files_read, has_errors) =
            opensession_core::extract::extract_file_metadata(session);
        let max_active_agents = opensession_core::agent_metrics::max_active_agents(session) as i64;
        // The source path layout is treated as authoritative for the tool name.
        let normalized_tool =
            normalize_tool_for_source_path(&session.agent.tool, Some(source_path));
        let git_from_session = git_context_from_session_attributes(session);
        let has_session_git = git_context_has_any_field(&git_from_session);
        let merged_git = merge_git_context(&git_from_session, git);

        let conn = self.conn();
        // ?26 (has_session_git) switches the git_* columns between
        // overwrite (session supplied git data) and keep-existing
        // (COALESCE falls back to excluded only when current is NULL).
        conn.execute(
            "INSERT INTO sessions \
             (id, team_id, tool, agent_provider, agent_model, \
              title, description, tags, created_at, \
              message_count, user_message_count, task_count, event_count, duration_seconds, \
              total_input_tokens, total_output_tokens, body_storage_key, \
              git_remote, git_branch, git_commit, git_repo_name, working_directory, \
              files_modified, files_read, has_errors, max_active_agents, is_auxiliary) \
             VALUES (?1,'personal',?2,?3,?4,?5,?6,?7,?8,?9,?10,?11,?12,?13,?14,?15,'',?16,?17,?18,?19,?20,?21,?22,?23,?24,?25) \
             ON CONFLICT(id) DO UPDATE SET \
             tool=excluded.tool, agent_provider=excluded.agent_provider, \
             agent_model=excluded.agent_model, \
             title=excluded.title, description=excluded.description, \
             tags=excluded.tags, \
             message_count=excluded.message_count, user_message_count=excluded.user_message_count, \
             task_count=excluded.task_count, \
             event_count=excluded.event_count, duration_seconds=excluded.duration_seconds, \
             total_input_tokens=excluded.total_input_tokens, \
             total_output_tokens=excluded.total_output_tokens, \
             git_remote=CASE WHEN ?26=1 THEN excluded.git_remote ELSE COALESCE(git_remote, excluded.git_remote) END, \
             git_branch=CASE WHEN ?26=1 THEN excluded.git_branch ELSE COALESCE(git_branch, excluded.git_branch) END, \
             git_commit=CASE WHEN ?26=1 THEN excluded.git_commit ELSE COALESCE(git_commit, excluded.git_commit) END, \
             git_repo_name=CASE WHEN ?26=1 THEN excluded.git_repo_name ELSE COALESCE(git_repo_name, excluded.git_repo_name) END, \
             working_directory=excluded.working_directory, \
             files_modified=excluded.files_modified, files_read=excluded.files_read, \
             has_errors=excluded.has_errors, \
             max_active_agents=excluded.max_active_agents, \
             is_auxiliary=excluded.is_auxiliary",
            params![
                &session.session_id,
                &normalized_tool,
                &session.agent.provider,
                &session.agent.model,
                title,
                description,
                &tags,
                &created_at,
                session.stats.message_count as i64,
                session.stats.user_message_count as i64,
                session.stats.task_count as i64,
                session.stats.event_count as i64,
                session.stats.duration_seconds as i64,
                session.stats.total_input_tokens as i64,
                session.stats.total_output_tokens as i64,
                &merged_git.remote,
                &merged_git.branch,
                &merged_git.commit,
                &merged_git.repo_name,
                &cwd,
                &files_modified,
                &files_read,
                has_errors,
                max_active_agents,
                is_auxiliary as i64,
                has_session_git as i64,
            ],
        )?;

        // Sync row starts as 'local_only'; re-upserting only refreshes the
        // source path and never regresses an existing sync status.
        conn.execute(
            "INSERT INTO session_sync (session_id, source_path, sync_status) \
             VALUES (?1, ?2, 'local_only') \
             ON CONFLICT(session_id) DO UPDATE SET source_path=excluded.source_path",
            params![&session.session_id, source_path],
        )?;
        Ok(())
    }
469
    /// Inserts or refreshes a session summary fetched from the server.
    ///
    /// Remote data always overwrites the metadata columns. The paired
    /// session_sync row is created as 'remote_only'; if the session was
    /// already known locally as 'local_only' it is promoted to 'synced',
    /// and any other existing status is left untouched.
    pub fn upsert_remote_session(&self, summary: &RemoteSessionSummary) -> Result<()> {
        let conn = self.conn();
        // Remote summaries never represent auxiliary sessions: is_auxiliary
        // is hardcoded to 0 on insert.
        conn.execute(
            "INSERT INTO sessions \
             (id, user_id, team_id, tool, agent_provider, agent_model, \
              title, description, tags, created_at, uploaded_at, \
              message_count, task_count, event_count, duration_seconds, \
              total_input_tokens, total_output_tokens, body_storage_key, \
              git_remote, git_branch, git_commit, git_repo_name, \
              pr_number, pr_url, working_directory, \
              files_modified, files_read, has_errors, max_active_agents, is_auxiliary) \
             VALUES (?1,?2,?3,?4,?5,?6,?7,?8,?9,?10,?11,?12,?13,?14,?15,?16,?17,'',?18,?19,?20,?21,?22,?23,?24,?25,?26,?27,?28,0) \
             ON CONFLICT(id) DO UPDATE SET \
             title=excluded.title, description=excluded.description, \
             tags=excluded.tags, uploaded_at=excluded.uploaded_at, \
             message_count=excluded.message_count, task_count=excluded.task_count, \
             event_count=excluded.event_count, duration_seconds=excluded.duration_seconds, \
             total_input_tokens=excluded.total_input_tokens, \
             total_output_tokens=excluded.total_output_tokens, \
             git_remote=excluded.git_remote, git_branch=excluded.git_branch, \
             git_commit=excluded.git_commit, git_repo_name=excluded.git_repo_name, \
             pr_number=excluded.pr_number, pr_url=excluded.pr_url, \
             working_directory=excluded.working_directory, \
             files_modified=excluded.files_modified, files_read=excluded.files_read, \
             has_errors=excluded.has_errors, \
             max_active_agents=excluded.max_active_agents, \
             is_auxiliary=excluded.is_auxiliary",
            params![
                &summary.id,
                &summary.user_id,
                &summary.team_id,
                &summary.tool,
                &summary.agent_provider,
                &summary.agent_model,
                &summary.title,
                &summary.description,
                &summary.tags,
                &summary.created_at,
                &summary.uploaded_at,
                summary.message_count,
                summary.task_count,
                summary.event_count,
                summary.duration_seconds,
                summary.total_input_tokens,
                summary.total_output_tokens,
                &summary.git_remote,
                &summary.git_branch,
                &summary.git_commit,
                &summary.git_repo_name,
                summary.pr_number,
                &summary.pr_url,
                &summary.working_directory,
                &summary.files_modified,
                &summary.files_read,
                summary.has_errors,
                summary.max_active_agents,
            ],
        )?;

        conn.execute(
            "INSERT INTO session_sync (session_id, sync_status) \
             VALUES (?1, 'remote_only') \
             ON CONFLICT(session_id) DO UPDATE SET \
             sync_status = CASE WHEN session_sync.sync_status = 'local_only' THEN 'synced' ELSE session_sync.sync_status END",
            params![&summary.id],
        )?;
        Ok(())
    }
541
    /// Builds the WHERE clause and positional parameters for `filter`.
    ///
    /// Returns the clause text (without the `WHERE` keyword) together
    /// with the boxed parameter values in matching `?N` order — `idx`
    /// tracks the next free 1-based placeholder, so clause text and
    /// parameter pushes must stay in lockstep. Auxiliary sessions are
    /// always excluded; `limit`/`offset` are handled by the callers.
    fn build_local_session_where_clause(
        filter: &LocalSessionFilter,
    ) -> (String, Vec<Box<dyn rusqlite::types::ToSql>>) {
        // "1=1" keeps the AND-joining below unconditional.
        let mut where_clauses = vec![
            "1=1".to_string(),
            "COALESCE(s.is_auxiliary, 0) = 0".to_string(),
        ];
        let mut param_values: Vec<Box<dyn rusqlite::types::ToSql>> = Vec::new();
        let mut idx = 1u32;

        if let Some(ref team_id) = filter.team_id {
            where_clauses.push(format!("s.team_id = ?{idx}"));
            param_values.push(Box::new(team_id.clone()));
            idx += 1;
        }

        if let Some(ref sync_status) = filter.sync_status {
            // Sessions without a sync row compare as 'unknown'.
            where_clauses.push(format!("COALESCE(ss.sync_status, 'unknown') = ?{idx}"));
            param_values.push(Box::new(sync_status.clone()));
            idx += 1;
        }

        if let Some(ref repo) = filter.git_repo_name {
            where_clauses.push(format!("s.git_repo_name = ?{idx}"));
            param_values.push(Box::new(repo.clone()));
            idx += 1;
        }

        if let Some(ref tool) = filter.tool {
            where_clauses.push(format!("s.tool = ?{idx}"));
            param_values.push(Box::new(tool.clone()));
            idx += 1;
        }

        if let Some(ref search) = filter.search {
            // Same pattern bound three times: title, description, tags.
            let like = format!("%{search}%");
            where_clauses.push(format!(
                "(s.title LIKE ?{i1} OR s.description LIKE ?{i2} OR s.tags LIKE ?{i3})",
                i1 = idx,
                i2 = idx + 1,
                i3 = idx + 2,
            ));
            param_values.push(Box::new(like.clone()));
            param_values.push(Box::new(like.clone()));
            param_values.push(Box::new(like));
            idx += 3;
        }

        if filter.exclude_low_signal {
            // A "low signal" session has no messages, no tasks, at most two
            // events, and no title.
            where_clauses.push(
                "NOT (COALESCE(s.message_count, 0) = 0 \
                 AND COALESCE(s.user_message_count, 0) = 0 \
                 AND COALESCE(s.task_count, 0) = 0 \
                 AND COALESCE(s.event_count, 0) <= 2 \
                 AND (s.title IS NULL OR TRIM(s.title) = ''))"
                    .to_string(),
            );
        }

        let interval = match filter.time_range {
            LocalTimeRange::Hours24 => Some("-1 day"),
            LocalTimeRange::Days7 => Some("-7 days"),
            LocalTimeRange::Days30 => Some("-30 days"),
            LocalTimeRange::All => None,
        };
        if let Some(interval) = interval {
            // Final predicate: idx is not bumped afterwards because no
            // further placeholders are appended.
            where_clauses.push(format!("datetime(s.created_at) >= datetime('now', ?{idx})"));
            param_values.push(Box::new(interval.to_string()));
        }

        (where_clauses.join(" AND "), param_values)
    }
616
617 pub fn list_sessions(&self, filter: &LocalSessionFilter) -> Result<Vec<LocalSessionRow>> {
618 let (where_str, mut param_values) = Self::build_local_session_where_clause(filter);
619 let order_clause = match filter.sort {
620 LocalSortOrder::Popular => "s.message_count DESC, s.created_at DESC",
621 LocalSortOrder::Longest => "s.duration_seconds DESC, s.created_at DESC",
622 LocalSortOrder::Recent => "s.created_at DESC",
623 };
624
625 let mut sql = format!(
626 "SELECT {LOCAL_SESSION_COLUMNS} \
627 {FROM_CLAUSE} WHERE {where_str} \
628 ORDER BY {order_clause}"
629 );
630
631 if let Some(limit) = filter.limit {
632 sql.push_str(" LIMIT ?");
633 param_values.push(Box::new(limit));
634 if let Some(offset) = filter.offset {
635 sql.push_str(" OFFSET ?");
636 param_values.push(Box::new(offset));
637 }
638 }
639
640 let param_refs: Vec<&dyn rusqlite::types::ToSql> =
641 param_values.iter().map(|p| p.as_ref()).collect();
642 let conn = self.conn();
643 let mut stmt = conn.prepare(&sql)?;
644 let rows = stmt.query_map(param_refs.as_slice(), row_to_local_session)?;
645
646 let mut result = Vec::new();
647 for row in rows {
648 result.push(row?);
649 }
650
651 Ok(result)
652 }
653
654 pub fn count_sessions_filtered(&self, filter: &LocalSessionFilter) -> Result<i64> {
656 let mut count_filter = filter.clone();
657 count_filter.limit = None;
658 count_filter.offset = None;
659 let (where_str, param_values) = Self::build_local_session_where_clause(&count_filter);
660 let sql = format!("SELECT COUNT(*) {FROM_CLAUSE} WHERE {where_str}");
661 let param_refs: Vec<&dyn rusqlite::types::ToSql> =
662 param_values.iter().map(|p| p.as_ref()).collect();
663 let conn = self.conn();
664 let count = conn.query_row(&sql, param_refs.as_slice(), |row| row.get(0))?;
665 Ok(count)
666 }
667
668 pub fn list_session_tools(&self, filter: &LocalSessionFilter) -> Result<Vec<String>> {
670 let mut tool_filter = filter.clone();
671 tool_filter.tool = None;
672 tool_filter.limit = None;
673 tool_filter.offset = None;
674 let (where_str, param_values) = Self::build_local_session_where_clause(&tool_filter);
675 let sql = format!(
676 "SELECT DISTINCT s.tool \
677 {FROM_CLAUSE} WHERE {where_str} \
678 ORDER BY s.tool ASC"
679 );
680 let param_refs: Vec<&dyn rusqlite::types::ToSql> =
681 param_values.iter().map(|p| p.as_ref()).collect();
682 let conn = self.conn();
683 let mut stmt = conn.prepare(&sql)?;
684 let rows = stmt.query_map(param_refs.as_slice(), |row| row.get::<_, String>(0))?;
685
686 let mut tools = Vec::new();
687 for row in rows {
688 let tool = row?;
689 if !tool.trim().is_empty() {
690 tools.push(tool);
691 }
692 }
693 Ok(tools)
694 }
695
    /// Lists non-auxiliary sessions matching a `LogFilter`, newest first.
    ///
    /// Predicates are appended in a fixed order with 1-based positional
    /// placeholders; `idx` tracks the next free placeholder, so clause
    /// text and parameter pushes must stay in lockstep.
    pub fn list_sessions_log(&self, filter: &LogFilter) -> Result<Vec<LocalSessionRow>> {
        // "1=1" keeps the AND-joining unconditional; auxiliary sessions
        // are always excluded.
        let mut where_clauses = vec![
            "1=1".to_string(),
            "COALESCE(s.is_auxiliary, 0) = 0".to_string(),
        ];
        let mut param_values: Vec<Box<dyn rusqlite::types::ToSql>> = Vec::new();
        let mut idx = 1u32;

        if let Some(ref tool) = filter.tool {
            where_clauses.push(format!("s.tool = ?{idx}"));
            param_values.push(Box::new(tool.clone()));
            idx += 1;
        }

        if let Some(ref model) = filter.model {
            // User-facing `*` wildcards become SQL `%` wildcards.
            let like = model.replace('*', "%");
            where_clauses.push(format!("s.agent_model LIKE ?{idx}"));
            param_values.push(Box::new(like));
            idx += 1;
        }

        if let Some(ref since) = filter.since {
            // Inclusive lower bound on created_at.
            where_clauses.push(format!("s.created_at >= ?{idx}"));
            param_values.push(Box::new(since.clone()));
            idx += 1;
        }

        if let Some(ref before) = filter.before {
            // Exclusive upper bound on created_at.
            where_clauses.push(format!("s.created_at < ?{idx}"));
            param_values.push(Box::new(before.clone()));
            idx += 1;
        }

        if let Some(ref touches) = filter.touches {
            // Match the quoted path inside the serialized files_modified list.
            let like = format!("%\"{touches}\"%");
            where_clauses.push(format!("s.files_modified LIKE ?{idx}"));
            param_values.push(Box::new(like));
            idx += 1;
        }

        if let Some(ref grep) = filter.grep {
            // Same pattern bound three times: title, description, tags.
            let like = format!("%{grep}%");
            where_clauses.push(format!(
                "(s.title LIKE ?{i1} OR s.description LIKE ?{i2} OR s.tags LIKE ?{i3})",
                i1 = idx,
                i2 = idx + 1,
                i3 = idx + 2,
            ));
            param_values.push(Box::new(like.clone()));
            param_values.push(Box::new(like.clone()));
            param_values.push(Box::new(like));
            idx += 3;
        }

        // Only Some(true) constrains; Some(false)/None are no-ops.
        if let Some(true) = filter.has_errors {
            where_clauses.push("s.has_errors = 1".to_string());
        }

        if let Some(ref wd) = filter.working_directory {
            // Prefix match on the working directory.
            where_clauses.push(format!("s.working_directory LIKE ?{idx}"));
            param_values.push(Box::new(format!("{wd}%")));
            idx += 1;
        }

        if let Some(ref repo) = filter.git_repo_name {
            where_clauses.push(format!("s.git_repo_name = ?{idx}"));
            param_values.push(Box::new(repo.clone()));
            idx += 1;
        }

        // idx's final value is intentionally unused once the last
        // positional predicate has been bound.
        let _ = idx; let where_str = where_clauses.join(" AND ");
        let mut sql = format!(
            "SELECT {LOCAL_SESSION_COLUMNS} \
             {FROM_CLAUSE} WHERE {where_str} \
             ORDER BY s.created_at DESC"
        );

        // OFFSET is only legal together with LIMIT, hence the nesting;
        // LIMIT/OFFSET use unnumbered placeholders appended last.
        if let Some(limit) = filter.limit {
            sql.push_str(" LIMIT ?");
            param_values.push(Box::new(limit));
            if let Some(offset) = filter.offset {
                sql.push_str(" OFFSET ?");
                param_values.push(Box::new(offset));
            }
        }

        let param_refs: Vec<&dyn rusqlite::types::ToSql> =
            param_values.iter().map(|p| p.as_ref()).collect();
        let conn = self.conn();
        let mut stmt = conn.prepare(&sql)?;
        let rows = stmt.query_map(param_refs.as_slice(), row_to_local_session)?;

        let mut result = Vec::new();
        for row in rows {
            result.push(row?);
        }
        Ok(result)
    }
799
800 pub fn get_sessions_by_tool_latest(
802 &self,
803 tool: &str,
804 count: u32,
805 ) -> Result<Vec<LocalSessionRow>> {
806 let sql = format!(
807 "SELECT {LOCAL_SESSION_COLUMNS} \
808 {FROM_CLAUSE} WHERE s.tool = ?1 AND COALESCE(s.is_auxiliary, 0) = 0 \
809 ORDER BY s.created_at DESC"
810 );
811 let conn = self.conn();
812 let mut stmt = conn.prepare(&sql)?;
813 let rows = stmt.query_map(params![tool], row_to_local_session)?;
814 let mut result = Vec::new();
815 for row in rows {
816 result.push(row?);
817 }
818
819 result.truncate(count as usize);
820 Ok(result)
821 }
822
823 pub fn get_sessions_latest(&self, count: u32) -> Result<Vec<LocalSessionRow>> {
825 let sql = format!(
826 "SELECT {LOCAL_SESSION_COLUMNS} \
827 {FROM_CLAUSE} WHERE COALESCE(s.is_auxiliary, 0) = 0 \
828 ORDER BY s.created_at DESC"
829 );
830 let conn = self.conn();
831 let mut stmt = conn.prepare(&sql)?;
832 let rows = stmt.query_map([], row_to_local_session)?;
833 let mut result = Vec::new();
834 for row in rows {
835 result.push(row?);
836 }
837
838 result.truncate(count as usize);
839 Ok(result)
840 }
841
842 pub fn get_session_by_tool_offset(
844 &self,
845 tool: &str,
846 offset: u32,
847 ) -> Result<Option<LocalSessionRow>> {
848 let sql = format!(
849 "SELECT {LOCAL_SESSION_COLUMNS} \
850 {FROM_CLAUSE} WHERE s.tool = ?1 AND COALESCE(s.is_auxiliary, 0) = 0 \
851 ORDER BY s.created_at DESC"
852 );
853 let conn = self.conn();
854 let mut stmt = conn.prepare(&sql)?;
855 let rows = stmt.query_map(params![tool], row_to_local_session)?;
856 let result = rows.collect::<Result<Vec<_>, _>>()?;
857 Ok(result.into_iter().nth(offset as usize))
858 }
859
860 pub fn get_session_by_offset(&self, offset: u32) -> Result<Option<LocalSessionRow>> {
862 let sql = format!(
863 "SELECT {LOCAL_SESSION_COLUMNS} \
864 {FROM_CLAUSE} WHERE COALESCE(s.is_auxiliary, 0) = 0 \
865 ORDER BY s.created_at DESC"
866 );
867 let conn = self.conn();
868 let mut stmt = conn.prepare(&sql)?;
869 let rows = stmt.query_map([], row_to_local_session)?;
870 let result = rows.collect::<Result<Vec<_>, _>>()?;
871 Ok(result.into_iter().nth(offset as usize))
872 }
873
874 pub fn get_session_source_path(&self, session_id: &str) -> Result<Option<String>> {
876 let conn = self.conn();
877 let result = conn
878 .query_row(
879 "SELECT source_path FROM session_sync WHERE session_id = ?1",
880 params![session_id],
881 |row| row.get(0),
882 )
883 .optional()?;
884
885 Ok(result)
886 }
887
888 pub fn session_count(&self) -> Result<i64> {
890 let count = self
891 .conn()
892 .query_row("SELECT COUNT(*) FROM sessions", [], |row| row.get(0))?;
893 Ok(count)
894 }
895
896 pub fn delete_session(&self, session_id: &str) -> Result<()> {
899 let conn = self.conn();
900 conn.execute(
901 "DELETE FROM body_cache WHERE session_id = ?1",
902 params![session_id],
903 )?;
904 conn.execute(
905 "DELETE FROM session_sync WHERE session_id = ?1",
906 params![session_id],
907 )?;
908 conn.execute("DELETE FROM sessions WHERE id = ?1", params![session_id])?;
909 Ok(())
910 }
911
912 pub fn get_sync_cursor(&self, team_id: &str) -> Result<Option<String>> {
915 let cursor = self
916 .conn()
917 .query_row(
918 "SELECT cursor FROM sync_cursors WHERE team_id = ?1",
919 params![team_id],
920 |row| row.get(0),
921 )
922 .optional()?;
923 Ok(cursor)
924 }
925
926 pub fn set_sync_cursor(&self, team_id: &str, cursor: &str) -> Result<()> {
927 self.conn().execute(
928 "INSERT INTO sync_cursors (team_id, cursor, updated_at) \
929 VALUES (?1, ?2, datetime('now')) \
930 ON CONFLICT(team_id) DO UPDATE SET cursor=excluded.cursor, updated_at=datetime('now')",
931 params![team_id, cursor],
932 )?;
933 Ok(())
934 }
935
936 pub fn pending_uploads(&self, team_id: &str) -> Result<Vec<LocalSessionRow>> {
940 let sql = format!(
941 "SELECT {LOCAL_SESSION_COLUMNS} \
942 FROM sessions s \
943 INNER JOIN session_sync ss ON ss.session_id = s.id \
944 LEFT JOIN users u ON u.id = s.user_id \
945 WHERE ss.sync_status = 'local_only' AND s.team_id = ?1 AND COALESCE(s.is_auxiliary, 0) = 0 \
946 ORDER BY s.created_at ASC"
947 );
948 let conn = self.conn();
949 let mut stmt = conn.prepare(&sql)?;
950 let rows = stmt.query_map(params![team_id], row_to_local_session)?;
951 let mut result = Vec::new();
952 for row in rows {
953 result.push(row?);
954 }
955 Ok(result)
956 }
957
958 pub fn mark_synced(&self, session_id: &str) -> Result<()> {
959 self.conn().execute(
960 "UPDATE session_sync SET sync_status = 'synced', last_synced_at = datetime('now') \
961 WHERE session_id = ?1",
962 params![session_id],
963 )?;
964 Ok(())
965 }
966
967 pub fn was_uploaded_after(
969 &self,
970 source_path: &str,
971 modified: &chrono::DateTime<chrono::Utc>,
972 ) -> Result<bool> {
973 let result: Option<String> = self
974 .conn()
975 .query_row(
976 "SELECT last_synced_at FROM session_sync \
977 WHERE source_path = ?1 AND sync_status = 'synced' AND last_synced_at IS NOT NULL",
978 params![source_path],
979 |row| row.get(0),
980 )
981 .optional()?;
982
983 if let Some(synced_at) = result {
984 if let Ok(dt) = chrono::DateTime::parse_from_rfc3339(&synced_at) {
985 return Ok(dt >= *modified);
986 }
987 }
988 Ok(false)
989 }
990
991 pub fn cache_body(&self, session_id: &str, body: &[u8]) -> Result<()> {
994 self.conn().execute(
995 "INSERT INTO body_cache (session_id, body, cached_at) \
996 VALUES (?1, ?2, datetime('now')) \
997 ON CONFLICT(session_id) DO UPDATE SET body=excluded.body, cached_at=datetime('now')",
998 params![session_id, body],
999 )?;
1000 Ok(())
1001 }
1002
1003 pub fn get_cached_body(&self, session_id: &str) -> Result<Option<Vec<u8>>> {
1004 let body = self
1005 .conn()
1006 .query_row(
1007 "SELECT body FROM body_cache WHERE session_id = ?1",
1008 params![session_id],
1009 |row| row.get(0),
1010 )
1011 .optional()?;
1012 Ok(body)
1013 }
1014
1015 pub fn find_active_session_for_repo(
1019 &self,
1020 repo_path: &str,
1021 since_minutes: u32,
1022 ) -> Result<Option<LocalSessionRow>> {
1023 let sql = format!(
1024 "SELECT {LOCAL_SESSION_COLUMNS} \
1025 {FROM_CLAUSE} \
1026 WHERE s.working_directory LIKE ?1 \
1027 AND COALESCE(s.is_auxiliary, 0) = 0 \
1028 AND s.created_at >= datetime('now', ?2) \
1029 ORDER BY s.created_at DESC LIMIT 1"
1030 );
1031 let since = format!("-{since_minutes} minutes");
1032 let like = format!("{repo_path}%");
1033 let conn = self.conn();
1034 let mut stmt = conn.prepare(&sql)?;
1035 let row = stmt
1036 .query_map(params![like, since], row_to_local_session)?
1037 .next()
1038 .transpose()?;
1039 Ok(row)
1040 }
1041
1042 pub fn existing_session_ids(&self) -> std::collections::HashSet<String> {
1044 let conn = self.conn();
1045 let mut stmt = conn
1046 .prepare("SELECT id FROM sessions")
1047 .unwrap_or_else(|_| panic!("failed to prepare existing_session_ids query"));
1048 let rows = stmt.query_map([], |row| row.get::<_, String>(0));
1049 let mut set = std::collections::HashSet::new();
1050 if let Ok(rows) = rows {
1051 for row in rows.flatten() {
1052 set.insert(row);
1053 }
1054 }
1055 set
1056 }
1057
    /// Refreshes the recomputed columns of an existing session row
    /// (title, description, counts, token totals, file metadata,
    /// agent metrics, auxiliary flag). No-op when the id is absent.
    pub fn update_session_stats(&self, session: &Session) -> Result<()> {
        let title = session.context.title.as_deref();
        let description = session.context.description.as_deref();
        let (files_modified, files_read, has_errors) =
            opensession_core::extract::extract_file_metadata(session);
        let max_active_agents = opensession_core::agent_metrics::max_active_agents(session) as i64;
        let is_auxiliary = is_auxiliary_session(session);

        // Positional params ?2..?15 must line up with the SET list order.
        self.conn().execute(
            "UPDATE sessions SET \
             title=?2, description=?3, \
             message_count=?4, user_message_count=?5, task_count=?6, \
             event_count=?7, duration_seconds=?8, \
             total_input_tokens=?9, total_output_tokens=?10, \
             files_modified=?11, files_read=?12, has_errors=?13, \
             max_active_agents=?14, is_auxiliary=?15 \
             WHERE id=?1",
            params![
                &session.session_id,
                title,
                description,
                session.stats.message_count as i64,
                session.stats.user_message_count as i64,
                session.stats.task_count as i64,
                session.stats.event_count as i64,
                session.stats.duration_seconds as i64,
                session.stats.total_input_tokens as i64,
                session.stats.total_output_tokens as i64,
                &files_modified,
                &files_read,
                has_errors,
                max_active_agents,
                is_auxiliary as i64,
            ],
        )?;
        Ok(())
    }
1096
1097 pub fn set_session_sync_path(&self, session_id: &str, source_path: &str) -> Result<()> {
1099 self.conn().execute(
1100 "INSERT INTO session_sync (session_id, source_path) \
1101 VALUES (?1, ?2) \
1102 ON CONFLICT(session_id) DO UPDATE SET source_path = excluded.source_path",
1103 params![session_id, source_path],
1104 )?;
1105 Ok(())
1106 }
1107
1108 pub fn list_repos(&self) -> Result<Vec<String>> {
1110 let conn = self.conn();
1111 let mut stmt = conn.prepare(
1112 "SELECT DISTINCT git_repo_name FROM sessions \
1113 WHERE git_repo_name IS NOT NULL AND COALESCE(is_auxiliary, 0) = 0 \
1114 ORDER BY git_repo_name ASC",
1115 )?;
1116 let rows = stmt.query_map([], |row| row.get(0))?;
1117 let mut result = Vec::new();
1118 for row in rows {
1119 result.push(row?);
1120 }
1121 Ok(result)
1122 }
1123
1124 pub fn list_working_directories(&self) -> Result<Vec<String>> {
1126 let conn = self.conn();
1127 let mut stmt = conn.prepare(
1128 "SELECT DISTINCT working_directory FROM sessions \
1129 WHERE working_directory IS NOT NULL AND TRIM(working_directory) <> '' \
1130 AND COALESCE(is_auxiliary, 0) = 0 \
1131 ORDER BY working_directory ASC",
1132 )?;
1133 let rows = stmt.query_map([], |row| row.get(0))?;
1134 let mut result = Vec::new();
1135 for row in rows {
1136 result.push(row?);
1137 }
1138 Ok(result)
1139 }
1140}
1141
1142fn open_connection_with_latest_schema(path: &PathBuf) -> Result<Connection> {
1145 let conn = Connection::open(path).with_context(|| format!("open db {}", path.display()))?;
1146 conn.execute_batch("PRAGMA journal_mode=WAL;")?;
1147
1148 conn.execute_batch("PRAGMA foreign_keys=OFF;")?;
1150
1151 apply_local_migrations(&conn)?;
1152 repair_session_tools_from_source_path(&conn)?;
1153 validate_local_schema(&conn)?;
1154
1155 Ok(conn)
1156}
1157
1158fn apply_local_migrations(conn: &Connection) -> Result<()> {
1159 conn.execute_batch(
1160 "CREATE TABLE IF NOT EXISTS _migrations (
1161 id INTEGER PRIMARY KEY,
1162 name TEXT NOT NULL UNIQUE,
1163 applied_at TEXT NOT NULL DEFAULT (datetime('now'))
1164 );",
1165 )
1166 .context("create _migrations table for local db")?;
1167
1168 for (name, sql) in MIGRATIONS.iter().chain(LOCAL_MIGRATIONS.iter()) {
1169 let already_applied: bool = conn
1170 .query_row(
1171 "SELECT COUNT(*) > 0 FROM _migrations WHERE name = ?1",
1172 [name],
1173 |row| row.get(0),
1174 )
1175 .unwrap_or(false);
1176
1177 if already_applied {
1178 continue;
1179 }
1180
1181 conn.execute_batch(sql)
1182 .with_context(|| format!("apply local migration {name}"))?;
1183
1184 conn.execute(
1185 "INSERT OR IGNORE INTO _migrations (name) VALUES (?1)",
1186 [name],
1187 )
1188 .with_context(|| format!("record local migration {name}"))?;
1189 }
1190
1191 Ok(())
1192}
1193
1194fn validate_local_schema(conn: &Connection) -> Result<()> {
1195 let sql = format!("SELECT {LOCAL_SESSION_COLUMNS} {FROM_CLAUSE} WHERE 1=0");
1196 conn.prepare(&sql)
1197 .map(|_| ())
1198 .context("validate local session schema")
1199}
1200
1201fn repair_session_tools_from_source_path(conn: &Connection) -> Result<()> {
1202 let mut stmt = conn.prepare(
1203 "SELECT s.id, s.tool, ss.source_path \
1204 FROM sessions s \
1205 LEFT JOIN session_sync ss ON ss.session_id = s.id \
1206 WHERE ss.source_path IS NOT NULL",
1207 )?;
1208 let rows = stmt.query_map([], |row| {
1209 Ok((
1210 row.get::<_, String>(0)?,
1211 row.get::<_, String>(1)?,
1212 row.get::<_, Option<String>>(2)?,
1213 ))
1214 })?;
1215
1216 let mut updates: Vec<(String, String)> = Vec::new();
1217 for row in rows {
1218 let (id, current_tool, source_path) = row?;
1219 let normalized = normalize_tool_for_source_path(¤t_tool, source_path.as_deref());
1220 if normalized != current_tool {
1221 updates.push((id, normalized));
1222 }
1223 }
1224 drop(stmt);
1225
1226 for (id, tool) in updates {
1227 conn.execute(
1228 "UPDATE sessions SET tool = ?1 WHERE id = ?2",
1229 params![tool, id],
1230 )?;
1231 }
1232
1233 Ok(())
1234}
1235
/// Column list for the canonical local-session SELECT.
///
/// Order is load-bearing: `row_to_local_session` reads these by ordinal
/// index (0..=33), so any change here must be mirrored there.
pub const LOCAL_SESSION_COLUMNS: &str = "\
s.id, ss.source_path, COALESCE(ss.sync_status, 'unknown') AS sync_status, ss.last_synced_at, \
s.user_id, u.nickname, s.team_id, s.tool, s.agent_provider, s.agent_model, \
s.title, s.description, s.tags, s.created_at, s.uploaded_at, \
s.message_count, COALESCE(s.user_message_count, 0), s.task_count, s.event_count, s.duration_seconds, \
s.total_input_tokens, s.total_output_tokens, \
s.git_remote, s.git_branch, s.git_commit, s.git_repo_name, \
s.pr_number, s.pr_url, s.working_directory, \
s.files_modified, s.files_read, s.has_errors, COALESCE(s.max_active_agents, 1), COALESCE(s.is_auxiliary, 0)";
1246
/// Maps one row of a `LOCAL_SESSION_COLUMNS` SELECT into a `LocalSessionRow`.
///
/// Ordinal indices here must stay in lockstep with the column order of
/// `LOCAL_SESSION_COLUMNS` (0..=33).
fn row_to_local_session(row: &rusqlite::Row) -> rusqlite::Result<LocalSessionRow> {
    let source_path: Option<String> = row.get(1)?;
    let tool: String = row.get(7)?;
    // Re-derive the tool from the source path on every read (mirrors the
    // repair pass run at open), so stale stored values never leak out.
    let normalized_tool = normalize_tool_for_source_path(&tool, source_path.as_deref());

    Ok(LocalSessionRow {
        id: row.get(0)?,
        source_path,
        sync_status: row.get(2)?,
        last_synced_at: row.get(3)?,
        user_id: row.get(4)?,
        nickname: row.get(5)?,
        team_id: row.get(6)?,
        tool: normalized_tool,
        agent_provider: row.get(8)?,
        agent_model: row.get(9)?,
        title: row.get(10)?,
        description: row.get(11)?,
        tags: row.get(12)?,
        created_at: row.get(13)?,
        uploaded_at: row.get(14)?,
        message_count: row.get(15)?,
        user_message_count: row.get(16)?,
        task_count: row.get(17)?,
        event_count: row.get(18)?,
        duration_seconds: row.get(19)?,
        total_input_tokens: row.get(20)?,
        total_output_tokens: row.get(21)?,
        git_remote: row.get(22)?,
        git_branch: row.get(23)?,
        git_commit: row.get(24)?,
        git_repo_name: row.get(25)?,
        pr_number: row.get(26)?,
        pr_url: row.get(27)?,
        working_directory: row.get(28)?,
        files_modified: row.get(29)?,
        files_read: row.get(30)?,
        // Flag columns tolerate NULL/unexpected types by falling back to
        // their defaults instead of failing the whole row.
        has_errors: row.get::<_, i64>(31).unwrap_or(0) != 0,
        max_active_agents: row.get(32).unwrap_or(1),
        is_auxiliary: row.get::<_, i64>(33).unwrap_or(0) != 0,
    })
}
1289
1290fn default_db_path() -> Result<PathBuf> {
1291 let home = std::env::var("HOME")
1292 .or_else(|_| std::env::var("USERPROFILE"))
1293 .context("Could not determine home directory")?;
1294 Ok(PathBuf::from(home)
1295 .join(".local")
1296 .join("share")
1297 .join("opensession")
1298 .join("local.db"))
1299}
1300
1301#[cfg(test)]
1302mod tests {
1303 use super::*;
1304
1305 use std::fs::{create_dir_all, write};
1306 use tempfile::tempdir;
1307
    /// Opens a LocalDb backed by a file in a persisted temp directory.
    ///
    /// `keep()` deliberately disables cleanup: if the TempDir were dropped
    /// at the end of this helper, the directory (and db file) would vanish
    /// before the test body ran.
    fn test_db() -> LocalDb {
        let dir = tempdir().unwrap();
        let path = dir.keep().join("test.db");
        LocalDb::open_path(&path).unwrap()
    }
1313
    /// Fresh temporary directory for fixture files; removed when dropped.
    fn temp_root() -> tempfile::TempDir {
        tempdir().unwrap()
    }
1317
    /// Builds a minimal LocalSessionRow with the given id/tool/source path.
    /// All other fields are zero/None/"local_only" defaults that individual
    /// tests override as needed (e.g. `is_auxiliary`).
    fn make_row(id: &str, tool: &str, source_path: Option<&str>) -> LocalSessionRow {
        LocalSessionRow {
            id: id.to_string(),
            source_path: source_path.map(String::from),
            sync_status: "local_only".to_string(),
            last_synced_at: None,
            user_id: None,
            nickname: None,
            team_id: None,
            tool: tool.to_string(),
            agent_provider: None,
            agent_model: None,
            title: Some("test".to_string()),
            description: None,
            tags: None,
            created_at: "2024-01-01T00:00:00Z".to_string(),
            uploaded_at: None,
            message_count: 0,
            user_message_count: 0,
            task_count: 0,
            event_count: 0,
            duration_seconds: 0,
            total_input_tokens: 0,
            total_output_tokens: 0,
            git_remote: None,
            git_branch: None,
            git_commit: None,
            git_repo_name: None,
            pr_number: None,
            pr_url: None,
            working_directory: None,
            files_modified: None,
            files_read: None,
            has_errors: false,
            max_active_agents: 1,
            is_auxiliary: false,
        }
    }
1356
    /// Smoke test: opening a fresh db applies migrations without panicking.
    #[test]
    fn test_open_and_schema() {
        let _db = test_db();
    }
1361
    /// Re-opening the db must repair a stored tool that disagrees with the
    /// session's source path (here: a Codex rollout mislabeled "claude-code").
    #[test]
    fn test_open_repairs_codex_tool_hint_from_source_path() {
        let dir = tempfile::tempdir().unwrap();
        let path = dir.path().join("repair.db");

        // First open bootstraps the schema.
        {
            let _ = LocalDb::open_path(&path).unwrap();
        }

        // Insert a row whose tool contradicts its `.codex` source path,
        // bypassing the normalizing upsert API.
        {
            let conn = Connection::open(&path).unwrap();
            conn.execute(
                "INSERT INTO sessions (id, team_id, tool, created_at, body_storage_key) VALUES (?1, 'personal', 'claude-code', ?2, '')",
                params!["rollout-repair", "2026-02-20T00:00:00Z"],
            )
            .unwrap();
            conn.execute(
                "INSERT INTO session_sync (session_id, source_path, sync_status) VALUES (?1, ?2, 'local_only')",
                params!["rollout-repair", "/Users/test/.codex/sessions/2026/02/20/rollout-repair.jsonl"],
            )
            .unwrap();
        }

        // Second open runs the repair pass.
        let db = LocalDb::open_path(&path).unwrap();
        let rows = db.list_sessions(&LocalSessionFilter::default()).unwrap();
        let row = rows
            .iter()
            .find(|row| row.id == "rollout-repair")
            .expect("repaired row");
        assert_eq!(row.tool, "codex");
    }
1393
1394 #[test]
1395 fn test_upsert_local_session_normalizes_tool_from_source_path() {
1396 let db = test_db();
1397 let mut session = Session::new(
1398 "rollout-upsert".to_string(),
1399 opensession_core::trace::Agent {
1400 provider: "openai".to_string(),
1401 model: "gpt-5".to_string(),
1402 tool: "claude-code".to_string(),
1403 tool_version: None,
1404 },
1405 );
1406 session.stats.event_count = 1;
1407
1408 db.upsert_local_session(
1409 &session,
1410 "/Users/test/.codex/sessions/2026/02/20/rollout-upsert.jsonl",
1411 &crate::git::GitContext::default(),
1412 )
1413 .unwrap();
1414
1415 let rows = db.list_sessions(&LocalSessionFilter::default()).unwrap();
1416 let row = rows
1417 .iter()
1418 .find(|row| row.id == "rollout-upsert")
1419 .expect("upserted row");
1420 assert_eq!(row.tool, "codex");
1421 }
1422
    /// A second upsert with different (fallback) git context must NOT
    /// clobber the git metadata captured on first insert when the session
    /// itself carries no git attributes.
    #[test]
    fn test_upsert_local_session_preserves_existing_git_when_session_has_no_git_metadata() {
        let db = test_db();
        let mut session = Session::new(
            "preserve-git".to_string(),
            opensession_core::trace::Agent {
                provider: "openai".to_string(),
                model: "gpt-5".to_string(),
                tool: "codex".to_string(),
                tool_version: None,
            },
        );
        session.stats.event_count = 1;

        // First upsert records the original branch/commit.
        let first_git = crate::git::GitContext {
            remote: Some("https://github.com/acme/repo.git".to_string()),
            branch: Some("feature/original".to_string()),
            commit: Some("1111111".to_string()),
            repo_name: Some("acme/repo".to_string()),
        };
        db.upsert_local_session(
            &session,
            "/Users/test/.codex/sessions/2026/02/20/preserve-git.jsonl",
            &first_git,
        )
        .unwrap();

        // Second upsert simulates the repo HEAD having moved on.
        let second_git = crate::git::GitContext {
            remote: Some("https://github.com/acme/repo.git".to_string()),
            branch: Some("feature/current-head".to_string()),
            commit: Some("2222222".to_string()),
            repo_name: Some("acme/repo".to_string()),
        };
        db.upsert_local_session(
            &session,
            "/Users/test/.codex/sessions/2026/02/20/preserve-git.jsonl",
            &second_git,
        )
        .unwrap();

        let rows = db.list_sessions(&LocalSessionFilter::default()).unwrap();
        let row = rows
            .iter()
            .find(|row| row.id == "preserve-git")
            .expect("row exists");
        assert_eq!(row.git_branch.as_deref(), Some("feature/original"));
        assert_eq!(row.git_commit.as_deref(), Some("1111111"));
    }
1471
    /// A `git_branch` attribute on the session itself wins over the fallback
    /// GitContext, and later upserts track updates to that attribute.
    #[test]
    fn test_upsert_local_session_prefers_git_branch_from_session_attributes() {
        let db = test_db();
        let mut session = Session::new(
            "session-git-branch".to_string(),
            opensession_core::trace::Agent {
                provider: "anthropic".to_string(),
                model: "claude-opus-4-6".to_string(),
                tool: "claude-code".to_string(),
                tool_version: None,
            },
        );
        session.stats.event_count = 1;
        session.context.attributes.insert(
            "git_branch".to_string(),
            serde_json::Value::String("from-session".to_string()),
        );

        let fallback_git = crate::git::GitContext {
            remote: Some("https://github.com/acme/repo.git".to_string()),
            branch: Some("fallback-branch".to_string()),
            commit: Some("aaaaaaaa".to_string()),
            repo_name: Some("acme/repo".to_string()),
        };
        db.upsert_local_session(
            &session,
            "/Users/test/.claude/projects/foo/session-git-branch.jsonl",
            &fallback_git,
        )
        .unwrap();

        // Second upsert with an updated attribute must overwrite the branch.
        session.context.attributes.insert(
            "git_branch".to_string(),
            serde_json::Value::String("from-session-updated".to_string()),
        );
        db.upsert_local_session(
            &session,
            "/Users/test/.claude/projects/foo/session-git-branch.jsonl",
            &fallback_git,
        )
        .unwrap();

        let rows = db.list_sessions(&LocalSessionFilter::default()).unwrap();
        let row = rows
            .iter()
            .find(|row| row.id == "session-git-branch")
            .expect("row exists");
        assert_eq!(row.git_branch.as_deref(), Some("from-session-updated"));
    }
1521
1522 #[test]
1523 fn test_is_opencode_child_session() {
1524 let root = temp_root();
1525 let dir = root.path().join("sessions");
1526 create_dir_all(&dir).unwrap();
1527 let parent_session = dir.join("parent.json");
1528 write(
1529 &parent_session,
1530 r#"{"id":"ses_parent","time":{"created":1000,"updated":1000}}"#,
1531 )
1532 .unwrap();
1533 let child_session = dir.join("child.json");
1534 write(
1535 &child_session,
1536 r#"{"id":"ses_child","parentID":"ses_parent","time":{"created":1000,"updated":1000}}"#,
1537 )
1538 .unwrap();
1539
1540 let parent = make_row(
1541 "ses_parent",
1542 "opencode",
1543 Some(parent_session.to_str().unwrap()),
1544 );
1545 let mut child = make_row(
1546 "ses_child",
1547 "opencode",
1548 Some(child_session.to_str().unwrap()),
1549 );
1550 child.is_auxiliary = true;
1551 let mut codex = make_row("ses_other", "codex", Some(child_session.to_str().unwrap()));
1552 codex.is_auxiliary = true;
1553
1554 assert!(!is_opencode_child_session(&parent));
1555 assert!(is_opencode_child_session(&child));
1556 assert!(!is_opencode_child_session(&codex));
1557 }
1558
1559 #[allow(deprecated)]
1560 #[test]
1561 fn test_parse_opencode_parent_session_id_aliases() {
1562 let root = temp_root();
1563 let dir = root.path().join("session-aliases");
1564 create_dir_all(&dir).unwrap();
1565 let child_session = dir.join("child.json");
1566 write(
1567 &child_session,
1568 r#"{"id":"ses_child","parentUUID":"ses_parent","time":{"created":1000,"updated":1000}}"#,
1569 )
1570 .unwrap();
1571 assert_eq!(
1572 parse_opencode_parent_session_id(child_session.to_str().unwrap()).as_deref(),
1573 Some("ses_parent")
1574 );
1575 }
1576
1577 #[allow(deprecated)]
1578 #[test]
1579 fn test_parse_opencode_parent_session_id_nested_metadata() {
1580 let root = temp_root();
1581 let dir = root.path().join("session-nested");
1582 create_dir_all(&dir).unwrap();
1583 let child_session = dir.join("child.json");
1584 write(
1585 &child_session,
1586 r#"{"id":"ses_child","metadata":{"links":{"parentSessionId":"ses_parent","trace":"x"}}}"#,
1587 )
1588 .unwrap();
1589 assert_eq!(
1590 parse_opencode_parent_session_id(child_session.to_str().unwrap()).as_deref(),
1591 Some("ses_parent")
1592 );
1593 }
1594
    /// `hide_opencode_child_sessions` drops auxiliary opencode children but
    /// keeps parents, orphans, and non-opencode rows.
    #[test]
    fn test_hide_opencode_child_sessions() {
        let root = temp_root();
        let dir = root.path().join("sessions");
        create_dir_all(&dir).unwrap();
        let parent_session = dir.join("parent.json");
        let child_session = dir.join("child.json");
        let orphan_session = dir.join("orphan.json");

        write(
            &parent_session,
            r#"{"id":"ses_parent","time":{"created":1000,"updated":1000}}"#,
        )
        .unwrap();
        write(
            &child_session,
            r#"{"id":"ses_child","parentID":"ses_parent","time":{"created":1000,"updated":1000}}"#,
        )
        .unwrap();
        write(
            &orphan_session,
            r#"{"id":"ses_orphan","time":{"created":1000,"updated":1000}}"#,
        )
        .unwrap();

        let rows = vec![
            // Auxiliary opencode child — the only row expected to be hidden.
            {
                let mut row = make_row(
                    "ses_child",
                    "opencode",
                    Some(child_session.to_str().unwrap()),
                );
                row.is_auxiliary = true;
                row
            },
            make_row(
                "ses_parent",
                "opencode",
                Some(parent_session.to_str().unwrap()),
            ),
            // Non-opencode row with real user activity — must survive.
            {
                let mut row = make_row("ses_other", "codex", None);
                row.user_message_count = 1;
                row
            },
            make_row(
                "ses_orphan",
                "opencode",
                Some(orphan_session.to_str().unwrap()),
            ),
        ];

        let filtered = hide_opencode_child_sessions(rows);
        assert_eq!(filtered.len(), 3);
        assert!(filtered.iter().all(|r| r.id != "ses_child"));
    }
1651
1652 #[test]
1653 fn test_sync_cursor() {
1654 let db = test_db();
1655 assert_eq!(db.get_sync_cursor("team1").unwrap(), None);
1656 db.set_sync_cursor("team1", "2024-01-01T00:00:00Z").unwrap();
1657 assert_eq!(
1658 db.get_sync_cursor("team1").unwrap(),
1659 Some("2024-01-01T00:00:00Z".to_string())
1660 );
1661 db.set_sync_cursor("team1", "2024-06-01T00:00:00Z").unwrap();
1663 assert_eq!(
1664 db.get_sync_cursor("team1").unwrap(),
1665 Some("2024-06-01T00:00:00Z".to_string())
1666 );
1667 }
1668
1669 #[test]
1670 fn test_body_cache() {
1671 let db = test_db();
1672 assert_eq!(db.get_cached_body("s1").unwrap(), None);
1673 db.cache_body("s1", b"hello world").unwrap();
1674 assert_eq!(
1675 db.get_cached_body("s1").unwrap(),
1676 Some(b"hello world".to_vec())
1677 );
1678 }
1679
    /// The single local migration must come from opensession-api, and this
    /// crate must not ship its own duplicated migration SQL files.
    #[test]
    fn test_local_migrations_are_loaded_from_api_crate() {
        let migration_names: Vec<&str> = super::LOCAL_MIGRATIONS
            .iter()
            .map(|(name, _)| *name)
            .collect();
        assert!(
            migration_names.contains(&"local_0001_schema"),
            "expected local_0001_schema migration from opensession-api"
        );
        assert_eq!(
            migration_names.len(),
            1,
            "local schema should be single-step"
        );

        // Guard against re-introducing a crate-local migrations/ directory.
        let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
        let migrations_dir = manifest_dir.join("migrations");
        if migrations_dir.exists() {
            let sql_files = std::fs::read_dir(migrations_dir)
                .expect("read local-db migrations directory")
                .filter_map(Result::ok)
                .map(|entry| entry.file_name().to_string_lossy().to_string())
                .filter(|name| name.ends_with(".sql"))
                .collect::<Vec<_>>();
            assert!(
                sql_files.is_empty(),
                "local-db must not ship duplicated migration SQL files"
            );
        }
    }
1711
1712 #[test]
1713 fn test_local_schema_bootstrap_includes_is_auxiliary_column() {
1714 let dir = tempdir().unwrap();
1715 let path = dir.path().join("local.db");
1716 let db = LocalDb::open_path(&path).unwrap();
1717 let conn = db.conn();
1718 let mut stmt = conn.prepare("PRAGMA table_info(sessions)").unwrap();
1719 let columns = stmt
1720 .query_map([], |row| row.get::<_, String>(1))
1721 .unwrap()
1722 .collect::<std::result::Result<Vec<_>, _>>()
1723 .unwrap();
1724 assert!(
1725 columns.iter().any(|name| name == "is_auxiliary"),
1726 "sessions schema must include is_auxiliary column in bootstrap migration"
1727 );
1728 }
1729
1730 #[test]
1731 fn test_upsert_remote_session() {
1732 let db = test_db();
1733 let summary = RemoteSessionSummary {
1734 id: "remote-1".to_string(),
1735 user_id: Some("u1".to_string()),
1736 nickname: Some("alice".to_string()),
1737 team_id: "t1".to_string(),
1738 tool: "claude-code".to_string(),
1739 agent_provider: None,
1740 agent_model: None,
1741 title: Some("Test session".to_string()),
1742 description: None,
1743 tags: None,
1744 created_at: "2024-01-01T00:00:00Z".to_string(),
1745 uploaded_at: "2024-01-01T01:00:00Z".to_string(),
1746 message_count: 10,
1747 task_count: 2,
1748 event_count: 20,
1749 duration_seconds: 300,
1750 total_input_tokens: 1000,
1751 total_output_tokens: 500,
1752 git_remote: None,
1753 git_branch: None,
1754 git_commit: None,
1755 git_repo_name: None,
1756 pr_number: None,
1757 pr_url: None,
1758 working_directory: None,
1759 files_modified: None,
1760 files_read: None,
1761 has_errors: false,
1762 max_active_agents: 1,
1763 };
1764 db.upsert_remote_session(&summary).unwrap();
1765
1766 let sessions = db.list_sessions(&LocalSessionFilter::default()).unwrap();
1767 assert_eq!(sessions.len(), 1);
1768 assert_eq!(sessions[0].id, "remote-1");
1769 assert_eq!(sessions[0].sync_status, "remote_only");
1770 assert_eq!(sessions[0].nickname, None); assert!(!sessions[0].is_auxiliary);
1772 }
1773
    // NOTE(review): despite its name, this test filters by team_id, not by
    // repo — consider renaming or extending it to cover git_repo_name.
    /// Filtering by a matching team id returns the row; a non-existent team
    /// id returns nothing.
    #[test]
    fn test_list_filter_by_repo() {
        let db = test_db();
        let summary1 = RemoteSessionSummary {
            id: "s1".to_string(),
            user_id: None,
            nickname: None,
            team_id: "t1".to_string(),
            tool: "claude-code".to_string(),
            agent_provider: None,
            agent_model: None,
            title: Some("Session 1".to_string()),
            description: None,
            tags: None,
            created_at: "2024-01-01T00:00:00Z".to_string(),
            uploaded_at: "2024-01-01T01:00:00Z".to_string(),
            message_count: 5,
            task_count: 0,
            event_count: 10,
            duration_seconds: 60,
            total_input_tokens: 100,
            total_output_tokens: 50,
            git_remote: None,
            git_branch: None,
            git_commit: None,
            git_repo_name: None,
            pr_number: None,
            pr_url: None,
            working_directory: None,
            files_modified: None,
            files_read: None,
            has_errors: false,
            max_active_agents: 1,
        };
        db.upsert_remote_session(&summary1).unwrap();

        let filter = LocalSessionFilter {
            team_id: Some("t1".to_string()),
            ..Default::default()
        };
        assert_eq!(db.list_sessions(&filter).unwrap().len(), 1);

        let filter = LocalSessionFilter {
            team_id: Some("t999".to_string()),
            ..Default::default()
        };
        assert_eq!(db.list_sessions(&filter).unwrap().len(), 0);
    }
1824
    /// Builds a RemoteSessionSummary for team "t1" with fixed anthropic
    /// agent metadata and small non-zero counters; tests override fields
    /// (e.g. working_directory, git_repo_name) as needed.
    fn make_summary(id: &str, tool: &str, title: &str, created_at: &str) -> RemoteSessionSummary {
        RemoteSessionSummary {
            id: id.to_string(),
            user_id: None,
            nickname: None,
            team_id: "t1".to_string(),
            tool: tool.to_string(),
            agent_provider: Some("anthropic".to_string()),
            agent_model: Some("claude-opus-4-6".to_string()),
            title: Some(title.to_string()),
            description: None,
            tags: None,
            created_at: created_at.to_string(),
            uploaded_at: created_at.to_string(),
            message_count: 5,
            task_count: 1,
            event_count: 10,
            duration_seconds: 300,
            total_input_tokens: 1000,
            total_output_tokens: 500,
            git_remote: None,
            git_branch: None,
            git_commit: None,
            git_repo_name: None,
            pr_number: None,
            pr_url: None,
            working_directory: None,
            files_modified: None,
            files_read: None,
            has_errors: false,
            max_active_agents: 1,
        }
    }
1860
1861 fn seed_sessions(db: &LocalDb) {
1862 db.upsert_remote_session(&make_summary(
1864 "s1",
1865 "claude-code",
1866 "First session",
1867 "2024-01-01T00:00:00Z",
1868 ))
1869 .unwrap();
1870 db.upsert_remote_session(&make_summary(
1871 "s2",
1872 "claude-code",
1873 "JWT auth work",
1874 "2024-01-02T00:00:00Z",
1875 ))
1876 .unwrap();
1877 db.upsert_remote_session(&make_summary(
1878 "s3",
1879 "gemini",
1880 "Gemini test",
1881 "2024-01-03T00:00:00Z",
1882 ))
1883 .unwrap();
1884 db.upsert_remote_session(&make_summary(
1885 "s4",
1886 "claude-code",
1887 "Error handling",
1888 "2024-01-04T00:00:00Z",
1889 ))
1890 .unwrap();
1891 db.upsert_remote_session(&make_summary(
1892 "s5",
1893 "claude-code",
1894 "Final polish",
1895 "2024-01-05T00:00:00Z",
1896 ))
1897 .unwrap();
1898 }
1899
1900 #[test]
1903 fn test_log_no_filters() {
1904 let db = test_db();
1905 seed_sessions(&db);
1906 let filter = LogFilter::default();
1907 let results = db.list_sessions_log(&filter).unwrap();
1908 assert_eq!(results.len(), 5);
1909 assert_eq!(results[0].id, "s5");
1911 assert_eq!(results[4].id, "s1");
1912 }
1913
1914 #[test]
1915 fn test_log_filter_by_tool() {
1916 let db = test_db();
1917 seed_sessions(&db);
1918 let filter = LogFilter {
1919 tool: Some("claude-code".to_string()),
1920 ..Default::default()
1921 };
1922 let results = db.list_sessions_log(&filter).unwrap();
1923 assert_eq!(results.len(), 4);
1924 assert!(results.iter().all(|s| s.tool == "claude-code"));
1925 }
1926
1927 #[test]
1928 fn test_log_filter_by_model_wildcard() {
1929 let db = test_db();
1930 seed_sessions(&db);
1931 let filter = LogFilter {
1932 model: Some("claude*".to_string()),
1933 ..Default::default()
1934 };
1935 let results = db.list_sessions_log(&filter).unwrap();
1936 assert_eq!(results.len(), 5); }
1938
1939 #[test]
1940 fn test_log_filter_since() {
1941 let db = test_db();
1942 seed_sessions(&db);
1943 let filter = LogFilter {
1944 since: Some("2024-01-03T00:00:00Z".to_string()),
1945 ..Default::default()
1946 };
1947 let results = db.list_sessions_log(&filter).unwrap();
1948 assert_eq!(results.len(), 3); }
1950
1951 #[test]
1952 fn test_log_filter_before() {
1953 let db = test_db();
1954 seed_sessions(&db);
1955 let filter = LogFilter {
1956 before: Some("2024-01-03T00:00:00Z".to_string()),
1957 ..Default::default()
1958 };
1959 let results = db.list_sessions_log(&filter).unwrap();
1960 assert_eq!(results.len(), 2); }
1962
1963 #[test]
1964 fn test_log_filter_since_and_before() {
1965 let db = test_db();
1966 seed_sessions(&db);
1967 let filter = LogFilter {
1968 since: Some("2024-01-02T00:00:00Z".to_string()),
1969 before: Some("2024-01-04T00:00:00Z".to_string()),
1970 ..Default::default()
1971 };
1972 let results = db.list_sessions_log(&filter).unwrap();
1973 assert_eq!(results.len(), 2); }
1975
1976 #[test]
1977 fn test_log_filter_grep() {
1978 let db = test_db();
1979 seed_sessions(&db);
1980 let filter = LogFilter {
1981 grep: Some("JWT".to_string()),
1982 ..Default::default()
1983 };
1984 let results = db.list_sessions_log(&filter).unwrap();
1985 assert_eq!(results.len(), 1);
1986 assert_eq!(results[0].id, "s2");
1987 }
1988
1989 #[test]
1990 fn test_log_limit_and_offset() {
1991 let db = test_db();
1992 seed_sessions(&db);
1993 let filter = LogFilter {
1994 limit: Some(2),
1995 offset: Some(1),
1996 ..Default::default()
1997 };
1998 let results = db.list_sessions_log(&filter).unwrap();
1999 assert_eq!(results.len(), 2);
2000 assert_eq!(results[0].id, "s4"); assert_eq!(results[1].id, "s3");
2002 }
2003
2004 #[test]
2005 fn test_log_limit_only() {
2006 let db = test_db();
2007 seed_sessions(&db);
2008 let filter = LogFilter {
2009 limit: Some(3),
2010 ..Default::default()
2011 };
2012 let results = db.list_sessions_log(&filter).unwrap();
2013 assert_eq!(results.len(), 3);
2014 }
2015
2016 #[test]
2017 fn test_list_sessions_limit_offset() {
2018 let db = test_db();
2019 seed_sessions(&db);
2020 let filter = LocalSessionFilter {
2021 limit: Some(2),
2022 offset: Some(1),
2023 ..Default::default()
2024 };
2025 let results = db.list_sessions(&filter).unwrap();
2026 assert_eq!(results.len(), 2);
2027 assert_eq!(results[0].id, "s4");
2028 assert_eq!(results[1].id, "s3");
2029 }
2030
2031 #[test]
2032 fn test_count_sessions_filtered() {
2033 let db = test_db();
2034 seed_sessions(&db);
2035 let count = db
2036 .count_sessions_filtered(&LocalSessionFilter::default())
2037 .unwrap();
2038 assert_eq!(count, 5);
2039 }
2040
    /// list_sessions and count_sessions_filtered must apply identical
    /// predicates, so their results stay in sync once auxiliary rows exist.
    #[test]
    fn test_list_and_count_filters_match_when_auxiliary_rows_exist() {
        let db = test_db();
        seed_sessions(&db);
        // Mark two seeded rows auxiliary directly (no API does this here).
        db.conn()
            .execute(
                "UPDATE sessions SET is_auxiliary = 1 WHERE id IN ('s2', 's3')",
                [],
            )
            .unwrap();

        let default_filter = LocalSessionFilter::default();
        let rows = db.list_sessions(&default_filter).unwrap();
        let count = db.count_sessions_filtered(&default_filter).unwrap();
        assert_eq!(rows.len() as i64, count);
        assert!(rows.iter().all(|row| !row.is_auxiliary));

        // s3 (the only gemini row) is now auxiliary, so the tool filter
        // must agree on zero results for both list and count.
        let gemini_filter = LocalSessionFilter {
            tool: Some("gemini".to_string()),
            ..Default::default()
        };
        let gemini_rows = db.list_sessions(&gemini_filter).unwrap();
        let gemini_count = db.count_sessions_filtered(&gemini_filter).unwrap();
        assert_eq!(gemini_rows.len() as i64, gemini_count);
        assert!(gemini_rows.is_empty());
        assert_eq!(gemini_count, 0);
    }
2068
    /// With `exclude_low_signal`, a metadata-only session (no title, no
    /// messages/tasks) disappears from both list and count; without it,
    /// both sessions are visible.
    #[test]
    fn test_exclude_low_signal_filter_hides_metadata_only_sessions() {
        let db = test_db();

        // Metadata-only row: untitled, zero messages/tasks, trivial events.
        let mut low_signal = make_summary("meta-only", "claude-code", "", "2024-01-01T00:00:00Z");
        low_signal.title = None;
        low_signal.message_count = 0;
        low_signal.task_count = 0;
        low_signal.event_count = 2;
        low_signal.git_repo_name = Some("frontend/aviss-react-front".to_string());

        // A session with real activity in the same repo.
        let mut normal = make_summary(
            "real-work",
            "opencode",
            "Socket.IO decision",
            "2024-01-02T00:00:00Z",
        );
        normal.message_count = 14;
        normal.task_count = 2;
        normal.event_count = 38;
        normal.git_repo_name = Some("frontend/aviss-react-front".to_string());

        db.upsert_remote_session(&low_signal).unwrap();
        db.upsert_remote_session(&normal).unwrap();

        let default_filter = LocalSessionFilter {
            git_repo_name: Some("frontend/aviss-react-front".to_string()),
            ..Default::default()
        };
        assert_eq!(db.list_sessions(&default_filter).unwrap().len(), 2);
        assert_eq!(db.count_sessions_filtered(&default_filter).unwrap(), 2);

        let repo_filter = LocalSessionFilter {
            git_repo_name: Some("frontend/aviss-react-front".to_string()),
            exclude_low_signal: true,
            ..Default::default()
        };
        let rows = db.list_sessions(&repo_filter).unwrap();
        assert_eq!(rows.len(), 1);
        assert_eq!(rows[0].id, "real-work");
        assert_eq!(db.count_sessions_filtered(&repo_filter).unwrap(), 1);
    }
2111
2112 #[test]
2113 fn test_list_working_directories_distinct_non_empty() {
2114 let db = test_db();
2115
2116 let mut a = make_summary("wd-1", "claude-code", "One", "2024-01-01T00:00:00Z");
2117 a.working_directory = Some("/tmp/repo-a".to_string());
2118 let mut b = make_summary("wd-2", "claude-code", "Two", "2024-01-02T00:00:00Z");
2119 b.working_directory = Some("/tmp/repo-a".to_string());
2120 let mut c = make_summary("wd-3", "claude-code", "Three", "2024-01-03T00:00:00Z");
2121 c.working_directory = Some("/tmp/repo-b".to_string());
2122 let mut d = make_summary("wd-4", "claude-code", "Four", "2024-01-04T00:00:00Z");
2123 d.working_directory = Some("".to_string());
2124
2125 db.upsert_remote_session(&a).unwrap();
2126 db.upsert_remote_session(&b).unwrap();
2127 db.upsert_remote_session(&c).unwrap();
2128 db.upsert_remote_session(&d).unwrap();
2129
2130 let dirs = db.list_working_directories().unwrap();
2131 assert_eq!(
2132 dirs,
2133 vec!["/tmp/repo-a".to_string(), "/tmp/repo-b".to_string()]
2134 );
2135 }
2136
2137 #[test]
2138 fn test_list_session_tools() {
2139 let db = test_db();
2140 seed_sessions(&db);
2141 let tools = db
2142 .list_session_tools(&LocalSessionFilter::default())
2143 .unwrap();
2144 assert_eq!(tools, vec!["claude-code".to_string(), "gemini".to_string()]);
2145 }
2146
2147 #[test]
2148 fn test_log_combined_filters() {
2149 let db = test_db();
2150 seed_sessions(&db);
2151 let filter = LogFilter {
2152 tool: Some("claude-code".to_string()),
2153 since: Some("2024-01-03T00:00:00Z".to_string()),
2154 limit: Some(1),
2155 ..Default::default()
2156 };
2157 let results = db.list_sessions_log(&filter).unwrap();
2158 assert_eq!(results.len(), 1);
2159 assert_eq!(results[0].id, "s5"); }
2161
2162 #[test]
2165 fn test_get_session_by_offset() {
2166 let db = test_db();
2167 seed_sessions(&db);
2168 let row = db.get_session_by_offset(0).unwrap().unwrap();
2169 assert_eq!(row.id, "s5"); let row = db.get_session_by_offset(2).unwrap().unwrap();
2171 assert_eq!(row.id, "s3");
2172 assert!(db.get_session_by_offset(10).unwrap().is_none());
2173 }
2174
2175 #[test]
2176 fn test_get_session_by_tool_offset() {
2177 let db = test_db();
2178 seed_sessions(&db);
2179 let row = db
2180 .get_session_by_tool_offset("claude-code", 0)
2181 .unwrap()
2182 .unwrap();
2183 assert_eq!(row.id, "s5");
2184 let row = db
2185 .get_session_by_tool_offset("claude-code", 1)
2186 .unwrap()
2187 .unwrap();
2188 assert_eq!(row.id, "s4");
2189 let row = db.get_session_by_tool_offset("gemini", 0).unwrap().unwrap();
2190 assert_eq!(row.id, "s3");
2191 assert!(db
2192 .get_session_by_tool_offset("gemini", 1)
2193 .unwrap()
2194 .is_none());
2195 }
2196
2197 #[test]
2198 fn test_get_sessions_latest() {
2199 let db = test_db();
2200 seed_sessions(&db);
2201 let rows = db.get_sessions_latest(3).unwrap();
2202 assert_eq!(rows.len(), 3);
2203 assert_eq!(rows[0].id, "s5");
2204 assert_eq!(rows[1].id, "s4");
2205 assert_eq!(rows[2].id, "s3");
2206 }
2207
2208 #[test]
2209 fn test_get_sessions_by_tool_latest() {
2210 let db = test_db();
2211 seed_sessions(&db);
2212 let rows = db.get_sessions_by_tool_latest("claude-code", 2).unwrap();
2213 assert_eq!(rows.len(), 2);
2214 assert_eq!(rows[0].id, "s5");
2215 assert_eq!(rows[1].id, "s4");
2216 }
2217
2218 #[test]
2219 fn test_get_sessions_latest_more_than_available() {
2220 let db = test_db();
2221 seed_sessions(&db);
2222 let rows = db.get_sessions_by_tool_latest("gemini", 10).unwrap();
2223 assert_eq!(rows.len(), 1); }
2225
2226 #[test]
2227 fn test_session_count() {
2228 let db = test_db();
2229 assert_eq!(db.session_count().unwrap(), 0);
2230 seed_sessions(&db);
2231 assert_eq!(db.session_count().unwrap(), 5);
2232 }
2233}