1pub mod git;
2
3use anyhow::{Context, Result};
4use opensession_api::db::migrations::{LOCAL_MIGRATIONS, MIGRATIONS};
5use opensession_core::session::{is_auxiliary_session, working_directory};
6use opensession_core::trace::Session;
7use rusqlite::{params, Connection, OptionalExtension};
8use serde_json::Value;
9use std::fs;
10use std::io::{BufRead, BufReader};
11use std::path::PathBuf;
12use std::sync::Mutex;
13
14use git::{normalize_repo_name, GitContext};
15
/// One row of the local `sessions` table joined with its `session_sync`
/// state and (optionally) the owning user — the unit returned by every
/// listing query in this module.
#[derive(Debug, Clone)]
pub struct LocalSessionRow {
    pub id: String,
    /// Path of the on-disk session file this row was parsed from, if known.
    pub source_path: Option<String>,
    /// Sync state: 'local_only', 'synced', or 'remote_only' (see the
    /// `session_sync` upserts below); 'unknown' when no sync row exists.
    pub sync_status: String,
    pub last_synced_at: Option<String>,
    pub user_id: Option<String>,
    pub nickname: Option<String>,
    pub team_id: Option<String>,
    /// Tool that produced the session (e.g. "codex", "claude-code", "opencode").
    pub tool: String,
    pub agent_provider: Option<String>,
    pub agent_model: Option<String>,
    pub title: Option<String>,
    pub description: Option<String>,
    /// Comma-joined tag list (stored as a single TEXT column).
    pub tags: Option<String>,
    /// RFC 3339 creation timestamp.
    pub created_at: String,
    pub uploaded_at: Option<String>,
    // Aggregated statistics extracted from the parsed session.
    pub message_count: i64,
    pub user_message_count: i64,
    pub task_count: i64,
    pub event_count: i64,
    pub duration_seconds: i64,
    pub total_input_tokens: i64,
    pub total_output_tokens: i64,
    // Git context captured at ingest time.
    pub git_remote: Option<String>,
    pub git_branch: Option<String>,
    pub git_commit: Option<String>,
    pub git_repo_name: Option<String>,
    pub pr_number: Option<i64>,
    pub pr_url: Option<String>,
    pub working_directory: Option<String>,
    pub files_modified: Option<String>,
    pub files_read: Option<String>,
    pub has_errors: bool,
    pub max_active_agents: i64,
    /// Auxiliary (e.g. child/sub-agent) sessions are hidden by default in
    /// every listing query in this module.
    pub is_auxiliary: bool,
}
54
55pub fn is_opencode_child_session(row: &LocalSessionRow) -> bool {
57 row.tool == "opencode" && row.is_auxiliary
58}
59
60#[deprecated(
62 note = "Use parser/core canonical session role attributes instead of runtime file inspection"
63)]
64pub fn parse_opencode_parent_session_id(source_path: &str) -> Option<String> {
65 let text = fs::read_to_string(source_path).ok()?;
66 let json: Value = serde_json::from_str(&text).ok()?;
67 lookup_parent_session_id(&json)
68}
69
70fn lookup_parent_session_id(value: &Value) -> Option<String> {
71 match value {
72 Value::Object(obj) => {
73 for (key, value) in obj {
74 if is_parent_id_key(key) {
75 if let Some(parent_id) = value.as_str() {
76 let parent_id = parent_id.trim();
77 if !parent_id.is_empty() {
78 return Some(parent_id.to_string());
79 }
80 }
81 }
82 if let Some(parent_id) = lookup_parent_session_id(value) {
83 return Some(parent_id);
84 }
85 }
86 None
87 }
88 Value::Array(items) => items.iter().find_map(lookup_parent_session_id),
89 _ => None,
90 }
91}
92
/// Heuristic: does this JSON key name look like it stores a parent session id?
///
/// The key is first normalized to lowercase alphanumerics (so `parent_id`,
/// `parentId`, and `parent-session-uuid` all compare equal). A key matches
/// when it mentions "parent" and ends in "id" — note "uuid" also ends in
/// "id", so `…uuid` keys match too.
fn is_parent_id_key(key: &str) -> bool {
    let flat: String = key
        .chars()
        .filter(char::is_ascii_alphanumeric)
        .map(|c| c.to_ascii_lowercase())
        .collect();

    flat.contains("parent") && (flat.ends_with("id") || flat.ends_with("uuid"))
}
108
109pub fn hide_opencode_child_sessions(mut rows: Vec<LocalSessionRow>) -> Vec<LocalSessionRow> {
111 rows.retain(|row| !row.is_auxiliary);
112 rows
113}
114
/// Infer the producing tool from where the session file lives on disk.
///
/// Recognizes Codex (`…/.codex/sessions/…`) and Claude Code
/// (`…/.claude/projects/…`) layouts with both `/` and `\` separators,
/// case-insensitively. Returns `None` for unknown locations.
fn infer_tool_from_source_path(source_path: Option<&str>) -> Option<&'static str> {
    let path = source_path?.to_ascii_lowercase();
    let matches_any = |needles: &[&str]| needles.iter().any(|needle| path.contains(needle));

    if matches_any(&[
        "/.codex/sessions/",
        "\\.codex\\sessions\\",
        "/codex/sessions/",
        "\\codex\\sessions\\",
    ]) {
        return Some("codex");
    }

    if matches_any(&[
        "/.claude/projects/",
        "\\.claude\\projects\\",
        "/claude/projects/",
        "\\claude\\projects\\",
    ]) {
        return Some("claude-code");
    }

    None
}
136
137fn normalize_tool_for_source_path(current_tool: &str, source_path: Option<&str>) -> String {
138 infer_tool_from_source_path(source_path)
139 .unwrap_or(current_tool)
140 .to_string()
141}
142
/// Trim an optional string; whitespace-only or absent input becomes `None`.
fn normalize_non_empty(value: Option<&str>) -> Option<String> {
    match value.map(str::trim) {
        Some(trimmed) if !trimmed.is_empty() => Some(trimmed.to_string()),
        _ => None,
    }
}
149
150fn json_object_string(value: &Value, keys: &[&str]) -> Option<String> {
151 let obj = value.as_object()?;
152 for key in keys {
153 if let Some(found) = obj.get(*key).and_then(Value::as_str) {
154 let normalized = found.trim();
155 if !normalized.is_empty() {
156 return Some(normalized.to_string());
157 }
158 }
159 }
160 None
161}
162
163fn git_context_from_session_attributes(session: &Session) -> GitContext {
164 let attrs = &session.context.attributes;
165
166 let mut remote = normalize_non_empty(attrs.get("git_remote").and_then(Value::as_str));
167 let mut branch = normalize_non_empty(attrs.get("git_branch").and_then(Value::as_str));
168 let mut commit = normalize_non_empty(attrs.get("git_commit").and_then(Value::as_str));
169 let mut repo_name = normalize_non_empty(attrs.get("git_repo_name").and_then(Value::as_str));
170
171 if let Some(git_value) = attrs.get("git") {
172 if remote.is_none() {
173 remote = json_object_string(
174 git_value,
175 &["remote", "repository_url", "repo_url", "origin", "url"],
176 );
177 }
178 if branch.is_none() {
179 branch = json_object_string(
180 git_value,
181 &["branch", "git_branch", "current_branch", "ref", "head"],
182 );
183 }
184 if commit.is_none() {
185 commit = json_object_string(git_value, &["commit", "commit_hash", "sha", "git_commit"]);
186 }
187 if repo_name.is_none() {
188 repo_name = json_object_string(git_value, &["repo_name", "repository", "repo", "name"]);
189 }
190 }
191
192 if repo_name.is_none() {
193 repo_name = remote
194 .as_deref()
195 .and_then(normalize_repo_name)
196 .map(ToOwned::to_owned);
197 }
198
199 GitContext {
200 remote,
201 branch,
202 commit,
203 repo_name,
204 }
205}
206
207fn git_context_has_any_field(git: &GitContext) -> bool {
208 git.remote.is_some() || git.branch.is_some() || git.commit.is_some() || git.repo_name.is_some()
209}
210
211fn merge_git_context(preferred: &GitContext, fallback: &GitContext) -> GitContext {
212 GitContext {
213 remote: preferred.remote.clone().or_else(|| fallback.remote.clone()),
214 branch: preferred.branch.clone().or_else(|| fallback.branch.clone()),
215 commit: preferred.commit.clone().or_else(|| fallback.commit.clone()),
216 repo_name: preferred
217 .repo_name
218 .clone()
219 .or_else(|| fallback.repo_name.clone()),
220 }
221}
222
/// Filter, sort, and paging options for [`LocalDb::list_sessions`] and the
/// related count / tool-listing queries.
#[derive(Debug, Clone)]
pub struct LocalSessionFilter {
    /// Exact team id match.
    pub team_id: Option<String>,
    /// Exact sync-status match; rows without a sync record compare as 'unknown'.
    pub sync_status: Option<String>,
    /// Exact git repository name match.
    pub git_repo_name: Option<String>,
    /// Substring search over title, description, and tags.
    pub search: Option<String>,
    /// Hide sessions with no messages/tasks, at most 2 events, and no title.
    pub exclude_low_signal: bool,
    /// Exact tool name match.
    pub tool: Option<String>,
    /// Result ordering.
    pub sort: LocalSortOrder,
    /// Restrict results to recently created sessions.
    pub time_range: LocalTimeRange,
    /// Maximum number of rows to return.
    pub limit: Option<u32>,
    /// Row offset; only applied when `limit` is also set.
    pub offset: Option<u32>,
}
237
238impl Default for LocalSessionFilter {
239 fn default() -> Self {
240 Self {
241 team_id: None,
242 sync_status: None,
243 git_repo_name: None,
244 search: None,
245 exclude_low_signal: false,
246 tool: None,
247 sort: LocalSortOrder::Recent,
248 time_range: LocalTimeRange::All,
249 limit: None,
250 offset: None,
251 }
252 }
253}
254
/// Sort order for local session listings.
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub enum LocalSortOrder {
    /// Newest first (`created_at DESC`).
    #[default]
    Recent,
    /// Highest message count first, ties broken by recency.
    Popular,
    /// Longest duration first, ties broken by recency.
    Longest,
}
263
/// Time window restricting listings by session creation time.
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub enum LocalTimeRange {
    /// Last 24 hours.
    Hours24,
    /// Last 7 days.
    Days7,
    /// Last 30 days.
    Days30,
    /// No time restriction.
    #[default]
    All,
}
273
/// Session summary as reported by the remote service, mirrored into the local
/// `sessions` table by [`LocalDb::upsert_remote_session`].
#[derive(Debug, Clone)]
pub struct RemoteSessionSummary {
    pub id: String,
    pub user_id: Option<String>,
    pub nickname: Option<String>,
    /// Remote sessions always carry a team id (unlike local rows, where it
    /// may be absent).
    pub team_id: String,
    pub tool: String,
    pub agent_provider: Option<String>,
    pub agent_model: Option<String>,
    pub title: Option<String>,
    pub description: Option<String>,
    pub tags: Option<String>,
    pub created_at: String,
    /// Upload time is always known for a remote session.
    pub uploaded_at: String,
    pub message_count: i64,
    pub task_count: i64,
    pub event_count: i64,
    pub duration_seconds: i64,
    pub total_input_tokens: i64,
    pub total_output_tokens: i64,
    pub git_remote: Option<String>,
    pub git_branch: Option<String>,
    pub git_commit: Option<String>,
    pub git_repo_name: Option<String>,
    pub pr_number: Option<i64>,
    pub pr_url: Option<String>,
    pub working_directory: Option<String>,
    pub files_modified: Option<String>,
    pub files_read: Option<String>,
    pub has_errors: bool,
    pub max_active_agents: i64,
}
307
/// Filter options for the log-style listing ([`LocalDb::list_sessions_log`]).
#[derive(Debug, Default)]
pub struct LogFilter {
    /// Exact tool name match.
    pub tool: Option<String>,
    /// Model pattern; `*` is translated to the SQL `%` wildcard.
    pub model: Option<String>,
    /// Inclusive lower bound on `created_at`.
    pub since: Option<String>,
    /// Exclusive upper bound on `created_at`.
    pub before: Option<String>,
    /// Match sessions whose `files_modified` column mentions this quoted path.
    pub touches: Option<String>,
    /// Substring search over title, description, and tags.
    pub grep: Option<String>,
    /// When `Some(true)`, only sessions flagged as having errors.
    pub has_errors: Option<bool>,
    /// Prefix match on the session's working directory.
    pub working_directory: Option<String>,
    /// Exact git repository name match.
    pub git_repo_name: Option<String>,
    /// Maximum number of rows to return.
    pub limit: Option<u32>,
    /// Row offset; only applied when `limit` is also set.
    pub offset: Option<u32>,
}
334
/// Shared FROM/JOIN fragment for session queries: `sessions` (aliased `s`)
/// joined with its sync state (`ss`) and the owning user (`u`).
const FROM_CLAUSE: &str = "\
FROM sessions s \
LEFT JOIN session_sync ss ON ss.session_id = s.id \
LEFT JOIN users u ON u.id = s.user_id";
340
/// Handle to the local SQLite database.
pub struct LocalDb {
    // Single connection; all access is serialized through this mutex so the
    // handle can be shared across threads.
    conn: Mutex<Connection>,
}
347
348impl LocalDb {
349 pub fn open() -> Result<Self> {
352 let path = default_db_path()?;
353 Self::open_path(&path)
354 }
355
356 pub fn open_path(path: &PathBuf) -> Result<Self> {
358 if let Some(parent) = path.parent() {
359 std::fs::create_dir_all(parent)
360 .with_context(|| format!("create dir for {}", path.display()))?;
361 }
362 let conn = open_connection_with_latest_schema(path)
363 .with_context(|| format!("open local db {}", path.display()))?;
364 Ok(Self {
365 conn: Mutex::new(conn),
366 })
367 }
368
    /// Acquire the connection guard. Panics only if the mutex was poisoned,
    /// i.e. a previous holder panicked mid-operation — treated as a bug.
    fn conn(&self) -> std::sync::MutexGuard<'_, Connection> {
        self.conn.lock().expect("local db mutex poisoned")
    }
372
    /// Insert or refresh a locally-discovered session plus its sync record.
    ///
    /// Stats and metadata columns are always overwritten from the freshly
    /// parsed `session`. Git columns are special-cased via `?26`
    /// (`has_session_git`): when the session's own attributes carried any git
    /// info, those values win outright; otherwise a previously stored value
    /// is kept and the caller-supplied `git` context only fills gaps
    /// (COALESCE). The sync row is created as 'local_only' and its status is
    /// left untouched on conflict — only `source_path` is refreshed.
    pub fn upsert_local_session(
        &self,
        session: &Session,
        source_path: &str,
        git: &GitContext,
    ) -> Result<()> {
        let title = session.context.title.as_deref();
        let description = session.context.description.as_deref();
        // Tags are stored as one comma-joined TEXT column.
        let tags = if session.context.tags.is_empty() {
            None
        } else {
            Some(session.context.tags.join(","))
        };
        let created_at = session.context.created_at.to_rfc3339();
        let cwd = working_directory(session).map(String::from);
        let is_auxiliary = is_auxiliary_session(session);

        let (files_modified, files_read, has_errors) =
            opensession_core::extract::extract_file_metadata(session);
        let max_active_agents = opensession_core::agent_metrics::max_active_agents(session) as i64;
        // Prefer the tool implied by the file's on-disk location over the
        // tool name the parser reported.
        let normalized_tool =
            normalize_tool_for_source_path(&session.agent.tool, Some(source_path));
        let git_from_session = git_context_from_session_attributes(session);
        let has_session_git = git_context_has_any_field(&git_from_session);
        let merged_git = merge_git_context(&git_from_session, git);

        let conn = self.conn();
        // Placeholders ?1-?25 are the column values; ?26 is the
        // has_session_git flag driving the git-column CASE logic above.
        conn.execute(
            "INSERT INTO sessions \
            (id, team_id, tool, agent_provider, agent_model, \
            title, description, tags, created_at, \
            message_count, user_message_count, task_count, event_count, duration_seconds, \
            total_input_tokens, total_output_tokens, body_storage_key, \
            git_remote, git_branch, git_commit, git_repo_name, working_directory, \
            files_modified, files_read, has_errors, max_active_agents, is_auxiliary) \
            VALUES (?1,'personal',?2,?3,?4,?5,?6,?7,?8,?9,?10,?11,?12,?13,?14,?15,'',?16,?17,?18,?19,?20,?21,?22,?23,?24,?25) \
            ON CONFLICT(id) DO UPDATE SET \
            tool=excluded.tool, agent_provider=excluded.agent_provider, \
            agent_model=excluded.agent_model, \
            title=excluded.title, description=excluded.description, \
            tags=excluded.tags, \
            message_count=excluded.message_count, user_message_count=excluded.user_message_count, \
            task_count=excluded.task_count, \
            event_count=excluded.event_count, duration_seconds=excluded.duration_seconds, \
            total_input_tokens=excluded.total_input_tokens, \
            total_output_tokens=excluded.total_output_tokens, \
            git_remote=CASE WHEN ?26=1 THEN excluded.git_remote ELSE COALESCE(git_remote, excluded.git_remote) END, \
            git_branch=CASE WHEN ?26=1 THEN excluded.git_branch ELSE COALESCE(git_branch, excluded.git_branch) END, \
            git_commit=CASE WHEN ?26=1 THEN excluded.git_commit ELSE COALESCE(git_commit, excluded.git_commit) END, \
            git_repo_name=CASE WHEN ?26=1 THEN excluded.git_repo_name ELSE COALESCE(git_repo_name, excluded.git_repo_name) END, \
            working_directory=excluded.working_directory, \
            files_modified=excluded.files_modified, files_read=excluded.files_read, \
            has_errors=excluded.has_errors, \
            max_active_agents=excluded.max_active_agents, \
            is_auxiliary=excluded.is_auxiliary",
            params![
                &session.session_id,
                &normalized_tool,
                &session.agent.provider,
                &session.agent.model,
                title,
                description,
                &tags,
                &created_at,
                session.stats.message_count as i64,
                session.stats.user_message_count as i64,
                session.stats.task_count as i64,
                session.stats.event_count as i64,
                session.stats.duration_seconds as i64,
                session.stats.total_input_tokens as i64,
                session.stats.total_output_tokens as i64,
                &merged_git.remote,
                &merged_git.branch,
                &merged_git.commit,
                &merged_git.repo_name,
                &cwd,
                &files_modified,
                &files_read,
                has_errors,
                max_active_agents,
                is_auxiliary as i64,
                has_session_git as i64,
            ],
        )?;

        conn.execute(
            "INSERT INTO session_sync (session_id, source_path, sync_status) \
            VALUES (?1, ?2, 'local_only') \
            ON CONFLICT(session_id) DO UPDATE SET source_path=excluded.source_path",
            params![&session.session_id, source_path],
        )?;
        Ok(())
    }
470
    /// Mirror a remote session summary into the local `sessions` table.
    ///
    /// Remote rows are never auxiliary (`is_auxiliary` is hard-coded to 0).
    /// The sync row is created as 'remote_only'; if a 'local_only' row
    /// already existed, its status is promoted to 'synced' (the session
    /// exists on both sides), otherwise the existing status is preserved.
    pub fn upsert_remote_session(&self, summary: &RemoteSessionSummary) -> Result<()> {
        let conn = self.conn();
        conn.execute(
            "INSERT INTO sessions \
            (id, user_id, team_id, tool, agent_provider, agent_model, \
            title, description, tags, created_at, uploaded_at, \
            message_count, task_count, event_count, duration_seconds, \
            total_input_tokens, total_output_tokens, body_storage_key, \
            git_remote, git_branch, git_commit, git_repo_name, \
            pr_number, pr_url, working_directory, \
            files_modified, files_read, has_errors, max_active_agents, is_auxiliary) \
            VALUES (?1,?2,?3,?4,?5,?6,?7,?8,?9,?10,?11,?12,?13,?14,?15,?16,?17,'',?18,?19,?20,?21,?22,?23,?24,?25,?26,?27,?28,0) \
            ON CONFLICT(id) DO UPDATE SET \
            title=excluded.title, description=excluded.description, \
            tags=excluded.tags, uploaded_at=excluded.uploaded_at, \
            message_count=excluded.message_count, task_count=excluded.task_count, \
            event_count=excluded.event_count, duration_seconds=excluded.duration_seconds, \
            total_input_tokens=excluded.total_input_tokens, \
            total_output_tokens=excluded.total_output_tokens, \
            git_remote=excluded.git_remote, git_branch=excluded.git_branch, \
            git_commit=excluded.git_commit, git_repo_name=excluded.git_repo_name, \
            pr_number=excluded.pr_number, pr_url=excluded.pr_url, \
            working_directory=excluded.working_directory, \
            files_modified=excluded.files_modified, files_read=excluded.files_read, \
            has_errors=excluded.has_errors, \
            max_active_agents=excluded.max_active_agents, \
            is_auxiliary=excluded.is_auxiliary",
            params![
                &summary.id,
                &summary.user_id,
                &summary.team_id,
                &summary.tool,
                &summary.agent_provider,
                &summary.agent_model,
                &summary.title,
                &summary.description,
                &summary.tags,
                &summary.created_at,
                &summary.uploaded_at,
                summary.message_count,
                summary.task_count,
                summary.event_count,
                summary.duration_seconds,
                summary.total_input_tokens,
                summary.total_output_tokens,
                &summary.git_remote,
                &summary.git_branch,
                &summary.git_commit,
                &summary.git_repo_name,
                summary.pr_number,
                &summary.pr_url,
                &summary.working_directory,
                &summary.files_modified,
                &summary.files_read,
                summary.has_errors,
                summary.max_active_agents,
            ],
        )?;

        // Promote 'local_only' to 'synced'; any other existing status wins.
        conn.execute(
            "INSERT INTO session_sync (session_id, sync_status) \
            VALUES (?1, 'remote_only') \
            ON CONFLICT(session_id) DO UPDATE SET \
            sync_status = CASE WHEN session_sync.sync_status = 'local_only' THEN 'synced' ELSE session_sync.sync_status END",
            params![&summary.id],
        )?;
        Ok(())
    }
542
    /// Translate a [`LocalSessionFilter`] into a WHERE clause string plus the
    /// boxed positional parameters it references.
    ///
    /// `idx` tracks the next `?N` placeholder number and must stay in lock
    /// step with `param_values` pushes. The baseline clauses always exclude
    /// auxiliary sessions. Note: sort order and limit/offset are NOT handled
    /// here — callers append those themselves.
    fn build_local_session_where_clause(
        filter: &LocalSessionFilter,
    ) -> (String, Vec<Box<dyn rusqlite::types::ToSql>>) {
        let mut where_clauses = vec![
            "1=1".to_string(),
            "COALESCE(s.is_auxiliary, 0) = 0".to_string(),
        ];
        let mut param_values: Vec<Box<dyn rusqlite::types::ToSql>> = Vec::new();
        let mut idx = 1u32;

        if let Some(ref team_id) = filter.team_id {
            where_clauses.push(format!("s.team_id = ?{idx}"));
            param_values.push(Box::new(team_id.clone()));
            idx += 1;
        }

        if let Some(ref sync_status) = filter.sync_status {
            // Sessions without a sync row compare as 'unknown'.
            where_clauses.push(format!("COALESCE(ss.sync_status, 'unknown') = ?{idx}"));
            param_values.push(Box::new(sync_status.clone()));
            idx += 1;
        }

        if let Some(ref repo) = filter.git_repo_name {
            where_clauses.push(format!("s.git_repo_name = ?{idx}"));
            param_values.push(Box::new(repo.clone()));
            idx += 1;
        }

        if let Some(ref tool) = filter.tool {
            where_clauses.push(format!("s.tool = ?{idx}"));
            param_values.push(Box::new(tool.clone()));
            idx += 1;
        }

        if let Some(ref search) = filter.search {
            // Substring match across three columns; the same pattern is bound
            // three times. NOTE(review): '%'/'_' in `search` act as LIKE
            // wildcards — they are not escaped.
            let like = format!("%{search}%");
            where_clauses.push(format!(
                "(s.title LIKE ?{i1} OR s.description LIKE ?{i2} OR s.tags LIKE ?{i3})",
                i1 = idx,
                i2 = idx + 1,
                i3 = idx + 2,
            ));
            param_values.push(Box::new(like.clone()));
            param_values.push(Box::new(like.clone()));
            param_values.push(Box::new(like));
            idx += 3;
        }

        if filter.exclude_low_signal {
            // "Low signal" = no messages, no tasks, at most 2 events, and no
            // (non-blank) title.
            where_clauses.push(
                "NOT (COALESCE(s.message_count, 0) = 0 \
                AND COALESCE(s.user_message_count, 0) = 0 \
                AND COALESCE(s.task_count, 0) = 0 \
                AND COALESCE(s.event_count, 0) <= 2 \
                AND (s.title IS NULL OR TRIM(s.title) = ''))"
                    .to_string(),
            );
        }

        let interval = match filter.time_range {
            LocalTimeRange::Hours24 => Some("-1 day"),
            LocalTimeRange::Days7 => Some("-7 days"),
            LocalTimeRange::Days30 => Some("-30 days"),
            LocalTimeRange::All => None,
        };
        if let Some(interval) = interval {
            // Last placeholder: idx deliberately not incremented afterwards.
            where_clauses.push(format!("datetime(s.created_at) >= datetime('now', ?{idx})"));
            param_values.push(Box::new(interval.to_string()));
        }

        (where_clauses.join(" AND "), param_values)
    }
617
618 pub fn list_sessions(&self, filter: &LocalSessionFilter) -> Result<Vec<LocalSessionRow>> {
619 let (where_str, mut param_values) = Self::build_local_session_where_clause(filter);
620 let order_clause = match filter.sort {
621 LocalSortOrder::Popular => "s.message_count DESC, s.created_at DESC",
622 LocalSortOrder::Longest => "s.duration_seconds DESC, s.created_at DESC",
623 LocalSortOrder::Recent => "s.created_at DESC",
624 };
625
626 let mut sql = format!(
627 "SELECT {LOCAL_SESSION_COLUMNS} \
628 {FROM_CLAUSE} WHERE {where_str} \
629 ORDER BY {order_clause}"
630 );
631
632 if let Some(limit) = filter.limit {
633 sql.push_str(" LIMIT ?");
634 param_values.push(Box::new(limit));
635 if let Some(offset) = filter.offset {
636 sql.push_str(" OFFSET ?");
637 param_values.push(Box::new(offset));
638 }
639 }
640
641 let param_refs: Vec<&dyn rusqlite::types::ToSql> =
642 param_values.iter().map(|p| p.as_ref()).collect();
643 let conn = self.conn();
644 let mut stmt = conn.prepare(&sql)?;
645 let rows = stmt.query_map(param_refs.as_slice(), row_to_local_session)?;
646
647 let mut result = Vec::new();
648 for row in rows {
649 result.push(row?);
650 }
651
652 Ok(result)
653 }
654
655 pub fn count_sessions_filtered(&self, filter: &LocalSessionFilter) -> Result<i64> {
657 let mut count_filter = filter.clone();
658 count_filter.limit = None;
659 count_filter.offset = None;
660 let (where_str, param_values) = Self::build_local_session_where_clause(&count_filter);
661 let sql = format!("SELECT COUNT(*) {FROM_CLAUSE} WHERE {where_str}");
662 let param_refs: Vec<&dyn rusqlite::types::ToSql> =
663 param_values.iter().map(|p| p.as_ref()).collect();
664 let conn = self.conn();
665 let count = conn.query_row(&sql, param_refs.as_slice(), |row| row.get(0))?;
666 Ok(count)
667 }
668
669 pub fn list_session_tools(&self, filter: &LocalSessionFilter) -> Result<Vec<String>> {
671 let mut tool_filter = filter.clone();
672 tool_filter.tool = None;
673 tool_filter.limit = None;
674 tool_filter.offset = None;
675 let (where_str, param_values) = Self::build_local_session_where_clause(&tool_filter);
676 let sql = format!(
677 "SELECT DISTINCT s.tool \
678 {FROM_CLAUSE} WHERE {where_str} \
679 ORDER BY s.tool ASC"
680 );
681 let param_refs: Vec<&dyn rusqlite::types::ToSql> =
682 param_values.iter().map(|p| p.as_ref()).collect();
683 let conn = self.conn();
684 let mut stmt = conn.prepare(&sql)?;
685 let rows = stmt.query_map(param_refs.as_slice(), |row| row.get::<_, String>(0))?;
686
687 let mut tools = Vec::new();
688 for row in rows {
689 let tool = row?;
690 if !tool.trim().is_empty() {
691 tools.push(tool);
692 }
693 }
694 Ok(tools)
695 }
696
    /// Log-style listing driven by a [`LogFilter`], newest first.
    ///
    /// Builds its own WHERE clause (parallel to
    /// [`Self::build_local_session_where_clause`]) with `idx` tracking the
    /// next `?N` placeholder; auxiliary sessions are always excluded. As in
    /// `list_sessions`, `offset` only applies when `limit` is set.
    pub fn list_sessions_log(&self, filter: &LogFilter) -> Result<Vec<LocalSessionRow>> {
        let mut where_clauses = vec![
            "1=1".to_string(),
            "COALESCE(s.is_auxiliary, 0) = 0".to_string(),
        ];
        let mut param_values: Vec<Box<dyn rusqlite::types::ToSql>> = Vec::new();
        let mut idx = 1u32;

        if let Some(ref tool) = filter.tool {
            where_clauses.push(format!("s.tool = ?{idx}"));
            param_values.push(Box::new(tool.clone()));
            idx += 1;
        }

        if let Some(ref model) = filter.model {
            // User-facing '*' wildcard becomes SQL LIKE '%'.
            let like = model.replace('*', "%");
            where_clauses.push(format!("s.agent_model LIKE ?{idx}"));
            param_values.push(Box::new(like));
            idx += 1;
        }

        if let Some(ref since) = filter.since {
            where_clauses.push(format!("s.created_at >= ?{idx}"));
            param_values.push(Box::new(since.clone()));
            idx += 1;
        }

        if let Some(ref before) = filter.before {
            where_clauses.push(format!("s.created_at < ?{idx}"));
            param_values.push(Box::new(before.clone()));
            idx += 1;
        }

        if let Some(ref touches) = filter.touches {
            // Match the path as a quoted element inside the stored
            // files_modified text (looks JSON-encoded — the pattern searches
            // for "\"<path>\"").
            let like = format!("%\"{touches}\"%");
            where_clauses.push(format!("s.files_modified LIKE ?{idx}"));
            param_values.push(Box::new(like));
            idx += 1;
        }

        if let Some(ref grep) = filter.grep {
            // Same three-column substring search as list_sessions; the
            // pattern is bound once per column.
            let like = format!("%{grep}%");
            where_clauses.push(format!(
                "(s.title LIKE ?{i1} OR s.description LIKE ?{i2} OR s.tags LIKE ?{i3})",
                i1 = idx,
                i2 = idx + 1,
                i3 = idx + 2,
            ));
            param_values.push(Box::new(like.clone()));
            param_values.push(Box::new(like.clone()));
            param_values.push(Box::new(like));
            idx += 3;
        }

        // Only Some(true) filters; Some(false)/None both mean "no constraint".
        if let Some(true) = filter.has_errors {
            where_clauses.push("s.has_errors = 1".to_string());
        }

        if let Some(ref wd) = filter.working_directory {
            // Prefix match on the working directory.
            where_clauses.push(format!("s.working_directory LIKE ?{idx}"));
            param_values.push(Box::new(format!("{wd}%")));
            idx += 1;
        }

        if let Some(ref repo) = filter.git_repo_name {
            where_clauses.push(format!("s.git_repo_name = ?{idx}"));
            param_values.push(Box::new(repo.clone()));
            idx += 1;
        }

        // idx is no longer needed; the discard silences the unused-assignment lint.
        let _ = idx; let where_str = where_clauses.join(" AND ");
        let mut sql = format!(
            "SELECT {LOCAL_SESSION_COLUMNS} \
            {FROM_CLAUSE} WHERE {where_str} \
            ORDER BY s.created_at DESC"
        );

        if let Some(limit) = filter.limit {
            sql.push_str(" LIMIT ?");
            param_values.push(Box::new(limit));
            if let Some(offset) = filter.offset {
                sql.push_str(" OFFSET ?");
                param_values.push(Box::new(offset));
            }
        }

        let param_refs: Vec<&dyn rusqlite::types::ToSql> =
            param_values.iter().map(|p| p.as_ref()).collect();
        let conn = self.conn();
        let mut stmt = conn.prepare(&sql)?;
        let rows = stmt.query_map(param_refs.as_slice(), row_to_local_session)?;

        let mut result = Vec::new();
        for row in rows {
            result.push(row?);
        }
        Ok(result)
    }
800
801 pub fn get_sessions_by_tool_latest(
803 &self,
804 tool: &str,
805 count: u32,
806 ) -> Result<Vec<LocalSessionRow>> {
807 let sql = format!(
808 "SELECT {LOCAL_SESSION_COLUMNS} \
809 {FROM_CLAUSE} WHERE s.tool = ?1 AND COALESCE(s.is_auxiliary, 0) = 0 \
810 ORDER BY s.created_at DESC"
811 );
812 let conn = self.conn();
813 let mut stmt = conn.prepare(&sql)?;
814 let rows = stmt.query_map(params![tool], row_to_local_session)?;
815 let mut result = Vec::new();
816 for row in rows {
817 result.push(row?);
818 }
819
820 result.truncate(count as usize);
821 Ok(result)
822 }
823
824 pub fn get_sessions_latest(&self, count: u32) -> Result<Vec<LocalSessionRow>> {
826 let sql = format!(
827 "SELECT {LOCAL_SESSION_COLUMNS} \
828 {FROM_CLAUSE} WHERE COALESCE(s.is_auxiliary, 0) = 0 \
829 ORDER BY s.created_at DESC"
830 );
831 let conn = self.conn();
832 let mut stmt = conn.prepare(&sql)?;
833 let rows = stmt.query_map([], row_to_local_session)?;
834 let mut result = Vec::new();
835 for row in rows {
836 result.push(row?);
837 }
838
839 result.truncate(count as usize);
840 Ok(result)
841 }
842
843 pub fn get_session_by_tool_offset(
845 &self,
846 tool: &str,
847 offset: u32,
848 ) -> Result<Option<LocalSessionRow>> {
849 let sql = format!(
850 "SELECT {LOCAL_SESSION_COLUMNS} \
851 {FROM_CLAUSE} WHERE s.tool = ?1 AND COALESCE(s.is_auxiliary, 0) = 0 \
852 ORDER BY s.created_at DESC"
853 );
854 let conn = self.conn();
855 let mut stmt = conn.prepare(&sql)?;
856 let rows = stmt.query_map(params![tool], row_to_local_session)?;
857 let result = rows.collect::<Result<Vec<_>, _>>()?;
858 Ok(result.into_iter().nth(offset as usize))
859 }
860
861 pub fn get_session_by_offset(&self, offset: u32) -> Result<Option<LocalSessionRow>> {
863 let sql = format!(
864 "SELECT {LOCAL_SESSION_COLUMNS} \
865 {FROM_CLAUSE} WHERE COALESCE(s.is_auxiliary, 0) = 0 \
866 ORDER BY s.created_at DESC"
867 );
868 let conn = self.conn();
869 let mut stmt = conn.prepare(&sql)?;
870 let rows = stmt.query_map([], row_to_local_session)?;
871 let result = rows.collect::<Result<Vec<_>, _>>()?;
872 Ok(result.into_iter().nth(offset as usize))
873 }
874
875 pub fn get_session_source_path(&self, session_id: &str) -> Result<Option<String>> {
877 let conn = self.conn();
878 let result = conn
879 .query_row(
880 "SELECT source_path FROM session_sync WHERE session_id = ?1",
881 params![session_id],
882 |row| row.get(0),
883 )
884 .optional()?;
885
886 Ok(result)
887 }
888
889 pub fn session_count(&self) -> Result<i64> {
891 let count = self
892 .conn()
893 .query_row("SELECT COUNT(*) FROM sessions", [], |row| row.get(0))?;
894 Ok(count)
895 }
896
897 pub fn delete_session(&self, session_id: &str) -> Result<()> {
900 let conn = self.conn();
901 conn.execute(
902 "DELETE FROM body_cache WHERE session_id = ?1",
903 params![session_id],
904 )?;
905 conn.execute(
906 "DELETE FROM session_sync WHERE session_id = ?1",
907 params![session_id],
908 )?;
909 conn.execute("DELETE FROM sessions WHERE id = ?1", params![session_id])?;
910 Ok(())
911 }
912
913 pub fn get_sync_cursor(&self, team_id: &str) -> Result<Option<String>> {
916 let cursor = self
917 .conn()
918 .query_row(
919 "SELECT cursor FROM sync_cursors WHERE team_id = ?1",
920 params![team_id],
921 |row| row.get(0),
922 )
923 .optional()?;
924 Ok(cursor)
925 }
926
927 pub fn set_sync_cursor(&self, team_id: &str, cursor: &str) -> Result<()> {
928 self.conn().execute(
929 "INSERT INTO sync_cursors (team_id, cursor, updated_at) \
930 VALUES (?1, ?2, datetime('now')) \
931 ON CONFLICT(team_id) DO UPDATE SET cursor=excluded.cursor, updated_at=datetime('now')",
932 params![team_id, cursor],
933 )?;
934 Ok(())
935 }
936
937 pub fn pending_uploads(&self, team_id: &str) -> Result<Vec<LocalSessionRow>> {
941 let sql = format!(
942 "SELECT {LOCAL_SESSION_COLUMNS} \
943 FROM sessions s \
944 INNER JOIN session_sync ss ON ss.session_id = s.id \
945 LEFT JOIN users u ON u.id = s.user_id \
946 WHERE ss.sync_status = 'local_only' AND s.team_id = ?1 AND COALESCE(s.is_auxiliary, 0) = 0 \
947 ORDER BY s.created_at ASC"
948 );
949 let conn = self.conn();
950 let mut stmt = conn.prepare(&sql)?;
951 let rows = stmt.query_map(params![team_id], row_to_local_session)?;
952 let mut result = Vec::new();
953 for row in rows {
954 result.push(row?);
955 }
956 Ok(result)
957 }
958
959 pub fn mark_synced(&self, session_id: &str) -> Result<()> {
960 self.conn().execute(
961 "UPDATE session_sync SET sync_status = 'synced', last_synced_at = datetime('now') \
962 WHERE session_id = ?1",
963 params![session_id],
964 )?;
965 Ok(())
966 }
967
968 pub fn was_uploaded_after(
970 &self,
971 source_path: &str,
972 modified: &chrono::DateTime<chrono::Utc>,
973 ) -> Result<bool> {
974 let result: Option<String> = self
975 .conn()
976 .query_row(
977 "SELECT last_synced_at FROM session_sync \
978 WHERE source_path = ?1 AND sync_status = 'synced' AND last_synced_at IS NOT NULL",
979 params![source_path],
980 |row| row.get(0),
981 )
982 .optional()?;
983
984 if let Some(synced_at) = result {
985 if let Ok(dt) = chrono::DateTime::parse_from_rfc3339(&synced_at) {
986 return Ok(dt >= *modified);
987 }
988 }
989 Ok(false)
990 }
991
992 pub fn cache_body(&self, session_id: &str, body: &[u8]) -> Result<()> {
995 self.conn().execute(
996 "INSERT INTO body_cache (session_id, body, cached_at) \
997 VALUES (?1, ?2, datetime('now')) \
998 ON CONFLICT(session_id) DO UPDATE SET body=excluded.body, cached_at=datetime('now')",
999 params![session_id, body],
1000 )?;
1001 Ok(())
1002 }
1003
1004 pub fn get_cached_body(&self, session_id: &str) -> Result<Option<Vec<u8>>> {
1005 let body = self
1006 .conn()
1007 .query_row(
1008 "SELECT body FROM body_cache WHERE session_id = ?1",
1009 params![session_id],
1010 |row| row.get(0),
1011 )
1012 .optional()?;
1013 Ok(body)
1014 }
1015
1016 pub fn find_active_session_for_repo(
1020 &self,
1021 repo_path: &str,
1022 since_minutes: u32,
1023 ) -> Result<Option<LocalSessionRow>> {
1024 let sql = format!(
1025 "SELECT {LOCAL_SESSION_COLUMNS} \
1026 {FROM_CLAUSE} \
1027 WHERE s.working_directory LIKE ?1 \
1028 AND COALESCE(s.is_auxiliary, 0) = 0 \
1029 AND s.created_at >= datetime('now', ?2) \
1030 ORDER BY s.created_at DESC LIMIT 1"
1031 );
1032 let since = format!("-{since_minutes} minutes");
1033 let like = format!("{repo_path}%");
1034 let conn = self.conn();
1035 let mut stmt = conn.prepare(&sql)?;
1036 let row = stmt
1037 .query_map(params![like, since], row_to_local_session)?
1038 .next()
1039 .transpose()?;
1040 Ok(row)
1041 }
1042
1043 pub fn existing_session_ids(&self) -> std::collections::HashSet<String> {
1045 let conn = self.conn();
1046 let mut stmt = conn
1047 .prepare("SELECT id FROM sessions")
1048 .unwrap_or_else(|_| panic!("failed to prepare existing_session_ids query"));
1049 let rows = stmt.query_map([], |row| row.get::<_, String>(0));
1050 let mut set = std::collections::HashSet::new();
1051 if let Ok(rows) = rows {
1052 for row in rows.flatten() {
1053 set.insert(row);
1054 }
1055 }
1056 set
1057 }
1058
    /// Refresh a session row's derived stats/metadata from a re-parsed
    /// `session`. Updates only the computed columns — identity, timestamps,
    /// and git context are left untouched. No-op when the row doesn't exist.
    pub fn update_session_stats(&self, session: &Session) -> Result<()> {
        let title = session.context.title.as_deref();
        let description = session.context.description.as_deref();
        let (files_modified, files_read, has_errors) =
            opensession_core::extract::extract_file_metadata(session);
        let max_active_agents = opensession_core::agent_metrics::max_active_agents(session) as i64;
        let is_auxiliary = is_auxiliary_session(session);

        // ?1 is the row key; ?2-?15 map positionally onto the SET columns.
        self.conn().execute(
            "UPDATE sessions SET \
            title=?2, description=?3, \
            message_count=?4, user_message_count=?5, task_count=?6, \
            event_count=?7, duration_seconds=?8, \
            total_input_tokens=?9, total_output_tokens=?10, \
            files_modified=?11, files_read=?12, has_errors=?13, \
            max_active_agents=?14, is_auxiliary=?15 \
            WHERE id=?1",
            params![
                &session.session_id,
                title,
                description,
                session.stats.message_count as i64,
                session.stats.user_message_count as i64,
                session.stats.task_count as i64,
                session.stats.event_count as i64,
                session.stats.duration_seconds as i64,
                session.stats.total_input_tokens as i64,
                session.stats.total_output_tokens as i64,
                &files_modified,
                &files_read,
                has_errors,
                max_active_agents,
                is_auxiliary as i64,
            ],
        )?;
        Ok(())
    }
1097
1098 pub fn set_session_sync_path(&self, session_id: &str, source_path: &str) -> Result<()> {
1100 self.conn().execute(
1101 "INSERT INTO session_sync (session_id, source_path) \
1102 VALUES (?1, ?2) \
1103 ON CONFLICT(session_id) DO UPDATE SET source_path = excluded.source_path",
1104 params![session_id, source_path],
1105 )?;
1106 Ok(())
1107 }
1108
1109 pub fn list_repos(&self) -> Result<Vec<String>> {
1111 let conn = self.conn();
1112 let mut stmt = conn.prepare(
1113 "SELECT DISTINCT git_repo_name FROM sessions \
1114 WHERE git_repo_name IS NOT NULL AND COALESCE(is_auxiliary, 0) = 0 \
1115 ORDER BY git_repo_name ASC",
1116 )?;
1117 let rows = stmt.query_map([], |row| row.get(0))?;
1118 let mut result = Vec::new();
1119 for row in rows {
1120 result.push(row?);
1121 }
1122 Ok(result)
1123 }
1124
1125 pub fn list_working_directories(&self) -> Result<Vec<String>> {
1127 let conn = self.conn();
1128 let mut stmt = conn.prepare(
1129 "SELECT DISTINCT working_directory FROM sessions \
1130 WHERE working_directory IS NOT NULL AND TRIM(working_directory) <> '' \
1131 AND COALESCE(is_auxiliary, 0) = 0 \
1132 ORDER BY working_directory ASC",
1133 )?;
1134 let rows = stmt.query_map([], |row| row.get(0))?;
1135 let mut result = Vec::new();
1136 for row in rows {
1137 result.push(row?);
1138 }
1139 Ok(result)
1140 }
1141}
1142
1143fn open_connection_with_latest_schema(path: &PathBuf) -> Result<Connection> {
1146 let conn = Connection::open(path).with_context(|| format!("open db {}", path.display()))?;
1147 conn.execute_batch("PRAGMA journal_mode=WAL;")?;
1148
1149 conn.execute_batch("PRAGMA foreign_keys=OFF;")?;
1151
1152 apply_local_migrations(&conn)?;
1153 repair_session_tools_from_source_path(&conn)?;
1154 repair_auxiliary_flags_from_source_path(&conn)?;
1155 validate_local_schema(&conn)?;
1156
1157 Ok(conn)
1158}
1159
1160fn apply_local_migrations(conn: &Connection) -> Result<()> {
1161 conn.execute_batch(
1162 "CREATE TABLE IF NOT EXISTS _migrations (
1163 id INTEGER PRIMARY KEY,
1164 name TEXT NOT NULL UNIQUE,
1165 applied_at TEXT NOT NULL DEFAULT (datetime('now'))
1166 );",
1167 )
1168 .context("create _migrations table for local db")?;
1169
1170 for (name, sql) in MIGRATIONS.iter().chain(LOCAL_MIGRATIONS.iter()) {
1171 let already_applied: bool = conn
1172 .query_row(
1173 "SELECT COUNT(*) > 0 FROM _migrations WHERE name = ?1",
1174 [name],
1175 |row| row.get(0),
1176 )
1177 .unwrap_or(false);
1178
1179 if already_applied {
1180 continue;
1181 }
1182
1183 conn.execute_batch(sql)
1184 .with_context(|| format!("apply local migration {name}"))?;
1185
1186 conn.execute(
1187 "INSERT OR IGNORE INTO _migrations (name) VALUES (?1)",
1188 [name],
1189 )
1190 .with_context(|| format!("record local migration {name}"))?;
1191 }
1192
1193 Ok(())
1194}
1195
1196fn validate_local_schema(conn: &Connection) -> Result<()> {
1197 let sql = format!("SELECT {LOCAL_SESSION_COLUMNS} {FROM_CLAUSE} WHERE 1=0");
1198 conn.prepare(&sql)
1199 .map(|_| ())
1200 .context("validate local session schema")
1201}
1202
1203fn repair_session_tools_from_source_path(conn: &Connection) -> Result<()> {
1204 let mut stmt = conn.prepare(
1205 "SELECT s.id, s.tool, ss.source_path \
1206 FROM sessions s \
1207 LEFT JOIN session_sync ss ON ss.session_id = s.id \
1208 WHERE ss.source_path IS NOT NULL",
1209 )?;
1210 let rows = stmt.query_map([], |row| {
1211 Ok((
1212 row.get::<_, String>(0)?,
1213 row.get::<_, String>(1)?,
1214 row.get::<_, Option<String>>(2)?,
1215 ))
1216 })?;
1217
1218 let mut updates: Vec<(String, String)> = Vec::new();
1219 for row in rows {
1220 let (id, current_tool, source_path) = row?;
1221 let normalized = normalize_tool_for_source_path(¤t_tool, source_path.as_deref());
1222 if normalized != current_tool {
1223 updates.push((id, normalized));
1224 }
1225 }
1226 drop(stmt);
1227
1228 for (id, tool) in updates {
1229 conn.execute(
1230 "UPDATE sessions SET tool = ?1 WHERE id = ?2",
1231 params![tool, id],
1232 )?;
1233 }
1234
1235 Ok(())
1236}
1237
1238fn repair_auxiliary_flags_from_source_path(conn: &Connection) -> Result<()> {
1239 let mut stmt = conn.prepare(
1240 "SELECT s.id, ss.source_path \
1241 FROM sessions s \
1242 LEFT JOIN session_sync ss ON ss.session_id = s.id \
1243 WHERE ss.source_path IS NOT NULL \
1244 AND COALESCE(s.is_auxiliary, 0) = 0",
1245 )?;
1246 let rows = stmt.query_map([], |row| {
1247 Ok((row.get::<_, String>(0)?, row.get::<_, Option<String>>(1)?))
1248 })?;
1249
1250 let mut updates: Vec<String> = Vec::new();
1251 for row in rows {
1252 let (id, source_path) = row?;
1253 let Some(source_path) = source_path else {
1254 continue;
1255 };
1256 if infer_tool_from_source_path(Some(&source_path)) != Some("codex") {
1257 continue;
1258 }
1259 if is_codex_auxiliary_source_file(&source_path) {
1260 updates.push(id);
1261 }
1262 }
1263 drop(stmt);
1264
1265 for id in updates {
1266 conn.execute(
1267 "UPDATE sessions SET is_auxiliary = 1 WHERE id = ?1",
1268 params![id],
1269 )?;
1270 }
1271
1272 Ok(())
1273}
1274
/// Inspect only the first line of a codex rollout file and report whether it
/// carries any subagent/worker marker. Unreadable, missing, or empty files are
/// treated as "not auxiliary".
fn is_codex_auxiliary_source_file(source_path: &str) -> bool {
    // Substrings that identify a subagent-originated rollout in the header.
    const AUXILIARY_MARKERS: [&str; 6] = [
        "\"source\":{\"subagent\"",
        "\"source\": {\"subagent\"",
        "\"agent_role\":\"awaiter\"",
        "\"agent_role\":\"worker\"",
        "\"agent_role\":\"explorer\"",
        "\"agent_role\":\"subagent\"",
    ];

    let file = match fs::File::open(source_path) {
        Ok(f) => f,
        Err(_) => return false,
    };
    let mut header = String::new();
    if BufReader::new(file).read_line(&mut header).is_err() {
        return false;
    }

    let header = header.trim();
    !header.is_empty() && AUXILIARY_MARKERS.iter().any(|m| header.contains(m))
}
1297
/// Canonical column list for session SELECTs (`s` = sessions, `ss` =
/// session_sync, `u` = users from the shared FROM clause).
///
/// The order defines the positional indices consumed by
/// `row_to_local_session` (0 = id … 33 = is_auxiliary); any change here must
/// be mirrored there.
pub const LOCAL_SESSION_COLUMNS: &str = "\
s.id, ss.source_path, COALESCE(ss.sync_status, 'unknown') AS sync_status, ss.last_synced_at, \
s.user_id, u.nickname, s.team_id, s.tool, s.agent_provider, s.agent_model, \
s.title, s.description, s.tags, s.created_at, s.uploaded_at, \
s.message_count, COALESCE(s.user_message_count, 0), s.task_count, s.event_count, s.duration_seconds, \
s.total_input_tokens, s.total_output_tokens, \
s.git_remote, s.git_branch, s.git_commit, s.git_repo_name, \
s.pr_number, s.pr_url, s.working_directory, \
s.files_modified, s.files_read, s.has_errors, COALESCE(s.max_active_agents, 1), COALESCE(s.is_auxiliary, 0)";
1308
/// Map one query result row into a `LocalSessionRow`.
///
/// Positional indices must stay in lockstep with `LOCAL_SESSION_COLUMNS`;
/// reorder one and the other must follow.
fn row_to_local_session(row: &rusqlite::Row) -> rusqlite::Result<LocalSessionRow> {
    // tool is normalized against the source path so stale labels stored in
    // older rows are corrected at read time.
    let source_path: Option<String> = row.get(1)?;
    let tool: String = row.get(7)?;
    let normalized_tool = normalize_tool_for_source_path(&tool, source_path.as_deref());

    Ok(LocalSessionRow {
        id: row.get(0)?,
        source_path,
        sync_status: row.get(2)?,
        last_synced_at: row.get(3)?,
        user_id: row.get(4)?,
        nickname: row.get(5)?,
        team_id: row.get(6)?,
        tool: normalized_tool,
        agent_provider: row.get(8)?,
        agent_model: row.get(9)?,
        title: row.get(10)?,
        description: row.get(11)?,
        tags: row.get(12)?,
        created_at: row.get(13)?,
        uploaded_at: row.get(14)?,
        message_count: row.get(15)?,
        user_message_count: row.get(16)?,
        task_count: row.get(17)?,
        event_count: row.get(18)?,
        duration_seconds: row.get(19)?,
        total_input_tokens: row.get(20)?,
        total_output_tokens: row.get(21)?,
        git_remote: row.get(22)?,
        git_branch: row.get(23)?,
        git_commit: row.get(24)?,
        git_repo_name: row.get(25)?,
        pr_number: row.get(26)?,
        pr_url: row.get(27)?,
        working_directory: row.get(28)?,
        files_modified: row.get(29)?,
        files_read: row.get(30)?,
        // Tolerant defaults when the column is NULL or unreadable:
        // no errors, a single agent, not auxiliary.
        has_errors: row.get::<_, i64>(31).unwrap_or(0) != 0,
        max_active_agents: row.get(32).unwrap_or(1),
        is_auxiliary: row.get::<_, i64>(33).unwrap_or(0) != 0,
    })
}
1351
1352fn default_db_path() -> Result<PathBuf> {
1353 let home = std::env::var("HOME")
1354 .or_else(|_| std::env::var("USERPROFILE"))
1355 .context("Could not determine home directory")?;
1356 Ok(PathBuf::from(home)
1357 .join(".local")
1358 .join("share")
1359 .join("opensession")
1360 .join("local.db"))
1361}
1362
1363#[cfg(test)]
1364mod tests {
1365 use super::*;
1366
1367 use std::fs::{create_dir_all, write};
1368 use tempfile::tempdir;
1369
1370 fn test_db() -> LocalDb {
1371 let dir = tempdir().unwrap();
1372 let path = dir.keep().join("test.db");
1373 LocalDb::open_path(&path).unwrap()
1374 }
1375
1376 fn temp_root() -> tempfile::TempDir {
1377 tempdir().unwrap()
1378 }
1379
    /// Build a minimal `LocalSessionRow` with the given id/tool/source_path,
    /// zeroed stats, and a fixed timestamp; tests tweak fields afterwards.
    fn make_row(id: &str, tool: &str, source_path: Option<&str>) -> LocalSessionRow {
        LocalSessionRow {
            id: id.to_string(),
            source_path: source_path.map(String::from),
            sync_status: "local_only".to_string(),
            last_synced_at: None,
            user_id: None,
            nickname: None,
            team_id: None,
            tool: tool.to_string(),
            agent_provider: None,
            agent_model: None,
            title: Some("test".to_string()),
            description: None,
            tags: None,
            created_at: "2024-01-01T00:00:00Z".to_string(),
            uploaded_at: None,
            message_count: 0,
            user_message_count: 0,
            task_count: 0,
            event_count: 0,
            duration_seconds: 0,
            total_input_tokens: 0,
            total_output_tokens: 0,
            git_remote: None,
            git_branch: None,
            git_commit: None,
            git_repo_name: None,
            pr_number: None,
            pr_url: None,
            working_directory: None,
            files_modified: None,
            files_read: None,
            has_errors: false,
            max_active_agents: 1,
            is_auxiliary: false,
        }
    }
1418
1419 #[test]
1420 fn test_open_and_schema() {
1421 let _db = test_db();
1422 }
1423
    #[test]
    fn test_open_repairs_codex_tool_hint_from_source_path() {
        let dir = tempfile::tempdir().unwrap();
        let path = dir.path().join("repair.db");

        // First open bootstraps the schema.
        {
            let _ = LocalDb::open_path(&path).unwrap();
        }

        // Insert a row mislabeled as claude-code whose source path is clearly
        // a codex rollout file.
        {
            let conn = Connection::open(&path).unwrap();
            conn.execute(
                "INSERT INTO sessions (id, team_id, tool, created_at, body_storage_key) VALUES (?1, 'personal', 'claude-code', ?2, '')",
                params!["rollout-repair", "2026-02-20T00:00:00Z"],
            )
            .unwrap();
            conn.execute(
                "INSERT INTO session_sync (session_id, source_path, sync_status) VALUES (?1, ?2, 'local_only')",
                params!["rollout-repair", "/Users/test/.codex/sessions/2026/02/20/rollout-repair.jsonl"],
            )
            .unwrap();
        }

        // Re-opening runs the repair pass, which should relabel the tool.
        let db = LocalDb::open_path(&path).unwrap();
        let rows = db.list_sessions(&LocalSessionFilter::default()).unwrap();
        let row = rows
            .iter()
            .find(|row| row.id == "rollout-repair")
            .expect("repaired row");
        assert_eq!(row.tool, "codex");
    }
1455
    #[test]
    fn test_open_repairs_codex_auxiliary_flag_from_source_path() {
        let dir = tempfile::tempdir().unwrap();
        let path = dir.path().join("repair-auxiliary.db");
        // Create a real rollout file whose header marks a subagent origin.
        let codex_dir = dir
            .path()
            .join(".codex")
            .join("sessions")
            .join("2026")
            .join("02")
            .join("20");
        create_dir_all(&codex_dir).unwrap();
        let source_path = codex_dir.join("rollout-subagent.jsonl");
        write(
            &source_path,
            r#"{"timestamp":"2026-02-20T00:00:00.000Z","type":"session_meta","payload":{"id":"rollout-subagent","timestamp":"2026-02-20T00:00:00.000Z","cwd":"/tmp","originator":"Codex Desktop","cli_version":"0.105.0","source":{"subagent":{"thread_spawn":{"parent_thread_id":"parent-session-id","depth":1,"agent_role":"awaiter"}}},"agent_role":"awaiter"}}\n"#,
        )
        .unwrap();

        // First open bootstraps the schema.
        {
            let _ = LocalDb::open_path(&path).unwrap();
        }

        // Insert the session with is_auxiliary wrongly set to 0.
        {
            let conn = Connection::open(&path).unwrap();
            conn.execute(
                "INSERT INTO sessions (id, team_id, tool, created_at, body_storage_key, is_auxiliary) VALUES (?1, 'personal', 'codex', ?2, '', 0)",
                params!["rollout-subagent", "2026-02-20T00:00:00Z"],
            )
            .unwrap();
            conn.execute(
                "INSERT INTO session_sync (session_id, source_path, sync_status) VALUES (?1, ?2, 'local_only')",
                params!["rollout-subagent", source_path.to_string_lossy().to_string()],
            )
            .unwrap();
        }

        // Re-opening runs the auxiliary repair; repaired rows are filtered out
        // of the default listing.
        let db = LocalDb::open_path(&path).unwrap();
        let rows = db.list_sessions(&LocalSessionFilter::default()).unwrap();
        assert!(
            rows.iter().all(|row| row.id != "rollout-subagent"),
            "auxiliary codex session should be hidden after repair"
        );
    }
1500
    #[test]
    fn test_upsert_local_session_normalizes_tool_from_source_path() {
        let db = test_db();
        // Session claims claude-code, but the source path is a codex rollout.
        let mut session = Session::new(
            "rollout-upsert".to_string(),
            opensession_core::trace::Agent {
                provider: "openai".to_string(),
                model: "gpt-5".to_string(),
                tool: "claude-code".to_string(),
                tool_version: None,
            },
        );
        session.stats.event_count = 1;

        db.upsert_local_session(
            &session,
            "/Users/test/.codex/sessions/2026/02/20/rollout-upsert.jsonl",
            &crate::git::GitContext::default(),
        )
        .unwrap();

        // The stored/listed tool must be normalized from the path.
        let rows = db.list_sessions(&LocalSessionFilter::default()).unwrap();
        let row = rows
            .iter()
            .find(|row| row.id == "rollout-upsert")
            .expect("upserted row");
        assert_eq!(row.tool, "codex");
    }
1529
    #[test]
    fn test_upsert_local_session_preserves_existing_git_when_session_has_no_git_metadata() {
        let db = test_db();
        let mut session = Session::new(
            "preserve-git".to_string(),
            opensession_core::trace::Agent {
                provider: "openai".to_string(),
                model: "gpt-5".to_string(),
                tool: "codex".to_string(),
                tool_version: None,
            },
        );
        session.stats.event_count = 1;

        // First upsert records the original branch/commit.
        let first_git = crate::git::GitContext {
            remote: Some("https://github.com/acme/repo.git".to_string()),
            branch: Some("feature/original".to_string()),
            commit: Some("1111111".to_string()),
            repo_name: Some("acme/repo".to_string()),
        };
        db.upsert_local_session(
            &session,
            "/Users/test/.codex/sessions/2026/02/20/preserve-git.jsonl",
            &first_git,
        )
        .unwrap();

        // Second upsert carries a different HEAD; since the session itself has
        // no git attributes, the originally stored values must survive.
        let second_git = crate::git::GitContext {
            remote: Some("https://github.com/acme/repo.git".to_string()),
            branch: Some("feature/current-head".to_string()),
            commit: Some("2222222".to_string()),
            repo_name: Some("acme/repo".to_string()),
        };
        db.upsert_local_session(
            &session,
            "/Users/test/.codex/sessions/2026/02/20/preserve-git.jsonl",
            &second_git,
        )
        .unwrap();

        let rows = db.list_sessions(&LocalSessionFilter::default()).unwrap();
        let row = rows
            .iter()
            .find(|row| row.id == "preserve-git")
            .expect("row exists");
        assert_eq!(row.git_branch.as_deref(), Some("feature/original"));
        assert_eq!(row.git_commit.as_deref(), Some("1111111"));
    }
1578
    #[test]
    fn test_upsert_local_session_prefers_git_branch_from_session_attributes() {
        let db = test_db();
        let mut session = Session::new(
            "session-git-branch".to_string(),
            opensession_core::trace::Agent {
                provider: "anthropic".to_string(),
                model: "claude-opus-4-6".to_string(),
                tool: "claude-code".to_string(),
                tool_version: None,
            },
        );
        session.stats.event_count = 1;
        // The session carries its own git_branch attribute, which should win
        // over the fallback GitContext on every upsert.
        session.context.attributes.insert(
            "git_branch".to_string(),
            serde_json::Value::String("from-session".to_string()),
        );

        let fallback_git = crate::git::GitContext {
            remote: Some("https://github.com/acme/repo.git".to_string()),
            branch: Some("fallback-branch".to_string()),
            commit: Some("aaaaaaaa".to_string()),
            repo_name: Some("acme/repo".to_string()),
        };
        db.upsert_local_session(
            &session,
            "/Users/test/.claude/projects/foo/session-git-branch.jsonl",
            &fallback_git,
        )
        .unwrap();

        // A changed session attribute must also overwrite the stored value.
        session.context.attributes.insert(
            "git_branch".to_string(),
            serde_json::Value::String("from-session-updated".to_string()),
        );
        db.upsert_local_session(
            &session,
            "/Users/test/.claude/projects/foo/session-git-branch.jsonl",
            &fallback_git,
        )
        .unwrap();

        let rows = db.list_sessions(&LocalSessionFilter::default()).unwrap();
        let row = rows
            .iter()
            .find(|row| row.id == "session-git-branch")
            .expect("row exists");
        assert_eq!(row.git_branch.as_deref(), Some("from-session-updated"));
    }
1628
    #[test]
    fn test_is_opencode_child_session() {
        let root = temp_root();
        let dir = root.path().join("sessions");
        create_dir_all(&dir).unwrap();
        let parent_session = dir.join("parent.json");
        write(
            &parent_session,
            r#"{"id":"ses_parent","time":{"created":1000,"updated":1000}}"#,
        )
        .unwrap();
        let child_session = dir.join("child.json");
        write(
            &child_session,
            r#"{"id":"ses_child","parentID":"ses_parent","time":{"created":1000,"updated":1000}}"#,
        )
        .unwrap();

        // Only the combination tool == "opencode" AND is_auxiliary counts as a
        // child: the parent lacks the flag, the codex row has the wrong tool.
        let parent = make_row(
            "ses_parent",
            "opencode",
            Some(parent_session.to_str().unwrap()),
        );
        let mut child = make_row(
            "ses_child",
            "opencode",
            Some(child_session.to_str().unwrap()),
        );
        child.is_auxiliary = true;
        let mut codex = make_row("ses_other", "codex", Some(child_session.to_str().unwrap()));
        codex.is_auxiliary = true;

        assert!(!is_opencode_child_session(&parent));
        assert!(is_opencode_child_session(&child));
        assert!(!is_opencode_child_session(&codex));
    }
1665
    // Deprecated API is exercised on purpose until callers migrate.
    #[allow(deprecated)]
    #[test]
    fn test_parse_opencode_parent_session_id_aliases() {
        let root = temp_root();
        let dir = root.path().join("session-aliases");
        create_dir_all(&dir).unwrap();
        let child_session = dir.join("child.json");
        // "parentUUID" is an accepted alias for the parent-id key.
        write(
            &child_session,
            r#"{"id":"ses_child","parentUUID":"ses_parent","time":{"created":1000,"updated":1000}}"#,
        )
        .unwrap();
        assert_eq!(
            parse_opencode_parent_session_id(child_session.to_str().unwrap()).as_deref(),
            Some("ses_parent")
        );
    }
1683
    // Deprecated API is exercised on purpose until callers migrate.
    #[allow(deprecated)]
    #[test]
    fn test_parse_opencode_parent_session_id_nested_metadata() {
        let root = temp_root();
        let dir = root.path().join("session-nested");
        create_dir_all(&dir).unwrap();
        let child_session = dir.join("child.json");
        // The parent id may also live nested under metadata.links.
        write(
            &child_session,
            r#"{"id":"ses_child","metadata":{"links":{"parentSessionId":"ses_parent","trace":"x"}}}"#,
        )
        .unwrap();
        assert_eq!(
            parse_opencode_parent_session_id(child_session.to_str().unwrap()).as_deref(),
            Some("ses_parent")
        );
    }
1701
    #[test]
    fn test_hide_opencode_child_sessions() {
        let root = temp_root();
        let dir = root.path().join("sessions");
        create_dir_all(&dir).unwrap();
        let parent_session = dir.join("parent.json");
        let child_session = dir.join("child.json");
        let orphan_session = dir.join("orphan.json");

        write(
            &parent_session,
            r#"{"id":"ses_parent","time":{"created":1000,"updated":1000}}"#,
        )
        .unwrap();
        write(
            &child_session,
            r#"{"id":"ses_child","parentID":"ses_parent","time":{"created":1000,"updated":1000}}"#,
        )
        .unwrap();
        write(
            &orphan_session,
            r#"{"id":"ses_orphan","time":{"created":1000,"updated":1000}}"#,
        )
        .unwrap();

        // Four rows: one auxiliary opencode child (should be hidden), the
        // opencode parent, an unrelated codex row, and an opencode orphan.
        let rows = vec![
            {
                let mut row = make_row(
                    "ses_child",
                    "opencode",
                    Some(child_session.to_str().unwrap()),
                );
                row.is_auxiliary = true;
                row
            },
            make_row(
                "ses_parent",
                "opencode",
                Some(parent_session.to_str().unwrap()),
            ),
            {
                let mut row = make_row("ses_other", "codex", None);
                row.user_message_count = 1;
                row
            },
            make_row(
                "ses_orphan",
                "opencode",
                Some(orphan_session.to_str().unwrap()),
            ),
        ];

        // Only the auxiliary opencode child is filtered out.
        let filtered = hide_opencode_child_sessions(rows);
        assert_eq!(filtered.len(), 3);
        assert!(filtered.iter().all(|r| r.id != "ses_child"));
    }
1758
1759 #[test]
1760 fn test_sync_cursor() {
1761 let db = test_db();
1762 assert_eq!(db.get_sync_cursor("team1").unwrap(), None);
1763 db.set_sync_cursor("team1", "2024-01-01T00:00:00Z").unwrap();
1764 assert_eq!(
1765 db.get_sync_cursor("team1").unwrap(),
1766 Some("2024-01-01T00:00:00Z".to_string())
1767 );
1768 db.set_sync_cursor("team1", "2024-06-01T00:00:00Z").unwrap();
1770 assert_eq!(
1771 db.get_sync_cursor("team1").unwrap(),
1772 Some("2024-06-01T00:00:00Z".to_string())
1773 );
1774 }
1775
1776 #[test]
1777 fn test_body_cache() {
1778 let db = test_db();
1779 assert_eq!(db.get_cached_body("s1").unwrap(), None);
1780 db.cache_body("s1", b"hello world").unwrap();
1781 assert_eq!(
1782 db.get_cached_body("s1").unwrap(),
1783 Some(b"hello world".to_vec())
1784 );
1785 }
1786
    #[test]
    fn test_local_migrations_are_loaded_from_api_crate() {
        // The single local migration must come from opensession-api…
        let migration_names: Vec<&str> = super::LOCAL_MIGRATIONS
            .iter()
            .map(|(name, _)| *name)
            .collect();
        assert!(
            migration_names.contains(&"local_0001_schema"),
            "expected local_0001_schema migration from opensession-api"
        );
        assert_eq!(
            migration_names.len(),
            1,
            "local schema should be single-step"
        );

        // …and this crate must not carry its own duplicate .sql files.
        let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
        let migrations_dir = manifest_dir.join("migrations");
        if migrations_dir.exists() {
            let sql_files = std::fs::read_dir(migrations_dir)
                .expect("read local-db migrations directory")
                .filter_map(Result::ok)
                .map(|entry| entry.file_name().to_string_lossy().to_string())
                .filter(|name| name.ends_with(".sql"))
                .collect::<Vec<_>>();
            assert!(
                sql_files.is_empty(),
                "local-db must not ship duplicated migration SQL files"
            );
        }
    }
1818
    #[test]
    fn test_local_schema_bootstrap_includes_is_auxiliary_column() {
        let dir = tempdir().unwrap();
        let path = dir.path().join("local.db");
        let db = LocalDb::open_path(&path).unwrap();
        let conn = db.conn();
        // PRAGMA table_info column 1 is the column name.
        let mut stmt = conn.prepare("PRAGMA table_info(sessions)").unwrap();
        let columns = stmt
            .query_map([], |row| row.get::<_, String>(1))
            .unwrap()
            .collect::<std::result::Result<Vec<_>, _>>()
            .unwrap();
        assert!(
            columns.iter().any(|name| name == "is_auxiliary"),
            "sessions schema must include is_auxiliary column in bootstrap migration"
        );
    }
1836
1837 #[test]
1838 fn test_upsert_remote_session() {
1839 let db = test_db();
1840 let summary = RemoteSessionSummary {
1841 id: "remote-1".to_string(),
1842 user_id: Some("u1".to_string()),
1843 nickname: Some("alice".to_string()),
1844 team_id: "t1".to_string(),
1845 tool: "claude-code".to_string(),
1846 agent_provider: None,
1847 agent_model: None,
1848 title: Some("Test session".to_string()),
1849 description: None,
1850 tags: None,
1851 created_at: "2024-01-01T00:00:00Z".to_string(),
1852 uploaded_at: "2024-01-01T01:00:00Z".to_string(),
1853 message_count: 10,
1854 task_count: 2,
1855 event_count: 20,
1856 duration_seconds: 300,
1857 total_input_tokens: 1000,
1858 total_output_tokens: 500,
1859 git_remote: None,
1860 git_branch: None,
1861 git_commit: None,
1862 git_repo_name: None,
1863 pr_number: None,
1864 pr_url: None,
1865 working_directory: None,
1866 files_modified: None,
1867 files_read: None,
1868 has_errors: false,
1869 max_active_agents: 1,
1870 };
1871 db.upsert_remote_session(&summary).unwrap();
1872
1873 let sessions = db.list_sessions(&LocalSessionFilter::default()).unwrap();
1874 assert_eq!(sessions.len(), 1);
1875 assert_eq!(sessions[0].id, "remote-1");
1876 assert_eq!(sessions[0].sync_status, "remote_only");
1877 assert_eq!(sessions[0].nickname, None); assert!(!sessions[0].is_auxiliary);
1879 }
1880
    // NOTE(review): the name says "by_repo" but the filters below use team_id
    // only — consider renaming to test_list_filter_by_team.
    #[test]
    fn test_list_filter_by_repo() {
        let db = test_db();
        let summary1 = RemoteSessionSummary {
            id: "s1".to_string(),
            user_id: None,
            nickname: None,
            team_id: "t1".to_string(),
            tool: "claude-code".to_string(),
            agent_provider: None,
            agent_model: None,
            title: Some("Session 1".to_string()),
            description: None,
            tags: None,
            created_at: "2024-01-01T00:00:00Z".to_string(),
            uploaded_at: "2024-01-01T01:00:00Z".to_string(),
            message_count: 5,
            task_count: 0,
            event_count: 10,
            duration_seconds: 60,
            total_input_tokens: 100,
            total_output_tokens: 50,
            git_remote: None,
            git_branch: None,
            git_commit: None,
            git_repo_name: None,
            pr_number: None,
            pr_url: None,
            working_directory: None,
            files_modified: None,
            files_read: None,
            has_errors: false,
            max_active_agents: 1,
        };
        db.upsert_remote_session(&summary1).unwrap();

        // Matching team id returns the row; a non-existent team returns none.
        let filter = LocalSessionFilter {
            team_id: Some("t1".to_string()),
            ..Default::default()
        };
        assert_eq!(db.list_sessions(&filter).unwrap().len(), 1);

        let filter = LocalSessionFilter {
            team_id: Some("t999".to_string()),
            ..Default::default()
        };
        assert_eq!(db.list_sessions(&filter).unwrap().len(), 0);
    }
1931
    /// Build a RemoteSessionSummary for team "t1" with an anthropic/claude
    /// agent and small fixed stats; id/tool/title/created_at vary per test.
    fn make_summary(id: &str, tool: &str, title: &str, created_at: &str) -> RemoteSessionSummary {
        RemoteSessionSummary {
            id: id.to_string(),
            user_id: None,
            nickname: None,
            team_id: "t1".to_string(),
            tool: tool.to_string(),
            agent_provider: Some("anthropic".to_string()),
            agent_model: Some("claude-opus-4-6".to_string()),
            title: Some(title.to_string()),
            description: None,
            tags: None,
            created_at: created_at.to_string(),
            uploaded_at: created_at.to_string(),
            message_count: 5,
            task_count: 1,
            event_count: 10,
            duration_seconds: 300,
            total_input_tokens: 1000,
            total_output_tokens: 500,
            git_remote: None,
            git_branch: None,
            git_commit: None,
            git_repo_name: None,
            pr_number: None,
            pr_url: None,
            working_directory: None,
            files_modified: None,
            files_read: None,
            has_errors: false,
            max_active_agents: 1,
        }
    }
1967
    /// Seed five sessions s1..s5 dated 2024-01-01 through 2024-01-05; s3 is
    /// the only gemini session, the rest are claude-code.
    fn seed_sessions(db: &LocalDb) {
        db.upsert_remote_session(&make_summary(
            "s1",
            "claude-code",
            "First session",
            "2024-01-01T00:00:00Z",
        ))
        .unwrap();
        db.upsert_remote_session(&make_summary(
            "s2",
            "claude-code",
            "JWT auth work",
            "2024-01-02T00:00:00Z",
        ))
        .unwrap();
        db.upsert_remote_session(&make_summary(
            "s3",
            "gemini",
            "Gemini test",
            "2024-01-03T00:00:00Z",
        ))
        .unwrap();
        db.upsert_remote_session(&make_summary(
            "s4",
            "claude-code",
            "Error handling",
            "2024-01-04T00:00:00Z",
        ))
        .unwrap();
        db.upsert_remote_session(&make_summary(
            "s5",
            "claude-code",
            "Final polish",
            "2024-01-05T00:00:00Z",
        ))
        .unwrap();
    }
2006
2007 #[test]
2010 fn test_log_no_filters() {
2011 let db = test_db();
2012 seed_sessions(&db);
2013 let filter = LogFilter::default();
2014 let results = db.list_sessions_log(&filter).unwrap();
2015 assert_eq!(results.len(), 5);
2016 assert_eq!(results[0].id, "s5");
2018 assert_eq!(results[4].id, "s1");
2019 }
2020
2021 #[test]
2022 fn test_log_filter_by_tool() {
2023 let db = test_db();
2024 seed_sessions(&db);
2025 let filter = LogFilter {
2026 tool: Some("claude-code".to_string()),
2027 ..Default::default()
2028 };
2029 let results = db.list_sessions_log(&filter).unwrap();
2030 assert_eq!(results.len(), 4);
2031 assert!(results.iter().all(|s| s.tool == "claude-code"));
2032 }
2033
2034 #[test]
2035 fn test_log_filter_by_model_wildcard() {
2036 let db = test_db();
2037 seed_sessions(&db);
2038 let filter = LogFilter {
2039 model: Some("claude*".to_string()),
2040 ..Default::default()
2041 };
2042 let results = db.list_sessions_log(&filter).unwrap();
2043 assert_eq!(results.len(), 5); }
2045
2046 #[test]
2047 fn test_log_filter_since() {
2048 let db = test_db();
2049 seed_sessions(&db);
2050 let filter = LogFilter {
2051 since: Some("2024-01-03T00:00:00Z".to_string()),
2052 ..Default::default()
2053 };
2054 let results = db.list_sessions_log(&filter).unwrap();
2055 assert_eq!(results.len(), 3); }
2057
2058 #[test]
2059 fn test_log_filter_before() {
2060 let db = test_db();
2061 seed_sessions(&db);
2062 let filter = LogFilter {
2063 before: Some("2024-01-03T00:00:00Z".to_string()),
2064 ..Default::default()
2065 };
2066 let results = db.list_sessions_log(&filter).unwrap();
2067 assert_eq!(results.len(), 2); }
2069
2070 #[test]
2071 fn test_log_filter_since_and_before() {
2072 let db = test_db();
2073 seed_sessions(&db);
2074 let filter = LogFilter {
2075 since: Some("2024-01-02T00:00:00Z".to_string()),
2076 before: Some("2024-01-04T00:00:00Z".to_string()),
2077 ..Default::default()
2078 };
2079 let results = db.list_sessions_log(&filter).unwrap();
2080 assert_eq!(results.len(), 2); }
2082
2083 #[test]
2084 fn test_log_filter_grep() {
2085 let db = test_db();
2086 seed_sessions(&db);
2087 let filter = LogFilter {
2088 grep: Some("JWT".to_string()),
2089 ..Default::default()
2090 };
2091 let results = db.list_sessions_log(&filter).unwrap();
2092 assert_eq!(results.len(), 1);
2093 assert_eq!(results[0].id, "s2");
2094 }
2095
2096 #[test]
2097 fn test_log_limit_and_offset() {
2098 let db = test_db();
2099 seed_sessions(&db);
2100 let filter = LogFilter {
2101 limit: Some(2),
2102 offset: Some(1),
2103 ..Default::default()
2104 };
2105 let results = db.list_sessions_log(&filter).unwrap();
2106 assert_eq!(results.len(), 2);
2107 assert_eq!(results[0].id, "s4"); assert_eq!(results[1].id, "s3");
2109 }
2110
2111 #[test]
2112 fn test_log_limit_only() {
2113 let db = test_db();
2114 seed_sessions(&db);
2115 let filter = LogFilter {
2116 limit: Some(3),
2117 ..Default::default()
2118 };
2119 let results = db.list_sessions_log(&filter).unwrap();
2120 assert_eq!(results.len(), 3);
2121 }
2122
2123 #[test]
2124 fn test_list_sessions_limit_offset() {
2125 let db = test_db();
2126 seed_sessions(&db);
2127 let filter = LocalSessionFilter {
2128 limit: Some(2),
2129 offset: Some(1),
2130 ..Default::default()
2131 };
2132 let results = db.list_sessions(&filter).unwrap();
2133 assert_eq!(results.len(), 2);
2134 assert_eq!(results[0].id, "s4");
2135 assert_eq!(results[1].id, "s3");
2136 }
2137
2138 #[test]
2139 fn test_count_sessions_filtered() {
2140 let db = test_db();
2141 seed_sessions(&db);
2142 let count = db
2143 .count_sessions_filtered(&LocalSessionFilter::default())
2144 .unwrap();
2145 assert_eq!(count, 5);
2146 }
2147
    #[test]
    fn test_list_and_count_filters_match_when_auxiliary_rows_exist() {
        let db = test_db();
        seed_sessions(&db);
        // Flag s2 and s3 auxiliary directly in SQL, bypassing the API.
        db.conn()
            .execute(
                "UPDATE sessions SET is_auxiliary = 1 WHERE id IN ('s2', 's3')",
                [],
            )
            .unwrap();

        // list and count must agree, and auxiliary rows must be excluded.
        let default_filter = LocalSessionFilter::default();
        let rows = db.list_sessions(&default_filter).unwrap();
        let count = db.count_sessions_filtered(&default_filter).unwrap();
        assert_eq!(rows.len() as i64, count);
        assert!(rows.iter().all(|row| !row.is_auxiliary));

        // The only gemini session (s3) is auxiliary, so both paths see zero.
        let gemini_filter = LocalSessionFilter {
            tool: Some("gemini".to_string()),
            ..Default::default()
        };
        let gemini_rows = db.list_sessions(&gemini_filter).unwrap();
        let gemini_count = db.count_sessions_filtered(&gemini_filter).unwrap();
        assert_eq!(gemini_rows.len() as i64, gemini_count);
        assert!(gemini_rows.is_empty());
        assert_eq!(gemini_count, 0);
    }
2175
2176 #[test]
2177 fn test_exclude_low_signal_filter_hides_metadata_only_sessions() {
2178 let db = test_db();
2179
2180 let mut low_signal = make_summary("meta-only", "claude-code", "", "2024-01-01T00:00:00Z");
2181 low_signal.title = None;
2182 low_signal.message_count = 0;
2183 low_signal.task_count = 0;
2184 low_signal.event_count = 2;
2185 low_signal.git_repo_name = Some("frontend/aviss-react-front".to_string());
2186
2187 let mut normal = make_summary(
2188 "real-work",
2189 "opencode",
2190 "Socket.IO decision",
2191 "2024-01-02T00:00:00Z",
2192 );
2193 normal.message_count = 14;
2194 normal.task_count = 2;
2195 normal.event_count = 38;
2196 normal.git_repo_name = Some("frontend/aviss-react-front".to_string());
2197
2198 db.upsert_remote_session(&low_signal).unwrap();
2199 db.upsert_remote_session(&normal).unwrap();
2200
2201 let default_filter = LocalSessionFilter {
2202 git_repo_name: Some("frontend/aviss-react-front".to_string()),
2203 ..Default::default()
2204 };
2205 assert_eq!(db.list_sessions(&default_filter).unwrap().len(), 2);
2206 assert_eq!(db.count_sessions_filtered(&default_filter).unwrap(), 2);
2207
2208 let repo_filter = LocalSessionFilter {
2209 git_repo_name: Some("frontend/aviss-react-front".to_string()),
2210 exclude_low_signal: true,
2211 ..Default::default()
2212 };
2213 let rows = db.list_sessions(&repo_filter).unwrap();
2214 assert_eq!(rows.len(), 1);
2215 assert_eq!(rows[0].id, "real-work");
2216 assert_eq!(db.count_sessions_filtered(&repo_filter).unwrap(), 1);
2217 }
2218
2219 #[test]
2220 fn test_list_working_directories_distinct_non_empty() {
2221 let db = test_db();
2222
2223 let mut a = make_summary("wd-1", "claude-code", "One", "2024-01-01T00:00:00Z");
2224 a.working_directory = Some("/tmp/repo-a".to_string());
2225 let mut b = make_summary("wd-2", "claude-code", "Two", "2024-01-02T00:00:00Z");
2226 b.working_directory = Some("/tmp/repo-a".to_string());
2227 let mut c = make_summary("wd-3", "claude-code", "Three", "2024-01-03T00:00:00Z");
2228 c.working_directory = Some("/tmp/repo-b".to_string());
2229 let mut d = make_summary("wd-4", "claude-code", "Four", "2024-01-04T00:00:00Z");
2230 d.working_directory = Some("".to_string());
2231
2232 db.upsert_remote_session(&a).unwrap();
2233 db.upsert_remote_session(&b).unwrap();
2234 db.upsert_remote_session(&c).unwrap();
2235 db.upsert_remote_session(&d).unwrap();
2236
2237 let dirs = db.list_working_directories().unwrap();
2238 assert_eq!(
2239 dirs,
2240 vec!["/tmp/repo-a".to_string(), "/tmp/repo-b".to_string()]
2241 );
2242 }
2243
2244 #[test]
2245 fn test_list_session_tools() {
2246 let db = test_db();
2247 seed_sessions(&db);
2248 let tools = db
2249 .list_session_tools(&LocalSessionFilter::default())
2250 .unwrap();
2251 assert_eq!(tools, vec!["claude-code".to_string(), "gemini".to_string()]);
2252 }
2253
2254 #[test]
2255 fn test_log_combined_filters() {
2256 let db = test_db();
2257 seed_sessions(&db);
2258 let filter = LogFilter {
2259 tool: Some("claude-code".to_string()),
2260 since: Some("2024-01-03T00:00:00Z".to_string()),
2261 limit: Some(1),
2262 ..Default::default()
2263 };
2264 let results = db.list_sessions_log(&filter).unwrap();
2265 assert_eq!(results.len(), 1);
2266 assert_eq!(results[0].id, "s5"); }
2268
2269 #[test]
2272 fn test_get_session_by_offset() {
2273 let db = test_db();
2274 seed_sessions(&db);
2275 let row = db.get_session_by_offset(0).unwrap().unwrap();
2276 assert_eq!(row.id, "s5"); let row = db.get_session_by_offset(2).unwrap().unwrap();
2278 assert_eq!(row.id, "s3");
2279 assert!(db.get_session_by_offset(10).unwrap().is_none());
2280 }
2281
2282 #[test]
2283 fn test_get_session_by_tool_offset() {
2284 let db = test_db();
2285 seed_sessions(&db);
2286 let row = db
2287 .get_session_by_tool_offset("claude-code", 0)
2288 .unwrap()
2289 .unwrap();
2290 assert_eq!(row.id, "s5");
2291 let row = db
2292 .get_session_by_tool_offset("claude-code", 1)
2293 .unwrap()
2294 .unwrap();
2295 assert_eq!(row.id, "s4");
2296 let row = db.get_session_by_tool_offset("gemini", 0).unwrap().unwrap();
2297 assert_eq!(row.id, "s3");
2298 assert!(db
2299 .get_session_by_tool_offset("gemini", 1)
2300 .unwrap()
2301 .is_none());
2302 }
2303
2304 #[test]
2305 fn test_get_sessions_latest() {
2306 let db = test_db();
2307 seed_sessions(&db);
2308 let rows = db.get_sessions_latest(3).unwrap();
2309 assert_eq!(rows.len(), 3);
2310 assert_eq!(rows[0].id, "s5");
2311 assert_eq!(rows[1].id, "s4");
2312 assert_eq!(rows[2].id, "s3");
2313 }
2314
2315 #[test]
2316 fn test_get_sessions_by_tool_latest() {
2317 let db = test_db();
2318 seed_sessions(&db);
2319 let rows = db.get_sessions_by_tool_latest("claude-code", 2).unwrap();
2320 assert_eq!(rows.len(), 2);
2321 assert_eq!(rows[0].id, "s5");
2322 assert_eq!(rows[1].id, "s4");
2323 }
2324
2325 #[test]
2326 fn test_get_sessions_latest_more_than_available() {
2327 let db = test_db();
2328 seed_sessions(&db);
2329 let rows = db.get_sessions_by_tool_latest("gemini", 10).unwrap();
2330 assert_eq!(rows.len(), 1); }
2332
2333 #[test]
2334 fn test_session_count() {
2335 let db = test_db();
2336 assert_eq!(db.session_count().unwrap(), 0);
2337 seed_sessions(&db);
2338 assert_eq!(db.session_count().unwrap(), 5);
2339 }
2340}