1pub mod schema;
2#[cfg(test)]
3mod test_helpers;
4
5use anyhow::{Context, Result};
6use rusqlite::Connection;
7use serde::{Deserialize, Serialize};
8use std::path::Path;
9use std::sync::{Arc, Mutex};
10
/// One blocking relationship as returned by `Database::get_blockers_for_issues`:
/// `issue_id` is the blocked issue; the remaining fields describe the blocker.
pub struct BlockerRow {
    // Id of the blocked issue (one of the ids passed to the query).
    pub issue_id: String,
    // Human identifier of the blocking issue (falls back to the stored
    // identifier string when the blocker is not synced locally).
    pub identifier: String,
    // Blocker's title; "" when the blocker row is absent locally.
    pub title: String,
    // Blocker's workflow state name; "" when absent locally.
    pub state_name: String,
    // Blocker's state category; "" when absent locally.
    pub state_type: String,
}
18
/// Handle to the local SQLite cache. Cheap to clone: all clones share one
/// connection behind an `Arc<Mutex<_>>`.
#[derive(Clone)]
pub struct Database {
    // Single connection serialized by the mutex; every query helper goes
    // through `with_conn` to acquire it.
    conn: Arc<Mutex<Connection>>,
}
23
24impl Database {
25 pub fn open(path: &Path) -> Result<Self> {
26 let conn = Connection::open(path)
27 .with_context(|| format!("Failed to open database at {}", path.display()))?;
28
29 conn.execute_batch("PRAGMA journal_mode=WAL; PRAGMA foreign_keys=ON;")?;
30
31 let db = Self {
32 conn: Arc::new(Mutex::new(conn)),
33 };
34 db.migrate()?;
35 Ok(db)
36 }
37
38 fn migrate(&self) -> Result<()> {
39 let conn = self.conn.lock().unwrap();
40 schema::run_migrations(&conn)?;
41 Ok(())
42 }
43
    /// Runs `f` with the shared connection while holding its mutex, and
    /// returns whatever `f` produces.
    ///
    /// All query helpers on `Database` funnel through this so the connection
    /// is only ever used by one thread at a time.
    ///
    /// Panics if the mutex is poisoned (i.e. a previous holder panicked).
    pub fn with_conn<F, T>(&self, f: F) -> Result<T>
    where
        F: FnOnce(&Connection) -> Result<T>,
    {
        let conn = self.conn.lock().unwrap();
        f(&conn)
    }
51
52 pub fn upsert_workspace(
55 &self,
56 id: &str,
57 linear_org_id: Option<&str>,
58 display_name: Option<&str>,
59 ) -> Result<()> {
60 self.with_conn(|conn| {
61 conn.execute(
62 "INSERT INTO workspaces (id, linear_org_id, display_name)
63 VALUES (?1, ?2, ?3)
64 ON CONFLICT(id) DO UPDATE SET
65 linear_org_id=excluded.linear_org_id,
66 display_name=excluded.display_name",
67 rusqlite::params![id, linear_org_id, display_name],
68 )?;
69 Ok(())
70 })
71 }
72
73 pub fn get_workspace(&self, id: &str) -> Result<Option<WorkspaceRow>> {
74 self.with_conn(|conn| {
75 let mut stmt = conn.prepare(
76 "SELECT id, linear_org_id, display_name, created_at FROM workspaces WHERE id = ?1",
77 )?;
78 let mut rows = stmt.query(rusqlite::params![id])?;
79 if let Some(row) = rows.next()? {
80 Ok(Some(WorkspaceRow {
81 id: row.get(0)?,
82 linear_org_id: row.get(1)?,
83 display_name: row.get(2)?,
84 created_at: row.get(3)?,
85 }))
86 } else {
87 Ok(None)
88 }
89 })
90 }
91
92 pub fn list_workspaces(&self) -> Result<Vec<WorkspaceRow>> {
93 self.with_conn(|conn| {
94 let mut stmt = conn.prepare(
95 "SELECT id, linear_org_id, display_name, created_at FROM workspaces ORDER BY id",
96 )?;
97 let rows = stmt.query_map([], |row| {
98 Ok(WorkspaceRow {
99 id: row.get(0)?,
100 linear_org_id: row.get(1)?,
101 display_name: row.get(2)?,
102 created_at: row.get(3)?,
103 })
104 })?;
105 Ok(rows.collect::<std::result::Result<Vec<_>, _>>()?)
106 })
107 }
108
109 pub fn delete_workspace(&self, id: &str) -> Result<usize> {
111 self.with_conn(|conn| {
112 let issue_count: usize = conn.query_row(
114 "SELECT COUNT(*) FROM issues WHERE workspace_id = ?1",
115 rusqlite::params![id],
116 |row| row.get(0),
117 )?;
118 conn.execute(
119 "DELETE FROM issues WHERE workspace_id = ?1",
120 rusqlite::params![id],
121 )?;
122 conn.execute(
123 "DELETE FROM comments WHERE workspace_id = ?1",
124 rusqlite::params![id],
125 )?;
126 conn.execute(
127 "DELETE FROM sync_state WHERE workspace_id = ?1",
128 rusqlite::params![id],
129 )?;
130 conn.execute(
131 "DELETE FROM workspaces WHERE id = ?1",
132 rusqlite::params![id],
133 )?;
134 Ok(issue_count)
135 })
136 }
137
    /// Inserts or updates one issue, keyed by its Linear id.
    ///
    /// On conflict every mutable column is overwritten from the incoming
    /// value; `synced_at` is always stamped `datetime('now')` so it records
    /// when this row was last written locally.
    pub fn upsert_issue(&self, issue: &Issue) -> Result<()> {
        self.with_conn(|conn| {
            conn.execute(
                "INSERT INTO issues (id, identifier, team_key, title, description, state_name, state_type, priority, assignee_name, project_name, labels_json, created_at, updated_at, content_hash, synced_at, url, branch_name, workspace_id)
                 VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14, datetime('now'), ?15, ?16, ?17)
                 ON CONFLICT(id) DO UPDATE SET
                   identifier=excluded.identifier, team_key=excluded.team_key, title=excluded.title,
                   description=excluded.description, state_name=excluded.state_name, state_type=excluded.state_type,
                   priority=excluded.priority, assignee_name=excluded.assignee_name, project_name=excluded.project_name,
                   labels_json=excluded.labels_json, updated_at=excluded.updated_at,
                   content_hash=excluded.content_hash, url=excluded.url, branch_name=excluded.branch_name,
                   workspace_id=excluded.workspace_id, synced_at=datetime('now')",
                rusqlite::params![
                    // Bind order must track the ?N positions above; synced_at
                    // has no bound value (datetime('now') fills its column),
                    // which is why url/branch_name/workspace_id are ?15-?17.
                    issue.id, issue.identifier, issue.team_key, issue.title, issue.description,
                    issue.state_name, issue.state_type, issue.priority, issue.assignee_name,
                    issue.project_name, issue.labels_json, issue.created_at, issue.updated_at,
                    issue.content_hash, issue.url, issue.branch_name, issue.workspace_id,
                ],
            )?;
            Ok(())
        })
    }
162
163 pub fn get_issue(&self, id_or_identifier: &str) -> Result<Option<Issue>> {
164 self.with_conn(|conn| {
165 let mut stmt = conn.prepare(
166 "SELECT id, identifier, team_key, title, description, state_name, state_type, priority, assignee_name, project_name, labels_json, created_at, updated_at, content_hash, synced_at, url, branch_name, workspace_id
167 FROM issues WHERE id = ?1 OR identifier = ?1"
168 )?;
169 let mut rows = stmt.query(rusqlite::params![id_or_identifier])?;
170 if let Some(row) = rows.next()? {
171 Ok(Some(Issue::from_row(row)?))
172 } else {
173 Ok(None)
174 }
175 })
176 }
177
178 pub fn get_unprioritized_issues(
179 &self,
180 team_key: Option<&str>,
181 include_completed: bool,
182 workspace_id: &str,
183 ) -> Result<Vec<Issue>> {
184 self.with_conn(|conn| {
185 let state_filter = if include_completed {
186 ""
187 } else {
188 " AND state_type NOT IN ('completed', 'canceled')"
189 };
190 let (sql, params): (String, Vec<Box<dyn rusqlite::types::ToSql>>) = if let Some(team) = team_key {
191 (
192 format!(
193 "SELECT id, identifier, team_key, title, description, state_name, state_type, priority, assignee_name, project_name, labels_json, created_at, updated_at, content_hash, synced_at, url, branch_name, workspace_id
194 FROM issues WHERE priority = 0{} AND team_key = ?1 AND workspace_id = ?2
195 ORDER BY created_at DESC", state_filter
196 ),
197 vec![Box::new(team.to_string()) as Box<dyn rusqlite::types::ToSql>, Box::new(workspace_id.to_string())],
198 )
199 } else {
200 (
201 format!(
202 "SELECT id, identifier, team_key, title, description, state_name, state_type, priority, assignee_name, project_name, labels_json, created_at, updated_at, content_hash, synced_at, url, branch_name, workspace_id
203 FROM issues WHERE priority = 0{} AND workspace_id = ?1
204 ORDER BY created_at DESC", state_filter
205 ),
206 vec![Box::new(workspace_id.to_string()) as Box<dyn rusqlite::types::ToSql>],
207 )
208 };
209 let mut stmt = conn.prepare(&sql)?;
210 let param_refs: Vec<&dyn rusqlite::types::ToSql> = params.iter().map(|p| p.as_ref()).collect();
211 let rows = stmt.query_map(param_refs.as_slice(), |row| {
212 Ok(Issue::from_row(row).unwrap())
213 })?;
214 Ok(rows.collect::<std::result::Result<Vec<_>, _>>()?)
215 })
216 }
217
218 pub fn get_issues_by_state_types(
219 &self,
220 team_key: &str,
221 state_types: &[String],
222 workspace_id: &str,
223 ) -> Result<Vec<Issue>> {
224 self.with_conn(|conn| {
225 let placeholders: String = state_types
226 .iter()
227 .enumerate()
228 .map(|(i, _)| format!("?{}", i + 3))
229 .collect::<Vec<_>>()
230 .join(", ");
231 let sql = format!(
232 "SELECT id, identifier, team_key, title, description, state_name, state_type, \
233 priority, assignee_name, project_name, labels_json, created_at, updated_at, \
234 content_hash, synced_at, url, branch_name, workspace_id \
235 FROM issues WHERE team_key = ?1 AND workspace_id = ?2 AND state_type IN ({placeholders}) \
236 ORDER BY priority ASC, created_at DESC"
237 );
238 let mut stmt = conn.prepare(&sql)?;
239 let mut params: Vec<Box<dyn rusqlite::types::ToSql>> =
240 vec![Box::new(team_key.to_string()), Box::new(workspace_id.to_string())];
241 for st in state_types {
242 params.push(Box::new(st.clone()));
243 }
244 let param_refs: Vec<&dyn rusqlite::types::ToSql> =
245 params.iter().map(|p| p.as_ref()).collect();
246 let rows = stmt.query_map(param_refs.as_slice(), |row| {
247 Ok(Issue::from_row(row).unwrap())
248 })?;
249 Ok(rows.collect::<std::result::Result<Vec<_>, _>>()?)
250 })
251 }
252
    /// Returns, for each issue in `issue_ids`, the issues blocking it.
    ///
    /// Blocking can be stored in either direction, so two queries run and
    /// their results are concatenated:
    /// 1. Forward: relations where one of our issues is marked 'blocked_by'.
    ///    Uses a LEFT JOIN so blockers not synced locally still appear, with
    ///    the stored identifier and empty title/state.
    /// 2. Inverse: relations where another local issue 'blocks' one of ours.
    ///    Inner JOINs: both sides must exist locally to resolve the blocker.
    pub fn get_blockers_for_issues(&self, issue_ids: &[String]) -> Result<Vec<BlockerRow>> {
        if issue_ids.is_empty() {
            return Ok(vec![]);
        }
        self.with_conn(|conn| {
            // ?1..?N — one placeholder per requested id, shared by both queries.
            let placeholders: String = issue_ids
                .iter()
                .enumerate()
                .map(|(i, _)| format!("?{}", i + 1))
                .collect::<Vec<_>>()
                .join(", ");

            let sql_fwd = format!(
                "SELECT r.issue_id, COALESCE(i.identifier, r.related_issue_identifier),
                        COALESCE(i.title, ''), COALESCE(i.state_name, ''), COALESCE(i.state_type, '')
                 FROM issue_relations r
                 LEFT JOIN issues i ON r.related_issue_id = i.id
                 WHERE r.issue_id IN ({placeholders}) AND r.relation_type = 'blocked_by'"
            );

            // i = the blocked issue (ours), i2 = the blocking issue.
            let sql_inv = format!(
                "SELECT r.related_issue_id, i2.identifier,
                        COALESCE(i2.title, ''), COALESCE(i2.state_name, ''), COALESCE(i2.state_type, '')
                 FROM issue_relations r
                 JOIN issues i ON r.related_issue_id = i.id
                 JOIN issues i2 ON r.issue_id = i2.id
                 WHERE r.related_issue_id IN ({placeholders}) AND r.relation_type = 'blocks'"
            );

            let mut results = Vec::new();
            let params: Vec<Box<dyn rusqlite::types::ToSql>> =
                issue_ids.iter().map(|id| Box::new(id.clone()) as _).collect();
            let param_refs: Vec<&dyn rusqlite::types::ToSql> =
                params.iter().map(|p| p.as_ref()).collect();

            // Both queries bind the same id list and decode to the same shape.
            for sql in [&sql_fwd, &sql_inv] {
                let mut stmt = conn.prepare(sql)?;
                let rows = stmt.query_map(param_refs.as_slice(), |row| {
                    Ok(BlockerRow {
                        issue_id: row.get(0)?,
                        identifier: row.get(1)?,
                        title: row.get(2)?,
                        state_name: row.get(3)?,
                        state_type: row.get(4)?,
                    })
                })?;
                for row in rows {
                    results.push(row?);
                }
            }
            Ok(results)
        })
    }
310
311 pub fn count_issues(&self, team_key: Option<&str>, workspace_id: &str) -> Result<usize> {
312 self.with_conn(|conn| {
313 let count: usize = if let Some(team) = team_key {
314 conn.query_row(
315 "SELECT COUNT(*) FROM issues WHERE team_key = ?1 AND workspace_id = ?2",
316 rusqlite::params![team, workspace_id],
317 |row| row.get(0),
318 )?
319 } else {
320 conn.query_row(
321 "SELECT COUNT(*) FROM issues WHERE workspace_id = ?1",
322 rusqlite::params![workspace_id],
323 |row| row.get(0),
324 )?
325 };
326 Ok(count)
327 })
328 }
329
    /// Measures metadata completeness across a workspace's issues
    /// (optionally one team).
    ///
    /// Returns `(total, with_description, prioritized, labeled, in_project)`:
    /// the issue count, and how many have a non-empty description, a
    /// priority above 0, a non-empty labels array, and a project name.
    ///
    /// The SUM aggregates come back NULL over an empty table, hence the
    /// `Option<usize>` reads decoded with `unwrap_or(0)`.
    pub fn get_field_completeness(
        &self,
        team_key: Option<&str>,
        workspace_id: &str,
    ) -> Result<(usize, usize, usize, usize, usize)> {
        self.with_conn(|conn| {
            let (sql, params): (String, Vec<Box<dyn rusqlite::types::ToSql>>) =
                if let Some(team) = team_key {
                    (
                        "SELECT COUNT(*),
                          SUM(CASE WHEN description IS NOT NULL AND description != '' THEN 1 ELSE 0 END),
                          SUM(CASE WHEN priority > 0 THEN 1 ELSE 0 END),
                          SUM(CASE WHEN labels_json != '[]' THEN 1 ELSE 0 END),
                          SUM(CASE WHEN project_name IS NOT NULL AND project_name != '' THEN 1 ELSE 0 END)
                         FROM issues WHERE team_key = ?1 AND workspace_id = ?2"
                            .to_string(),
                        vec![Box::new(team.to_string()) as Box<dyn rusqlite::types::ToSql>, Box::new(workspace_id.to_string())],
                    )
                } else {
                    (
                        "SELECT COUNT(*),
                          SUM(CASE WHEN description IS NOT NULL AND description != '' THEN 1 ELSE 0 END),
                          SUM(CASE WHEN priority > 0 THEN 1 ELSE 0 END),
                          SUM(CASE WHEN labels_json != '[]' THEN 1 ELSE 0 END),
                          SUM(CASE WHEN project_name IS NOT NULL AND project_name != '' THEN 1 ELSE 0 END)
                         FROM issues WHERE workspace_id = ?1"
                            .to_string(),
                        vec![Box::new(workspace_id.to_string()) as Box<dyn rusqlite::types::ToSql>],
                    )
                };
            let param_refs: Vec<&dyn rusqlite::types::ToSql> =
                params.iter().map(|p| p.as_ref()).collect();
            let row = conn.query_row(&sql, param_refs.as_slice(), |row| {
                Ok((
                    row.get::<_, usize>(0)?,
                    row.get::<_, Option<usize>>(1)?.unwrap_or(0),
                    row.get::<_, Option<usize>>(2)?.unwrap_or(0),
                    row.get::<_, Option<usize>>(3)?.unwrap_or(0),
                    row.get::<_, Option<usize>>(4)?.unwrap_or(0),
                ))
            })?;
            Ok(row)
        })
    }
375
376 #[allow(unused_assignments)]
379 pub fn list_all_issues(
380 &self,
381 team_key: Option<&str>,
382 filter: Option<&str>,
383 limit: usize,
384 offset: usize,
385 workspace_id: &str,
386 ) -> Result<Vec<IssueSummary>> {
387 self.with_conn(|conn| {
388 let mut conditions = Vec::new();
389 let mut params: Vec<Box<dyn rusqlite::types::ToSql>> = Vec::new();
390 let mut param_idx = 1;
391
392 conditions.push(format!("i.workspace_id = ?{param_idx}"));
394 params.push(Box::new(workspace_id.to_string()));
395 param_idx += 1;
396
397 if let Some(team) = team_key {
398 conditions.push(format!("i.team_key = ?{param_idx}"));
399 params.push(Box::new(team.to_string()));
400 param_idx += 1;
401 }
402
403 if let Some(text) = filter {
404 let like = format!("%{text}%");
405 conditions.push(format!(
406 "(i.identifier LIKE ?{} OR i.title LIKE ?{})",
407 param_idx,
408 param_idx + 1
409 ));
410 params.push(Box::new(like.clone()));
411 params.push(Box::new(like));
412 param_idx += 2;
413 }
414
415 let _ = param_idx;
416
417 let where_clause = if conditions.is_empty() {
418 String::new()
419 } else {
420 format!("WHERE {}", conditions.join(" AND "))
421 };
422
423 let limit_idx = params.len() + 1;
424 let offset_idx = params.len() + 2;
425
426 let sql = format!(
427 "SELECT i.id, i.identifier, i.team_key, i.title, i.state_name, i.state_type,
428 i.priority, i.project_name, i.labels_json, i.updated_at, i.url,
429 i.description IS NOT NULL AND i.description != '' AS has_desc,
430 EXISTS(SELECT 1 FROM chunks c WHERE c.issue_id = i.id) AS has_emb
431 FROM issues i
432 {where_clause}
433 ORDER BY i.updated_at DESC
434 LIMIT ?{limit_idx} OFFSET ?{offset_idx}"
435 );
436 params.push(Box::new(limit as i64));
437 params.push(Box::new(offset as i64));
438
439 let param_refs: Vec<&dyn rusqlite::types::ToSql> =
440 params.iter().map(|p| p.as_ref()).collect();
441 let mut stmt = conn.prepare(&sql)?;
442 let rows = stmt.query_map(param_refs.as_slice(), |row| {
443 let labels_json: String = row.get(8)?;
444 let labels: Vec<String> = serde_json::from_str(&labels_json).unwrap_or_default();
445 Ok(IssueSummary {
446 id: row.get(0)?,
447 identifier: row.get(1)?,
448 team_key: row.get(2)?,
449 title: row.get(3)?,
450 state_name: row.get(4)?,
451 state_type: row.get(5)?,
452 priority: row.get(6)?,
453 project_name: row.get(7)?,
454 labels,
455 updated_at: row.get(9)?,
456 url: row.get(10)?,
457 has_description: row.get(11)?,
458 has_embedding: row.get(12)?,
459 })
460 })?;
461 Ok(rows.collect::<std::result::Result<Vec<_>, _>>()?)
462 })
463 }
464
465 pub fn upsert_relations(&self, issue_id: &str, relations: &[Relation]) -> Result<()> {
468 self.with_conn(|conn| {
469 conn.execute(
470 "DELETE FROM issue_relations WHERE issue_id = ?1",
471 rusqlite::params![issue_id],
472 )?;
473 let mut stmt = conn.prepare(
474 "INSERT OR IGNORE INTO issue_relations (id, issue_id, related_issue_id, related_issue_identifier, relation_type)
475 VALUES (?1, ?2, ?3, ?4, ?5)"
476 )?;
477 for rel in relations {
478 stmt.execute(rusqlite::params![
479 rel.id, rel.issue_id, rel.related_issue_id,
480 rel.related_issue_identifier, rel.relation_type,
481 ])?;
482 }
483 Ok(())
484 })
485 }
486
    /// Returns all relations touching `issue_id`, enriched with the other
    /// issue's title/state/url for display.
    ///
    /// Two passes, concatenated:
    /// 1. Forward: relations this issue owns (LEFT JOIN, so targets not yet
    ///    synced locally still appear with empty title/state/url).
    /// 2. Inverse: relations other issues own that point at this one; their
    ///    type is flipped (blocks <-> blocked_by) so the list reads correctly
    ///    from this issue's perspective.
    pub fn get_relations_enriched(&self, issue_id: &str) -> Result<Vec<EnrichedRelation>> {
        self.with_conn(|conn| {
            let mut stmt = conn.prepare(
                "SELECT r.id, r.relation_type, r.related_issue_identifier,
                        COALESCE(i.title, ''), COALESCE(i.state_name, ''), COALESCE(i.url, '')
                 FROM issue_relations r
                 LEFT JOIN issues i ON r.related_issue_id = i.id
                 WHERE r.issue_id = ?1",
            )?;
            let forward = stmt
                .query_map(rusqlite::params![issue_id], |row| {
                    Ok(EnrichedRelation {
                        relation_id: row.get(0)?,
                        relation_type: row.get(1)?,
                        issue_identifier: row.get(2)?,
                        issue_title: row.get(3)?,
                        issue_state: row.get(4)?,
                        issue_url: row.get(5)?,
                    })
                })?
                .collect::<std::result::Result<Vec<_>, _>>()?;

            // i = this issue (the relation's target); i2 = the issue owning
            // the relation. NOTE(review): `r.related_issue_id = i.id` in the
            // WHERE clause duplicates the first JOIN condition — redundant
            // but harmless.
            let mut stmt2 = conn.prepare(
                "SELECT r.id, r.relation_type, i2.identifier,
                        COALESCE(i2.title, ''), COALESCE(i2.state_name, ''), COALESCE(i2.url, '')
                 FROM issue_relations r
                 JOIN issues i ON r.related_issue_id = i.id
                 JOIN issues i2 ON r.issue_id = i2.id
                 WHERE r.related_issue_id = i.id AND i.id = ?1",
            )?;
            let inverse = stmt2
                .query_map(rusqlite::params![issue_id], |row| {
                    let raw_type: String = row.get(1)?;
                    // Directional types are mirrored for the inverse view;
                    // symmetric types pass through unchanged.
                    let flipped = match raw_type.as_str() {
                        "blocks" => "blocked_by".to_string(),
                        "blocked_by" => "blocks".to_string(),
                        other => other.to_string(),
                    };
                    Ok(EnrichedRelation {
                        relation_id: row.get(0)?,
                        relation_type: flipped,
                        issue_identifier: row.get(2)?,
                        issue_title: row.get(3)?,
                        issue_state: row.get(4)?,
                        issue_url: row.get(5)?,
                    })
                })?
                .collect::<std::result::Result<Vec<_>, _>>()?;

            let mut all = forward;
            all.extend(inverse);
            Ok(all)
        })
    }
543
544 pub fn find_relation_id(
546 &self,
547 issue_id: &str,
548 related_issue_id: &str,
549 relation_type: &str,
550 ) -> Result<Option<String>> {
551 self.with_conn(|conn| {
552 let mut stmt = conn.prepare(
553 "SELECT id FROM issue_relations WHERE issue_id = ?1 AND related_issue_id = ?2 AND relation_type = ?3"
554 )?;
555 let mut rows = stmt.query(rusqlite::params![issue_id, related_issue_id, relation_type])?;
556 if let Some(row) = rows.next()? {
557 Ok(Some(row.get(0)?))
558 } else {
559 Ok(None)
560 }
561 })
562 }
563
    /// Replaces all chunks for `issue_id` without recording an embedding
    /// model name (stores ""; `get_embedding_model` maps "" back to `None`).
    /// Each tuple is (chunk index, chunk text, embedding bytes).
    pub fn upsert_chunks(&self, issue_id: &str, chunks: &[(usize, String, Vec<u8>)]) -> Result<()> {
        self.upsert_chunks_with_model(issue_id, chunks, "")
    }
569
570 pub fn upsert_chunks_with_model(
571 &self,
572 issue_id: &str,
573 chunks: &[(usize, String, Vec<u8>)],
574 model_name: &str,
575 ) -> Result<()> {
576 self.with_conn(|conn| {
577 conn.execute(
578 "DELETE FROM chunks WHERE issue_id = ?1",
579 rusqlite::params![issue_id],
580 )?;
581 let mut stmt = conn.prepare(
582 "INSERT INTO chunks (issue_id, chunk_index, chunk_text, embedding, model_name) VALUES (?1, ?2, ?3, ?4, ?5)"
583 )?;
584 for (idx, text, embedding) in chunks {
585 stmt.execute(rusqlite::params![issue_id, idx, text, embedding, model_name])?;
586 }
587 Ok(())
588 })
589 }
590
591 pub fn get_embedding_model(&self, issue_id: &str) -> Result<Option<String>> {
593 self.with_conn(|conn| {
594 let mut stmt =
595 conn.prepare("SELECT model_name FROM chunks WHERE issue_id = ?1 LIMIT 1")?;
596 let mut rows = stmt.query(rusqlite::params![issue_id])?;
597 if let Some(row) = rows.next()? {
598 let name: String = row.get(0)?;
599 Ok(if name.is_empty() { None } else { Some(name) })
600 } else {
601 Ok(None)
602 }
603 })
604 }
605
606 pub fn get_all_chunks(&self, workspace_id: &str) -> Result<Vec<Chunk>> {
607 self.with_conn(|conn| {
608 let mut stmt = conn.prepare(
609 "SELECT c.issue_id, c.embedding, i.identifier
610 FROM chunks c JOIN issues i ON c.issue_id = i.id
611 WHERE i.workspace_id = ?1",
612 )?;
613 let rows = stmt.query_map(rusqlite::params![workspace_id], |row| {
614 Ok(Chunk {
615 issue_id: row.get(0)?,
616 embedding: row.get(1)?,
617 identifier: row.get(2)?,
618 })
619 })?;
620 Ok(rows.collect::<std::result::Result<Vec<_>, _>>()?)
621 })
622 }
623
624 pub fn get_chunks_for_team(&self, team_key: &str, workspace_id: &str) -> Result<Vec<Chunk>> {
625 self.with_conn(|conn| {
626 let mut stmt = conn.prepare(
627 "SELECT c.issue_id, c.embedding, i.identifier
628 FROM chunks c JOIN issues i ON c.issue_id = i.id
629 WHERE i.team_key = ?1 AND i.workspace_id = ?2",
630 )?;
631 let rows = stmt.query_map(rusqlite::params![team_key, workspace_id], |row| {
632 Ok(Chunk {
633 issue_id: row.get(0)?,
634 embedding: row.get(1)?,
635 identifier: row.get(2)?,
636 })
637 })?;
638 Ok(rows.collect::<std::result::Result<Vec<_>, _>>()?)
639 })
640 }
641
642 pub fn count_embedded_issues(
643 &self,
644 team_key: Option<&str>,
645 workspace_id: &str,
646 ) -> Result<usize> {
647 self.with_conn(|conn| {
648 let count: usize = if let Some(team) = team_key {
649 conn.query_row(
650 "SELECT COUNT(DISTINCT c.issue_id) FROM chunks c JOIN issues i ON c.issue_id = i.id WHERE i.team_key = ?1 AND i.workspace_id = ?2",
651 rusqlite::params![team, workspace_id],
652 |row| row.get(0),
653 )?
654 } else {
655 conn.query_row(
656 "SELECT COUNT(DISTINCT c.issue_id) FROM chunks c JOIN issues i ON c.issue_id = i.id WHERE i.workspace_id = ?1",
657 rusqlite::params![workspace_id],
658 |row| row.get(0),
659 )?
660 };
661 Ok(count)
662 })
663 }
664
665 pub fn get_issues_needing_embedding(
666 &self,
667 team_key: Option<&str>,
668 force: bool,
669 workspace_id: &str,
670 ) -> Result<Vec<Issue>> {
671 self.with_conn(|conn| {
672 let sql = if force {
673 if let Some(team) = team_key {
674 format!(
675 "SELECT id, identifier, team_key, title, description, state_name, state_type, priority, assignee_name, project_name, labels_json, created_at, updated_at, content_hash, synced_at, url, branch_name, workspace_id
676 FROM issues WHERE team_key = '{}' AND workspace_id = '{}'", team, workspace_id
677 )
678 } else {
679 format!(
680 "SELECT id, identifier, team_key, title, description, state_name, state_type, priority, assignee_name, project_name, labels_json, created_at, updated_at, content_hash, synced_at, url, branch_name, workspace_id
681 FROM issues WHERE workspace_id = '{}'", workspace_id
682 )
683 }
684 } else {
685 let team_filter = if let Some(team) = team_key {
686 format!("AND i.team_key = '{}'", team)
687 } else {
688 String::new()
689 };
690 format!(
691 "SELECT i.id, i.identifier, i.team_key, i.title, i.description, i.state_name, i.state_type, i.priority, i.assignee_name, i.project_name, i.labels_json, i.created_at, i.updated_at, i.content_hash, i.synced_at, i.url, i.branch_name, i.workspace_id
692 FROM issues i
693 LEFT JOIN (SELECT DISTINCT issue_id FROM chunks) c ON i.id = c.issue_id
694 WHERE c.issue_id IS NULL AND i.workspace_id = '{}' {}",
695 workspace_id, team_filter
696 )
697 };
698 let mut stmt = conn.prepare(&sql)?;
699 let rows = stmt.query_map([], |row| {
700 Ok(Issue::from_row(row).unwrap())
701 })?;
702 Ok(rows.collect::<std::result::Result<Vec<_>, _>>()?)
703 })
704 }
705
706 pub fn get_comments(&self, issue_id: &str) -> Result<Vec<Comment>> {
709 self.with_conn(|conn| {
710 let mut stmt = conn.prepare(
711 "SELECT id, issue_id, body, user_name, created_at FROM comments WHERE issue_id = ?1 ORDER BY created_at"
712 )?;
713 let rows = stmt.query_map(rusqlite::params![issue_id], |row| {
714 Ok(Comment {
715 id: row.get(0)?,
716 issue_id: row.get(1)?,
717 body: row.get(2)?,
718 user_name: row.get(3)?,
719 created_at: row.get(4)?,
720 })
721 })?;
722 Ok(rows.collect::<std::result::Result<Vec<_>, _>>()?)
723 })
724 }
725
726 pub fn get_sync_cursor(&self, workspace_id: &str, team_key: &str) -> Result<Option<String>> {
729 self.with_conn(|conn| {
730 let mut stmt = conn.prepare(
731 "SELECT last_updated_at FROM sync_state WHERE workspace_id = ?1 AND team_key = ?2",
732 )?;
733 let mut rows = stmt.query(rusqlite::params![workspace_id, team_key])?;
734 if let Some(row) = rows.next()? {
735 Ok(Some(row.get(0)?))
736 } else {
737 Ok(None)
738 }
739 })
740 }
741
    /// Records the incremental-sync cursor for (workspace, team) and marks
    /// the initial full sync as complete.
    ///
    /// `last_updated_at` is the caller-supplied cursor used as the starting
    /// point of the next incremental fetch; `last_synced_at` is stamped with
    /// `datetime('now')` on both insert and update.
    pub fn set_sync_cursor(
        &self,
        workspace_id: &str,
        team_key: &str,
        last_updated_at: &str,
    ) -> Result<()> {
        self.with_conn(|conn| {
            conn.execute(
                "INSERT INTO sync_state (workspace_id, team_key, last_updated_at, full_sync_done, last_synced_at)
                 VALUES (?1, ?2, ?3, 1, datetime('now'))
                 ON CONFLICT(workspace_id, team_key) DO UPDATE SET last_updated_at=excluded.last_updated_at, full_sync_done=1, last_synced_at=datetime('now')",
                rusqlite::params![workspace_id, team_key, last_updated_at],
            )?;
            Ok(())
        })
    }
758
759 pub fn is_full_sync_done(&self, workspace_id: &str, team_key: &str) -> Result<bool> {
760 self.with_conn(|conn| {
761 let mut stmt = conn.prepare(
762 "SELECT full_sync_done FROM sync_state WHERE workspace_id = ?1 AND team_key = ?2",
763 )?;
764 let mut rows = stmt.query(rusqlite::params![workspace_id, team_key])?;
765 if let Some(row) = rows.next()? {
766 let done: bool = row.get(0)?;
767 Ok(done)
768 } else {
769 Ok(false)
770 }
771 })
772 }
773
774 pub fn get_last_synced_at(&self, workspace_id: &str, team_key: &str) -> Result<Option<String>> {
776 self.with_conn(|conn| {
777 let mut stmt = conn.prepare(
778 "SELECT last_synced_at FROM sync_state WHERE workspace_id = ?1 AND team_key = ?2",
779 )?;
780 let mut rows = stmt.query(rusqlite::params![workspace_id, team_key])?;
781 if let Some(row) = rows.next()? {
782 Ok(row.get(0)?)
783 } else {
784 Ok(None)
785 }
786 })
787 }
788
789 pub fn get_metadata(&self, key: &str) -> Result<Option<String>> {
792 self.with_conn(|conn| {
793 let mut stmt = conn.prepare("SELECT value FROM metadata WHERE key = ?1")?;
794 let mut rows = stmt.query(rusqlite::params![key])?;
795 if let Some(row) = rows.next()? {
796 Ok(Some(row.get(0)?))
797 } else {
798 Ok(None)
799 }
800 })
801 }
802
803 pub fn set_metadata(&self, key: &str, value: &str) -> Result<()> {
804 self.with_conn(|conn| {
805 conn.execute(
806 "INSERT INTO metadata (key, value) VALUES (?1, ?2) ON CONFLICT(key) DO UPDATE SET value=excluded.value",
807 rusqlite::params![key, value],
808 )?;
809 Ok(())
810 })
811 }
812
    /// Per-team rollup for a workspace: total issues, how many of them have
    /// at least one embedding chunk, and the team's last sync time (NULL if
    /// the team was never recorded in sync_state).
    ///
    /// COUNT(DISTINCT ...) is used because the one-to-many chunks join would
    /// otherwise multiply rows per issue.
    pub fn list_synced_teams(&self, workspace_id: &str) -> Result<Vec<TeamSummary>> {
        self.with_conn(|conn| {
            let mut stmt = conn.prepare(
                "SELECT i.team_key,
                        COUNT(DISTINCT i.id) AS issue_count,
                        COUNT(DISTINCT c.issue_id) AS embedded_count,
                        s.last_synced_at
                 FROM issues i
                 LEFT JOIN chunks c ON i.id = c.issue_id
                 LEFT JOIN sync_state s ON i.team_key = s.team_key AND s.workspace_id = ?1
                 WHERE i.workspace_id = ?1
                 GROUP BY i.team_key
                 ORDER BY i.team_key",
            )?;
            let rows = stmt.query_map(rusqlite::params![workspace_id], |row| {
                Ok(TeamSummary {
                    key: row.get(0)?,
                    issue_count: row.get(1)?,
                    embedded_count: row.get(2)?,
                    last_synced_at: row.get(3)?,
                })
            })?;
            Ok(rows.collect::<std::result::Result<Vec<_>, _>>()?)
        })
    }
842
    /// Full-text search over issues via the `issues_fts` table, restricted
    /// to one workspace, best matches first.
    ///
    /// Ranking uses bm25 (lower score = better match), hence ORDER BY rank
    /// ascending. Placeholder numbering is intentionally non-sequential in
    /// the SQL text: ?1 = match query, ?2 = limit, ?3 = workspace filter —
    /// it must line up with the `params![query, limit, workspace_id]` order.
    ///
    /// NOTE(review): `issues_fts MATCH` and `bm25(issues_fts)` reference the
    /// table name even though it is aliased as `f` — presumably resolving
    /// via the FTS hidden column of the same name; verify against the FTS5
    /// table definition in the schema.
    pub fn fts_search(
        &self,
        query: &str,
        limit: usize,
        workspace_id: &str,
    ) -> Result<Vec<FtsResult>> {
        self.with_conn(|conn| {
            let mut stmt = conn.prepare(
                "SELECT i.id, i.identifier, i.title, i.state_name, i.priority, bm25(issues_fts) as rank
                 FROM issues_fts f
                 JOIN issues i ON f.rowid = i.rowid
                 WHERE issues_fts MATCH ?1 AND i.workspace_id = ?3
                 ORDER BY rank
                 LIMIT ?2"
            )?;
            let rows = stmt.query_map(rusqlite::params![query, limit, workspace_id], |row| {
                Ok(FtsResult {
                    issue_id: row.get(0)?,
                    identifier: row.get(1)?,
                    title: row.get(2)?,
                    state_name: row.get(3)?,
                    priority: row.get(4)?,
                    bm25_score: row.get(5)?,
                })
            })?;
            Ok(rows.collect::<std::result::Result<Vec<_>, _>>()?)
        })
    }
871}
872
/// Serde default for `Issue::workspace_id`, used when deserializing
/// payloads that predate multi-workspace support.
fn default_workspace_id() -> String {
    String::from("default")
}
878
/// A locally cached Linear issue (one row of the `issues` table).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Issue {
    // Linear's opaque issue id (primary key).
    pub id: String,
    // Human identifier, e.g. "ENG-123"; also accepted by `get_issue`.
    pub identifier: String,
    pub team_key: String,
    pub title: String,
    pub description: Option<String>,
    pub state_name: String,
    // Workflow state category; queries filter on values like
    // 'completed' / 'canceled'.
    pub state_type: String,
    // Linear priority 0-4; 0 = no priority (see `priority_label`).
    pub priority: i32,
    pub assignee_name: Option<String>,
    pub project_name: Option<String>,
    // JSON-encoded array of label names; decoded by `labels()`.
    pub labels_json: String,
    pub created_at: String,
    pub updated_at: String,
    pub content_hash: String,
    // When this row was last written locally (stamped by upsert_issue).
    pub synced_at: Option<String>,
    pub url: String,
    pub branch_name: Option<String>,
    // Falls back to "default" when the serialized form lacks the field
    // (data written before multi-workspace support).
    #[serde(default = "default_workspace_id")]
    pub workspace_id: String,
}
901
impl Issue {
    /// Builds an `Issue` from a row selected with the canonical 18-column
    /// list (id, identifier, team_key, ..., workspace_id) used by every
    /// issue SELECT in `Database`. Column positions here must stay in sync
    /// with those queries.
    pub fn from_row(row: &rusqlite::Row) -> rusqlite::Result<Self> {
        Ok(Self {
            id: row.get(0)?,
            identifier: row.get(1)?,
            team_key: row.get(2)?,
            title: row.get(3)?,
            description: row.get(4)?,
            state_name: row.get(5)?,
            state_type: row.get(6)?,
            priority: row.get(7)?,
            assignee_name: row.get(8)?,
            project_name: row.get(9)?,
            labels_json: row.get(10)?,
            created_at: row.get(11)?,
            updated_at: row.get(12)?,
            content_hash: row.get(13)?,
            synced_at: row.get(14)?,
            url: row.get(15)?,
            // The last two columns tolerate failures — presumably for rows
            // or queries predating these columns; verify against schema
            // migration history.
            branch_name: row.get(16).unwrap_or(None),
            workspace_id: row.get(17).unwrap_or_else(|_| "default".to_string()),
        })
    }

    /// Decodes `labels_json`; malformed JSON yields an empty list rather
    /// than an error.
    pub fn labels(&self) -> Vec<String> {
        serde_json::from_str(&self.labels_json).unwrap_or_default()
    }

    /// Human-readable name for Linear's 0-4 priority scale.
    pub fn priority_label(&self) -> &str {
        match self.priority {
            0 => "No priority",
            1 => "Urgent",
            2 => "High",
            3 => "Medium",
            4 => "Low",
            _ => "Unknown",
        }
    }
}
941
/// A raw issue-to-issue relation as stored in `issue_relations`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Relation {
    // Id of the relation record itself.
    pub id: String,
    // Issue that owns the relation.
    pub issue_id: String,
    // Issue on the other end of the relation.
    pub related_issue_id: String,
    // Human identifier of the other issue, kept so it can be displayed even
    // when that issue is not synced locally.
    pub related_issue_identifier: String,
    // e.g. "blocks" / "blocked_by" (see get_relations_enriched).
    pub relation_type: String,
}
950
/// A relation joined with display data about the issue on the other end,
/// as produced by `Database::get_relations_enriched`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EnrichedRelation {
    pub relation_id: String,
    // Already flipped to read from the queried issue's perspective for
    // inverse relations.
    pub relation_type: String,
    pub issue_identifier: String,
    // Title/state/url of the other issue; "" when it is not synced locally.
    pub issue_title: String,
    pub issue_state: String,
    pub issue_url: String,
}
960
/// One embedded text chunk, as read back by the chunk queries.
#[derive(Debug, Clone)]
pub struct Chunk {
    pub issue_id: String,
    // Raw bytes of the `chunks.embedding` BLOB; decoding the vector format
    // is the caller's concern.
    pub embedding: Vec<u8>,
    // Human identifier of the owning issue.
    pub identifier: String,
}
967
/// A cached comment on an issue (one row of the `comments` table).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Comment {
    pub id: String,
    pub issue_id: String,
    pub body: String,
    // Display name of the author, when known.
    pub user_name: Option<String>,
    pub created_at: String,
}
976
/// One hit from `Database::fts_search`.
#[derive(Debug, Clone)]
pub struct FtsResult {
    pub issue_id: String,
    pub identifier: String,
    pub title: String,
    pub state_name: String,
    pub priority: i32,
    // bm25 rank as reported by SQLite; lower means a better match.
    pub bm25_score: f64,
}
986
/// Lightweight issue projection returned by `Database::list_all_issues`.
#[derive(Debug, Clone)]
pub struct IssueSummary {
    pub id: String,
    pub identifier: String,
    pub team_key: String,
    pub title: String,
    pub state_name: String,
    pub state_type: String,
    pub priority: i32,
    pub project_name: Option<String>,
    // Decoded from labels_json; empty when the JSON was missing/invalid.
    pub labels: Vec<String>,
    pub updated_at: String,
    pub url: String,
    // True when the issue has a non-empty description.
    pub has_description: bool,
    // True when at least one chunk row exists for the issue.
    pub has_embedding: bool,
}
1003
/// Per-team rollup returned by `Database::list_synced_teams`.
#[derive(Debug, Clone)]
pub struct TeamSummary {
    pub key: String,
    // Total issues cached for the team in the workspace.
    pub issue_count: usize,
    // How many of those issues have at least one embedding chunk.
    pub embedded_count: usize,
    // From sync_state; None if the team was never recorded there.
    pub last_synced_at: Option<String>,
}
1011
/// One row of the `workspaces` table.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkspaceRow {
    pub id: String,
    pub linear_org_id: Option<String>,
    pub display_name: Option<String>,
    pub created_at: String,
}
1019
#[cfg(test)]
mod tests {
    use super::test_helpers::*;

    #[test]
    fn count_embedded_issues_empty_db() {
        let (db, _dir) = test_db();
        assert_eq!(db.count_embedded_issues(None, "default").unwrap(), 0);
    }

    #[test]
    fn count_embedded_issues_with_data() {
        let (db, _dir) = test_db();

        let issue1 = make_issue("TST-1", "TST");
        let issue2 = make_issue("TST-2", "TST");
        let issue3 = make_issue("OTH-1", "OTH");
        db.upsert_issue(&issue1).unwrap();
        db.upsert_issue(&issue2).unwrap();
        db.upsert_issue(&issue3).unwrap();

        // Embed only two of the three issues; the count follows chunks, not issues.
        db.upsert_chunks(&issue1.id, &[(0, "chunk".into(), fake_embedding(768))])
            .unwrap();
        db.upsert_chunks(&issue3.id, &[(0, "chunk".into(), fake_embedding(768))])
            .unwrap();

        assert_eq!(db.count_embedded_issues(None, "default").unwrap(), 2);
        assert_eq!(db.count_embedded_issues(Some("TST"), "default").unwrap(), 1);
        assert_eq!(db.count_embedded_issues(Some("OTH"), "default").unwrap(), 1);
        assert_eq!(
            db.count_embedded_issues(Some("NONE"), "default").unwrap(),
            0
        );
    }

    #[test]
    fn get_field_completeness_empty_db() {
        let (db, _dir) = test_db();
        let (total, desc, pri, labels, proj) = db.get_field_completeness(None, "default").unwrap();
        assert_eq!(total, 0);
        assert_eq!(desc, 0);
        assert_eq!(pri, 0);
        assert_eq!(labels, 0);
        assert_eq!(proj, 0);
    }

    #[test]
    fn get_field_completeness_with_data() {
        let (db, _dir) = test_db();

        // Issue with every optional field populated.
        let mut full = make_issue("TST-1", "TST");
        full.description = Some("Has desc".into());
        full.priority = 2;
        full.labels_json = r#"["bug"]"#.into();
        full.project_name = Some("Proj".into());
        db.upsert_issue(&full).unwrap();

        // Issue with everything left empty.
        let mut sparse = make_issue("TST-2", "TST");
        sparse.description = None;
        sparse.priority = 0;
        sparse.labels_json = "[]".into();
        sparse.project_name = None;
        db.upsert_issue(&sparse).unwrap();

        // Different team, only the description set — exercises team scoping below.
        let mut other = make_issue("OTH-1", "OTH");
        other.description = Some("Has desc".into());
        other.priority = 0;
        other.labels_json = "[]".into();
        other.project_name = None;
        db.upsert_issue(&other).unwrap();

        // Unscoped: all three issues are counted.
        let (total, desc, pri, labels, proj) = db.get_field_completeness(None, "default").unwrap();
        assert_eq!(total, 3);
        assert_eq!(desc, 2);
        assert_eq!(pri, 1);
        assert_eq!(labels, 1);
        assert_eq!(proj, 1);

        // Scoped to TST: the OTH issue (and its description) is excluded.
        let (total, desc, pri, labels, proj) =
            db.get_field_completeness(Some("TST"), "default").unwrap();
        assert_eq!(total, 2);
        assert_eq!(desc, 1);
        assert_eq!(pri, 1);
        assert_eq!(labels, 1);
        assert_eq!(proj, 1);
    }

    #[test]
    fn list_all_issues_pagination_and_filter() {
        let (db, _dir) = test_db();

        for i in 1..=5 {
            let mut issue = make_issue(&format!("TST-{i}"), "TST");
            issue.updated_at = format!("2026-01-0{i}T00:00:00Z");
            db.upsert_issue(&issue).unwrap();
        }
        let mut other = make_issue("OTH-1", "OTH");
        other.updated_at = "2026-01-06T00:00:00Z".to_string();
        db.upsert_issue(&other).unwrap();

        // First page: newest-updated first, so OTH-1 (latest timestamp) leads.
        let page1 = db.list_all_issues(None, None, 3, 0, "default").unwrap();
        assert_eq!(page1.len(), 3);
        assert_eq!(page1[0].identifier, "OTH-1");

        let page2 = db.list_all_issues(None, None, 3, 3, "default").unwrap();
        assert_eq!(page2.len(), 3);

        // Offset past the end yields an empty page, not an error.
        let page3 = db.list_all_issues(None, None, 3, 6, "default").unwrap();
        assert_eq!(page3.len(), 0);

        // Team filter.
        let tst = db
            .list_all_issues(Some("TST"), None, 10, 0, "default")
            .unwrap();
        assert_eq!(tst.len(), 5);

        // Text filter matching an identifier.
        let filtered = db
            .list_all_issues(None, Some("TST-3"), 10, 0, "default")
            .unwrap();
        assert_eq!(filtered.len(), 1);
        assert_eq!(filtered[0].identifier, "TST-3");

        // Text filter matching a title.
        let title_match = db
            .list_all_issues(None, Some("Test issue OTH"), 10, 0, "default")
            .unwrap();
        assert_eq!(title_match.len(), 1);
    }

    #[test]
    fn list_all_issues_has_embedding_flag() {
        let (db, _dir) = test_db();

        let issue1 = make_issue("TST-1", "TST");
        let issue2 = make_issue("TST-2", "TST");
        db.upsert_issue(&issue1).unwrap();
        db.upsert_issue(&issue2).unwrap();

        // Only TST-1 gets a chunk, so only it should report has_embedding.
        db.upsert_chunks(&issue1.id, &[(0, "chunk".into(), fake_embedding(768))])
            .unwrap();

        let issues = db.list_all_issues(None, None, 10, 0, "default").unwrap();
        let by_id: std::collections::HashMap<_, _> =
            issues.iter().map(|i| (i.identifier.as_str(), i)).collect();

        assert!(by_id["TST-1"].has_embedding);
        assert!(!by_id["TST-2"].has_embedding);
    }

    #[test]
    fn list_synced_teams_empty_db() {
        let (db, _dir) = test_db();
        let teams = db.list_synced_teams("default").unwrap();
        assert!(teams.is_empty());
    }

    #[test]
    fn list_synced_teams_with_data() {
        let (db, _dir) = test_db();

        // Three TST issues, two of them embedded.
        for i in 1..=3 {
            let issue = make_issue(&format!("TST-{i}"), "TST");
            db.upsert_issue(&issue).unwrap();
            if i <= 2 {
                db.upsert_chunks(&issue.id, &[(0, "chunk".into(), fake_embedding(768))])
                    .unwrap();
            }
        }
        // One OTH issue with no embeddings.
        let other = make_issue("OTH-1", "OTH");
        db.upsert_issue(&other).unwrap();

        let teams = db.list_synced_teams("default").unwrap();
        assert_eq!(teams.len(), 2);

        let by_key: std::collections::HashMap<_, _> =
            teams.iter().map(|t| (t.key.as_str(), t)).collect();

        assert_eq!(by_key["TST"].issue_count, 3);
        assert_eq!(by_key["TST"].embedded_count, 2);
        assert_eq!(by_key["OTH"].issue_count, 1);
        assert_eq!(by_key["OTH"].embedded_count, 0);
    }

    #[test]
    fn list_synced_teams_includes_last_synced_at() {
        let (db, _dir) = test_db();

        let issue = make_issue("TST-1", "TST");
        db.upsert_issue(&issue).unwrap();

        // No cursor has been written yet.
        let teams = db.list_synced_teams("default").unwrap();
        assert_eq!(teams.len(), 1);
        assert!(teams[0].last_synced_at.is_none());

        // Setting the cursor surfaces last_synced_at.
        db.set_sync_cursor("default", "TST", "2026-01-01T00:00:00Z")
            .unwrap();
        let teams = db.list_synced_teams("default").unwrap();
        assert!(teams[0].last_synced_at.is_some());
    }

    #[test]
    fn list_synced_teams_multi_chunk_issue() {
        let (db, _dir) = test_db();

        let issue = make_issue("TST-1", "TST");
        db.upsert_issue(&issue).unwrap();
        db.upsert_chunks(
            &issue.id,
            &[
                (0, "chunk0".into(), fake_embedding(768)),
                (1, "chunk1".into(), fake_embedding(768)),
                (2, "chunk2".into(), fake_embedding(768)),
            ],
        )
        .unwrap();

        // Multiple chunks must not inflate either count past 1.
        let teams = db.list_synced_teams("default").unwrap();
        assert_eq!(teams.len(), 1);
        assert_eq!(teams[0].issue_count, 1);
        assert_eq!(teams[0].embedded_count, 1);
    }

    #[test]
    fn workspace_crud() {
        let (db, _dir) = test_db();

        // The "default" workspace is present from the start.
        let ws = db.get_workspace("default").unwrap();
        assert!(ws.is_some());

        // Insert.
        db.upsert_workspace("work", None, None).unwrap();
        let ws = db.get_workspace("work").unwrap().unwrap();
        assert_eq!(ws.id, "work");
        assert!(ws.linear_org_id.is_none());

        // Upsert updates the optional fields in place.
        db.upsert_workspace("work", Some("org-123"), Some("Work Org"))
            .unwrap();
        let ws = db.get_workspace("work").unwrap().unwrap();
        assert_eq!(ws.linear_org_id.as_deref(), Some("org-123"));
        assert_eq!(ws.display_name.as_deref(), Some("Work Org"));

        // List includes both "default" and "work".
        let all = db.list_workspaces().unwrap();
        assert_eq!(all.len(), 2);

        // Delete.
        db.delete_workspace("work").unwrap();
        let ws = db.get_workspace("work").unwrap();
        assert!(ws.is_none());
    }

    #[test]
    fn issues_isolated_by_workspace() {
        let (db, _dir) = test_db();

        db.upsert_workspace("work", None, None).unwrap();

        let mut issue1 = make_issue("TST-1", "TST");
        issue1.workspace_id = "default".to_string();
        issue1.priority = 0;
        db.upsert_issue(&issue1).unwrap();

        let mut issue2 = make_issue("TST-2", "TST");
        issue2.id = "id-2".to_string();
        issue2.workspace_id = "work".to_string();
        issue2.priority = 0;
        db.upsert_issue(&issue2).unwrap();

        // Each workspace sees only its own issue.
        assert_eq!(db.count_issues(None, "default").unwrap(), 1);
        assert_eq!(db.count_issues(None, "work").unwrap(), 1);

        let default_unpri = db.get_unprioritized_issues(None, false, "default").unwrap();
        assert_eq!(default_unpri.len(), 1);
        assert_eq!(default_unpri[0].identifier, "TST-1");

        let work_unpri = db.get_unprioritized_issues(None, false, "work").unwrap();
        assert_eq!(work_unpri.len(), 1);
        assert_eq!(work_unpri[0].identifier, "TST-2");
    }

    #[test]
    fn sync_state_isolated_by_workspace() {
        let (db, _dir) = test_db();
        db.upsert_workspace("work", None, None).unwrap();

        // Same team key, different workspaces: cursors must not collide.
        db.set_sync_cursor("default", "TST", "2024-01-01T00:00:00Z")
            .unwrap();
        db.set_sync_cursor("work", "TST", "2024-06-01T00:00:00Z")
            .unwrap();

        assert_eq!(
            db.get_sync_cursor("default", "TST").unwrap().as_deref(),
            Some("2024-01-01T00:00:00Z")
        );
        assert_eq!(
            db.get_sync_cursor("work", "TST").unwrap().as_deref(),
            Some("2024-06-01T00:00:00Z")
        );

        assert!(db.is_full_sync_done("default", "TST").unwrap());
        assert!(db.is_full_sync_done("work", "TST").unwrap());
        assert!(!db.is_full_sync_done("default", "OTHER").unwrap());
    }

    #[test]
    fn list_synced_teams_workspace_scoped() {
        let (db, _dir) = test_db();
        db.upsert_workspace("work", None, None).unwrap();

        let mut issue1 = make_issue("TST-1", "TST");
        issue1.workspace_id = "default".to_string();
        db.upsert_issue(&issue1).unwrap();

        let mut issue2 = make_issue("WRK-1", "WRK");
        issue2.id = "id-wrk".to_string();
        issue2.workspace_id = "work".to_string();
        db.upsert_issue(&issue2).unwrap();

        // Team listings are scoped to the requesting workspace.
        let default_teams = db.list_synced_teams("default").unwrap();
        assert_eq!(default_teams.len(), 1);
        assert_eq!(default_teams[0].key, "TST");

        let work_teams = db.list_synced_teams("work").unwrap();
        assert_eq!(work_teams.len(), 1);
        assert_eq!(work_teams[0].key, "WRK");
    }
}