use std::{
    collections::{HashMap, HashSet},
    path::{Path, PathBuf},
};

use serde::{Deserialize, Serialize};
use sqlx::Executor;
use tokio::fs;

const MIGRATIONS: [&str; 1] = [include_str!("../migrations/0_data.sql")];

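/// A repository as seen from a particular host: where it is reachable
/// (`link`) and, when known, its repository metadata (`repo`).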
#[derive(Debug)]
pub struct View {
    pub host: String,
    pub link: Link,
    pub repo: Option<Repo>,
}

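/// Location of a repository: either a local filesystem directory or a
/// network URL.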
#[derive(Serialize, Deserialize, Debug, Clone, Hash, Eq, PartialEq)]
pub enum Link {
    Fs { dir: PathBuf },
    Net { url: String },
}

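/// A branch, described by the set of its roots and its current leaf.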
#[derive(Serialize, Deserialize, Debug)]
pub struct Branch {
    pub roots: HashSet<String>,
    pub leaf: String,
}

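/// Repository metadata: an optional description plus its remotes and
/// branches, each keyed by name.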
#[derive(Serialize, Deserialize, Debug)]
pub struct Repo {
    pub description: Option<String>,
    pub remotes: HashMap<String, String>,
    pub branches: HashMap<String, Branch>,
}

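/// SQLite-backed storage for [`View`]s.
///
/// A minimal usage sketch (the database path, host, and directory below are
/// illustrative values, and an async runtime is assumed to be running):
///
/// ```ignore
/// let storage = Storage::connect("/tmp/views.db").await?;
/// storage
///     .store_views(&[View {
///         host: "localhost".to_string(),
///         link: Link::Fs { dir: "/tmp/repo".into() },
///         repo: None,
///     }])
///     .await?;
/// ```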
pub struct Storage {
    pool: sqlx::Pool<sqlx::Sqlite>,
}

impl Storage {
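    /// Opens (or creates) the SQLite database at `file`, creating its parent
    /// directory if needed, and runs the bundled migrations.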
    pub async fn connect<P: AsRef<Path>>(file: P) -> anyhow::Result<Self> {
        let file = file.as_ref();
        // Make sure the directory that will hold the database file exists.
        if let Some(parent) = file.parent() {
            fs::create_dir_all(parent).await?;
        }
        // `mode=rwc`: open read-write and create the database if it is missing.
        let url = format!("sqlite://{}?mode=rwc", file.to_string_lossy());
        let pool = sqlx::sqlite::SqlitePoolOptions::new()
            .max_connections(5)
            .connect(&url)
            .await?;
        let selph = Self { pool };
        // Run the bundled migrations before handing the storage out.
        for migration in MIGRATIONS {
            selph.pool.execute(migration).await?;
        }
        Ok(selph)
    }

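    /// Upserts the given views into the `views` table within a single
    /// transaction, serializing each view's `link` and `repo` as JSON.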
    pub async fn store_views(&self, views: &[View]) -> anyhow::Result<()> {
        // Write all views in one transaction so a failure leaves the table
        // untouched.
        let mut tx = self.pool.begin().await?;
        for view in views {
            let View { host, link, repo } = view;
            // Both the link and the (optional) repo are stored as JSON text.
            let link = serde_json::to_string(link)?;
            let repo = serde_json::to_string(repo)?;
            let _id = sqlx::query(
                "INSERT OR REPLACE INTO views (host, link, repo) VALUES (?, ?, ?)",
            )
            .bind(host)
            .bind(link)
            .bind(repo)
            .execute(&mut *tx)
            .await?
            .last_insert_rowid();
        }
        tx.commit().await?;
        Ok(())
    }
}