use rusqlite::{params, Connection, Result};
use serde::Serialize;
use std::collections::{HashSet, HashMap};
use std::path::{Path, PathBuf};
use crate::frontparse::{parse_frontmatter, Frontmatter};
use crate::links;
/// One note row from the index database, including computed backlinks.
/// Serialized via serde for consumers of `list_notes`.
#[derive(Debug, Serialize)]
pub struct Note {
    /// Notebook-relative path of the `.typ` file (DB primary key).
    pub filename: String,
    /// Note title; the literal string "null" when the frontmatter had none
    /// (see `upsert_note`).
    pub title: String,
    /// Optional frontmatter summary.
    pub summary: Option<String>,
    /// Date string as stored in the DB; queries sort it lexicographically,
    /// so it is presumably ISO-formatted — TODO confirm against frontparse.
    pub date: Option<String>,
    /// Time string as stored in the DB.
    pub time: Option<String>,
    /// Tags decoded from the JSON array stored in the `tags` column.
    pub tags: Option<Vec<String>>,
    /// Note kind ('fleeting', 'capture', 'atomic', ...); DB default 'fleeting'.
    pub note_type: Option<String>,
    /// Slug of the parent note in the atomic-note hierarchy, if any.
    pub uplink: Option<String>,
    /// Full raw file content, including frontmatter.
    pub content: String,
    /// Filenames of notes that link to this note's slug (filled by `list_notes`).
    pub backlinks: Vec<String>,
}
/// Location of the SQLite index: `<notebook>/.smarana/index.db`.
fn db_path(notebook_path: &Path) -> PathBuf {
    let mut p = notebook_path.to_path_buf();
    p.push(".smarana");
    p.push("index.db");
    p
}
/// Create the index schema if it does not exist yet. Idempotent: both
/// tables are created with `IF NOT EXISTS`.
pub fn init_db(notebook_path: &Path) -> Result<()> {
    let conn = Connection::open(db_path(notebook_path))?;
    // One row per note file; `filename` is the notebook-relative path.
    let notes_ddl = "CREATE TABLE IF NOT EXISTS notes (
        filename TEXT PRIMARY KEY,
        title TEXT NOT NULL,
        summary TEXT,
        date TEXT,
        time TEXT,
        tags TEXT,
        type TEXT DEFAULT 'fleeting',
        uplink TEXT,
        content TEXT NOT NULL DEFAULT ''
    )";
    // Outgoing links: one row per (source file, target slug) pair.
    let links_ddl = "CREATE TABLE IF NOT EXISTS note_links (
        source TEXT NOT NULL,
        target_slug TEXT NOT NULL,
        PRIMARY KEY (source, target_slug),
        FOREIGN KEY (source) REFERENCES notes(filename) ON DELETE CASCADE
    )";
    conn.execute(notes_ddl, [])?;
    conn.execute(links_ddl, [])?;
    Ok(())
}
/// Drop both tables and recreate the schema from scratch.
/// `note_links` is dropped first since it references `notes`.
pub fn init_db_force(notebook_path: &Path) -> Result<()> {
    let conn = Connection::open(db_path(notebook_path))?;
    for ddl in ["DROP TABLE IF EXISTS note_links", "DROP TABLE IF EXISTS notes"] {
        conn.execute(ddl, [])?;
    }
    init_db(notebook_path)
}
/// Insert or update one note row, keyed by `filename`.
///
/// Tags are serialized to a JSON array string; the note type defaults to
/// "fleeting" and a missing title is stored as the literal string "null".
pub fn upsert_note(notebook_path: &Path, filename: &str, fm: &Frontmatter, content: &str) -> Result<()> {
    let conn = Connection::open(db_path(notebook_path))?;
    let title = fm.title.as_deref().unwrap_or("null");
    let note_type = fm.note_type.as_deref().unwrap_or("fleeting");
    let tags_json = fm.tags.as_ref().and_then(|tags| serde_json::to_string(tags).ok());
    let uplink_str = fm.uplink.clone();
    let sql = "INSERT INTO notes (filename, title, summary, date, time, tags, type, uplink, content)
        VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9)
        ON CONFLICT(filename) DO UPDATE SET
        title=excluded.title,
        summary=excluded.summary,
        date=excluded.date,
        time=excluded.time,
        tags=excluded.tags,
        type=excluded.type,
        uplink=excluded.uplink,
        content=excluded.content";
    conn.execute(
        sql,
        params![
            filename,
            title,
            fm.summary,
            fm.date,
            fm.time,
            tags_json,
            note_type,
            uplink_str,
            content
        ],
    )?;
    Ok(())
}
/// Turn a note title into a slug: each run of non-ASCII-alphanumeric
/// characters collapses to a single '-', leading/trailing dashes are
/// trimmed, and an empty result falls back to "untitled".
///
/// Behaviorally equivalent to replacing the regex `[^a-zA-Z0-9]+` with "-",
/// but avoids recompiling (and `unwrap()`ing) a regex on every call —
/// this runs once per note during export.
fn make_slug(title: &str) -> String {
    let mut slug = String::with_capacity(title.len());
    let mut last_was_dash = false;
    for c in title.chars() {
        if c.is_ascii_alphanumeric() {
            slug.push(c);
            last_was_dash = false;
        } else if !last_was_dash {
            // First char of a non-alphanumeric run: emit exactly one dash.
            slug.push('-');
            last_was_dash = true;
        }
    }
    let slug = slug.trim_matches('-');
    if slug.is_empty() {
        "untitled".to_string()
    } else {
        slug.to_string()
    }
}

/// Order atomic notes so that each note appears directly before its
/// dependents — the notes whose `uplink` slug resolves to it — i.e. a
/// pre-order walk of the uplink forest.
///
/// Tuple layout: (filename, title, date, time, uplink).
///
/// Roots (no resolvable uplink) and sibling groups are each ordered by
/// descending (date, time) string comparison; missing values sort last.
/// Notes reachable only through an uplink cycle are appended afterwards
/// in the same descending order.
fn topological_sort_atomic(notes: &[(String, String, Option<String>, Option<String>, Option<String>)]) -> Vec<String> {
    // Resolve uplink slugs: title-slug -> filename, filename -> full row.
    let mut slug_to_filename: HashMap<String, String> = HashMap::new();
    let mut filename_to_note: HashMap<String, &(String, String, Option<String>, Option<String>, Option<String>)> = HashMap::new();
    for note in notes {
        slug_to_filename.insert(make_slug(&note.1), note.0.clone());
        filename_to_note.insert(note.0.clone(), note);
    }

    // Build parent -> children edges. An uplink that doesn't resolve to a
    // known note is ignored, leaving that note a root.
    let mut dependents: HashMap<String, Vec<String>> = HashMap::new();
    let mut has_parent: HashSet<String> = HashSet::new();
    for note in notes {
        if let Some(uplink_slug) = &note.4 {
            if let Some(parent_filename) = slug_to_filename.get(uplink_slug) {
                dependents.entry(parent_filename.clone()).or_default().push(note.0.clone());
                has_parent.insert(note.0.clone());
            }
        }
    }

    // Descending (date, time) comparison; absent fields compare as "".
    let cmp_date_desc = |a: &String, b: &String| {
        let na = filename_to_note.get(a).unwrap();
        let nb = filename_to_note.get(b).unwrap();
        let da = (na.2.as_deref().unwrap_or(""), na.3.as_deref().unwrap_or(""));
        let db = (nb.2.as_deref().unwrap_or(""), nb.3.as_deref().unwrap_or(""));
        db.cmp(&da)
    };

    let mut roots: Vec<String> = notes
        .iter()
        .filter(|note| !has_parent.contains(&note.0))
        .map(|note| note.0.clone())
        .collect();
    roots.sort_by(&cmp_date_desc);

    let mut result = Vec::new();
    let mut visited = HashSet::new();
    let mut visiting = HashSet::new();

    // Pre-order DFS: emit the node, then its children (sorted by `cmp`).
    // `visiting` breaks uplink cycles; `visited` prevents re-emission.
    fn dfs(
        node: String,
        dependents: &HashMap<String, Vec<String>>,
        visited: &mut HashSet<String>,
        visiting: &mut HashSet<String>,
        result: &mut Vec<String>,
        cmp: &impl Fn(&String, &String) -> std::cmp::Ordering,
    ) {
        if visiting.contains(&node) || visited.contains(&node) {
            return;
        }
        visiting.insert(node.clone());
        result.push(node.clone());
        if let Some(mut children) = dependents.get(&node).cloned() {
            children.sort_by(cmp);
            for child in children {
                dfs(child, dependents, visited, visiting, result, cmp);
            }
        }
        visiting.remove(&node);
        visited.insert(node);
    }

    for root in roots {
        dfs(root, &dependents, &mut visited, &mut visiting, &mut result, &cmp_date_desc);
    }

    // Anything not reached from a root (e.g. a pure uplink cycle) is still
    // emitted, in the same descending date order.
    let mut all_nodes: Vec<String> = notes.iter().map(|n| n.0.clone()).collect();
    all_nodes.sort_by(&cmp_date_desc);
    for node in all_nodes {
        if !visited.contains(&node) {
            dfs(node, &dependents, &mut visited, &mut visiting, &mut result, &cmp_date_desc);
        }
    }
    result
}
pub fn export_smarana_typ(notebook_path: &Path) {
let path = db_path(notebook_path);
let conn = match Connection::open(&path) {
Ok(c) => c,
Err(e) => {
eprintln!("Failed to open database for smarana.typ export: {}", e);
return;
}
};
let mut content = String::from("// Auto-generated by smarana — do not edit manually\n\n#import \".smarana/user.typ\": conf, section-divider, tag-index-page\n#show: conf\n\n");
content.push_str("// ═══ Fleeting Notes ═══\n");
content.push_str("#section-divider(\"FLEETING\", \"fleeting\")\n\n");
{
let mut stmt = conn
.prepare("SELECT filename FROM notes WHERE type = 'fleeting' ORDER BY date DESC, time DESC")
.unwrap();
let filenames: Vec<String> = stmt
.query_map([], |row| row.get(0))
.into_iter()
.flatten()
.filter_map(|r| r.ok())
.collect();
for f in &filenames {
content.push_str(&format!("#include \"{}\"\n", f));
}
}
content.push('\n');
content.push_str("// ═══ Capture Notes ═══\n");
content.push_str("#section-divider(\"CAPTURE\", \"capture\")\n\n");
{
let mut stmt = conn
.prepare("SELECT filename FROM notes WHERE type = 'capture' ORDER BY date DESC, time DESC")
.unwrap();
let filenames: Vec<String> = stmt
.query_map([], |row| row.get(0))
.into_iter()
.flatten()
.filter_map(|r| r.ok())
.collect();
for f in &filenames {
content.push_str(&format!("#include \"{}\"\n", f));
}
}
content.push('\n');
content.push_str("// ═══ Atomic Notes ═══\n");
content.push_str("#section-divider(\"ATOMIC\", \"atomic\")\n\n");
{
let mut stmt = conn
.prepare("SELECT filename, title, date, time, uplink FROM notes WHERE type = 'atomic' ORDER BY date ASC, time ASC")
.unwrap();
let atomic_notes: Vec<(String, String, Option<String>, Option<String>, Option<String>)> = stmt
.query_map([], |row| {
Ok((
row.get(0)?,
row.get(1)?,
row.get(2)?,
row.get(3)?,
row.get(4)?
))
})
.into_iter()
.flatten()
.filter_map(|r| r.ok())
.collect();
let sorted = topological_sort_atomic(&atomic_notes);
for f in &sorted {
content.push_str(&format!("#include \"{}\"\n", f));
}
}
content.push('\n');
content.push_str("// ═══ Appendix — Tag Index ═══\n");
content.push_str("#section-divider(\"APPENDIX\", \"appendix\")\n\n");
{
let mut stmt = conn
.prepare("SELECT title, tags FROM notes WHERE tags IS NOT NULL AND tags != '[]' AND tags != '' ORDER BY title ASC")
.unwrap();
let mut tag_map: std::collections::BTreeMap<String, Vec<(String, String)>> = std::collections::BTreeMap::new();
let rows: Vec<(String, String)> = stmt
.query_map([], |row| {
Ok((
row.get::<_, String>(0)?,
row.get::<_, String>(1)?,
))
})
.into_iter()
.flatten()
.filter_map(|r| r.ok())
.collect();
for (title, tags_json) in &rows {
if let Ok(tags) = serde_json::from_str::<Vec<String>>(tags_json) {
let slug = make_slug(title);
for tag in tags {
tag_map.entry(tag).or_default().push((title.clone(), slug.clone()));
}
}
}
for (tag, notes) in &tag_map {
content.push_str(&format!("#tag-index-page(\"{}\", (\n", escape_typst_string(tag)));
for (title, slug) in notes {
content.push_str(&format!(" (\"{}\", \"{}\"),\n", escape_typst_string(title), slug));
}
content.push_str("))\n\n");
}
}
let smarana_path = notebook_path.join("smarana.typ");
if let Err(e) = std::fs::write(&smarana_path, &content) {
eprintln!("Failed to write smarana.typ: {}", e);
}
}
/// Escape a string for embedding inside a double-quoted Typst string
/// literal: backslashes and double quotes get a leading backslash.
fn escape_typst_string(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    for c in s.chars() {
        match c {
            '\\' => out.push_str("\\\\"),
            '"' => out.push_str("\\\""),
            _ => out.push(c),
        }
    }
    out
}
/// List the note files directly inside `notebook_path`: regular `.typ`
/// files, excluding the generated `smarana.typ` and anything whose path
/// contains ".smarana". An unreadable directory yields an empty list.
fn collect_typ_files(notebook_path: &Path) -> Vec<PathBuf> {
    let mut files = Vec::new();
    if let Ok(entries) = std::fs::read_dir(notebook_path) {
        for entry in entries.flatten() {
            let p = entry.path();
            let is_typ = p.extension().map(|x| x == "typ").unwrap_or(false);
            // No file name at all counts as excluded.
            let is_index = p.file_name().map(|n| n == "smarana.typ").unwrap_or(true);
            let in_hidden = p.to_string_lossy().contains(".smarana");
            if p.is_file() && is_typ && !is_index && !in_hidden {
                files.push(p);
            }
        }
    }
    files
}
/// Full reindex of the notebook: rebuild the DB from the `.typ` files on
/// disk, propagate title renames into links, reindex outgoing links, and
/// regenerate `smarana.typ`.
///
/// Per-file errors are reported to stderr and skipped; the only early exit
/// is a failure to recreate the schema itself.
pub fn sync(notebook_path: &Path) {
    // Step 1: snapshot each note's pre-sync title slug (filename -> slug)
    // so renames can be detected after re-parsing. Must happen before
    // init_db_force wipes the tables.
    let old_slug_map: HashMap<String, String> = {
        let path = db_path(notebook_path);
        if let Ok(conn) = Connection::open(&path) {
            let mut stmt = match conn.prepare("SELECT filename, title FROM notes") {
                Ok(s) => s,
                Err(_) => {
                    // Table missing (fresh notebook): substitute a statement
                    // that yields zero rows so the map below stays empty.
                    conn.prepare("SELECT 1 WHERE 0").unwrap()
                }
            };
            stmt.query_map([], |row| {
                Ok((
                    row.get::<_, String>(0)?,
                    row.get::<_, String>(1)?,
                ))
            })
            .into_iter()
            .flatten()
            .filter_map(|r| r.ok())
            .map(|(filename, title)| (filename, links::make_slug(&title)))
            .collect()
        } else {
            HashMap::new()
        }
    };

    // Step 2: drop and recreate the schema from scratch.
    if let Err(e) = init_db_force(notebook_path) {
        eprintln!("Failed to initialize database schema: {}", e);
        return;
    }

    // Step 3: parse and upsert every note file; record its new title slug.
    let typ_files = collect_typ_files(notebook_path);
    let mut new_slug_map: HashMap<String, String> = HashMap::new();
    for path in &typ_files {
        if let Ok(content) = std::fs::read_to_string(path) {
            // Canonical note key: notebook-relative path with forward
            // slashes, so keys match across platforms.
            let rel_path = path
                .strip_prefix(notebook_path)
                .unwrap_or(path)
                .to_string_lossy()
                .to_string();
            let normalized_path = rel_path.replace("\\", "/");
            let fm = parse_frontmatter(&content, &normalized_path);
            let slug = links::make_slug(fm.title.as_deref().unwrap_or("untitled"));
            new_slug_map.insert(normalized_path.clone(), slug);
            if let Err(e) = upsert_note(notebook_path, &normalized_path, &fm, &content) {
                eprintln!("Failed to register note '{}': {}", normalized_path, e);
            }
        }
    }

    // Step 4: a note whose slug differs between snapshots was retitled —
    // collect those renames and rewrite links pointing at the old slug.
    let renames: Vec<links::SlugRename> = old_slug_map
        .iter()
        .filter_map(|(filename, old_slug)| {
            if let Some(new_slug) = new_slug_map.get(filename) {
                if new_slug != old_slug {
                    crate::vprintln!(
                        "links: detected rename '{}' → '{}' in {}",
                        old_slug,
                        new_slug,
                        filename
                    );
                    return Some(links::SlugRename {
                        filename: filename.clone(),
                        old_slug: old_slug.clone(),
                        new_slug: new_slug.clone(),
                    });
                }
            }
            None
        })
        .collect();
    let rewritten = links::rewrite_renames(notebook_path, &renames);
    // Files touched by the rewrite changed on disk; re-parse and re-upsert
    // them so the DB content stays in step with the files.
    for path in &rewritten {
        if let Ok(content) = std::fs::read_to_string(path) {
            let rel_path = path
                .strip_prefix(notebook_path)
                .unwrap_or(path)
                .to_string_lossy()
                .to_string();
            let normalized_path = rel_path.replace("\\", "/");
            let fm = parse_frontmatter(&content, &normalized_path);
            if let Err(e) = upsert_note(notebook_path, &normalized_path, &fm, &content) {
                eprintln!("Failed to re-register rewritten note '{}': {}", normalized_path, e);
            }
        }
    }

    // Step 5: re-scan every file for outgoing links and store them in
    // note_links (done after the rewrite so rewritten slugs are indexed).
    let db_conn_path = db_path(notebook_path);
    if let Ok(conn) = Connection::open(&db_conn_path) {
        for path in &typ_files {
            if let Ok(content) = std::fs::read_to_string(path) {
                let rel_path = path
                    .strip_prefix(notebook_path)
                    .unwrap_or(path)
                    .to_string_lossy()
                    .to_string();
                let normalized_path = rel_path.replace("\\", "/");
                let found_slugs = links::parse_links(&content);
                if let Err(e) = links::upsert_links(&conn, &normalized_path, &found_slugs) {
                    eprintln!("Failed to index links for '{}': {}", normalized_path, e);
                }
            }
        }
    }

    // Step 6: regenerate the master Typst document.
    export_smarana_typ(notebook_path);
}
/// Load every note from the index, newest first, and attach each note's
/// backlinks (filenames of notes whose links target this note's slug).
///
/// # Errors
/// Returns any rusqlite error from opening the DB or preparing queries;
/// individual rows that fail to map are silently skipped.
pub fn list_notes(notebook_path: &Path) -> Result<Vec<Note>> {
    let path = db_path(notebook_path);
    let conn = Connection::open(&path)?;
    let mut stmt = conn.prepare(
        "SELECT filename, title, summary, date, time, tags, type, uplink, content FROM notes ORDER BY date DESC, time DESC",
    )?;
    let note_iter = stmt.query_map([], |row| {
        // `tags` is stored as a JSON array string; unparsable -> None.
        let tags_str: Option<String> = row.get(5)?;
        let tags = tags_str.as_deref().and_then(|ts| serde_json::from_str(ts).ok());
        Ok(Note {
            filename: row.get(0)?,
            title: row.get(1)?,
            summary: row.get(2)?,
            date: row.get(3)?,
            time: row.get(4)?,
            tags,
            note_type: row.get(6)?,
            uplink: row.get(7)?,
            content: row.get(8)?,
            // Filled in from note_links below.
            backlinks: Vec::new(),
        })
    })?;
    let mut notes: Vec<Note> = note_iter.filter_map(|r| r.ok()).collect();

    // Prepare the backlink query once instead of once per note — the
    // original re-prepared the same statement inside the loop.
    let mut bl_stmt = conn.prepare(
        "SELECT source FROM note_links WHERE target_slug = ?1 ORDER BY source ASC",
    )?;
    for note in &mut notes {
        let my_slug = links::make_slug(&note.title);
        note.backlinks = bl_stmt
            .query_map(params![my_slug], |row| row.get(0))
            .into_iter()
            .flatten()
            .filter_map(|r| r.ok())
            .collect();
    }
    Ok(notes)
}