use anyhow::Result;
use clap::{Parser, Subcommand};
mod tui;
// Top-level argument parser for the `task-journal` binary.
// NOTE: plain `//` comments are used deliberately — `///` doc comments on
// clap-derive items would be folded into the generated --help output.
#[derive(Parser)]
#[command(name = "task-journal", version, about = "Task Journal CLI", long_about = None)]
struct Cli {
    // The subcommand to dispatch on; clap requires exactly one.
    #[command(subcommand)]
    command: Commands,
}
// All top-level subcommands.
//
// Per-project commands resolve the project from the current working directory
// (or an explicit --project path where offered) and read/write that project's
// JSONL event log; derived state lives in a per-project SQLite file.
// (`//` comments on purpose: `///` would become clap help text.)
#[derive(Subcommand)]
enum Commands {
    // Open a new task; prints the generated task id.
    Create {
        title: String,
        #[arg(long)]
        context: Option<String>,
    },
    // Inspect the raw event log.
    Events {
        #[command(subcommand)]
        action: EventsCmd,
    },
    // Rebuild the per-project SQLite state from the JSONL event log.
    RebuildState,
    // Assemble a context pack for one task (mode: "compact" or "full").
    Pack {
        task_id: String,
        #[arg(long, default_value = "compact")]
        mode: String,
    },
    // Append a typed event to a task.
    Event {
        task_id: String,
        // Exposed as `--type`; the field needs the raw-identifier form.
        #[arg(long, name = "type")]
        r#type: String,
        #[arg(long)]
        text: String,
        #[arg(long)]
        corrects: Option<String>,
        #[arg(long)]
        supersedes: Option<String>,
    },
    // Close a task, optionally recording a reason.
    Close {
        task_id: String,
        #[arg(long)]
        reason: Option<String>,
    },
    // Full-text search over the current project, or every project with --all-projects.
    Search {
        query: String,
        #[arg(long, default_value_t = 20)]
        limit: usize,
        #[arg(long)]
        all_projects: bool,
    },
    // Append a correction event pointing at an earlier event id.
    EventCorrect {
        #[arg(long)]
        corrects: String,
        #[arg(long)]
        task: String,
        #[arg(long)]
        text: String,
    },
    // Install (or remove, with --uninstall) Claude Code hook entries in settings.json.
    InstallHooks {
        #[arg(long, default_value = "user")]
        scope: String,
        #[arg(long)]
        uninstall: bool,
    },
    // Print classifier telemetry counters.
    Stats,
    // Launch the interactive terminal UI.
    #[command(alias = "tui")]
    Ui {
        #[arg(long)]
        project: Option<String>,
    },
    // Import historical Claude Code sessions as tasks.
    Backfill {
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        limit: Option<usize>,
        #[arg(long)]
        project: Option<String>,
    },
    // Export events as markdown or JSON.
    Export {
        #[arg(long, default_value = "md")]
        format: String,
        #[arg(long)]
        task: Option<String>,
        #[arg(long)]
        project: Option<String>,
    },
    // Hook entry point: classify incoming text and append a suggested event.
    // The hidden mock_* flags bypass the classifier backend (used by tests).
    IngestHook {
        #[arg(long)]
        kind: String,
        #[arg(long)]
        text: String,
        #[arg(long, default_value = "cli")]
        backend: String,
        #[arg(long, hide = true)]
        mock_event_type: Option<String>,
        #[arg(long, hide = true)]
        mock_task_id: Option<String>,
        #[arg(long, hide = true)]
        mock_confidence: Option<f64>,
    },
}
// Subcommands under `task-journal events`.
#[derive(Subcommand)]
enum EventsCmd {
    // Print the most recent events, newest first.
    List {
        #[arg(long, default_value_t = 20)]
        limit: usize,
    },
}
fn main() -> Result<()> {
let cli = Cli::parse();
match cli.command {
Commands::Create { title, context } => {
let cwd = std::env::current_dir()?;
let project_hash = tj_core::project_hash::from_path(&cwd)?;
let events_dir = tj_core::paths::events_dir()?;
let events_path = events_dir.join(format!("{project_hash}.jsonl"));
std::fs::create_dir_all(&events_dir)?;
let task_id = format!(
"tj-{}",
&ulid::Ulid::new().to_string()[10..16].to_lowercase()
);
let mut event = tj_core::event::Event::new(
task_id.clone(),
tj_core::event::EventType::Open,
tj_core::event::Author::User,
tj_core::event::Source::Cli,
context.clone().unwrap_or_else(|| title.clone()),
);
event.meta = serde_json::json!({ "title": title });
let mut writer = tj_core::storage::JsonlWriter::open(&events_path)?;
writer.append(&event)?;
writer.flush_durable()?;
println!("{}", task_id);
}
Commands::Events { action } => match action {
EventsCmd::List { limit } => {
let cwd = std::env::current_dir()?;
let project_hash = tj_core::project_hash::from_path(&cwd)?;
let events_path =
tj_core::paths::events_dir()?.join(format!("{project_hash}.jsonl"));
if !events_path.exists() {
println!("(no events yet)");
return Ok(());
}
let body = std::fs::read_to_string(&events_path)?;
let mut events: Vec<tj_core::event::Event> = body
.lines()
.filter(|l| !l.trim().is_empty())
.map(serde_json::from_str)
.collect::<Result<_, _>>()?;
events.reverse();
for e in events.into_iter().take(limit) {
let title = e
.meta
.get("title")
.and_then(|v| v.as_str())
.map(|s| s.to_string())
.unwrap_or_else(|| e.text.clone());
println!("{} [{:?}] {}", e.timestamp, e.event_type, title);
}
}
},
Commands::Pack { task_id, mode } => {
let cwd = std::env::current_dir()?;
let project_hash = tj_core::project_hash::from_path(&cwd)?;
let events_path = tj_core::paths::events_dir()?.join(format!("{project_hash}.jsonl"));
let state_path = tj_core::paths::state_dir()?.join(format!("{project_hash}.sqlite"));
let conn = tj_core::db::open(&state_path)?;
if events_path.exists() {
tj_core::db::rebuild_state(&conn, &events_path, &project_hash)?;
}
let pmode = match mode.as_str() {
"compact" => tj_core::pack::PackMode::Compact,
"full" => tj_core::pack::PackMode::Full,
other => anyhow::bail!("unknown mode: {other}"),
};
let pack = tj_core::pack::assemble(&conn, &task_id, pmode)?;
print!("{}", pack.text);
}
Commands::RebuildState => {
let cwd = std::env::current_dir()?;
let project_hash = tj_core::project_hash::from_path(&cwd)?;
let events_path = tj_core::paths::events_dir()?.join(format!("{project_hash}.jsonl"));
let state_path = tj_core::paths::state_dir()?.join(format!("{project_hash}.sqlite"));
if !events_path.exists() {
anyhow::bail!("no events file at {events_path:?}");
}
let conn = tj_core::db::open(&state_path)?;
let n = tj_core::db::rebuild_state(&conn, &events_path, &project_hash)?;
println!("rebuilt {n} events into {state_path:?}");
}
Commands::Event {
task_id,
r#type,
text,
corrects,
supersedes,
} => {
let cwd = std::env::current_dir()?;
let project_hash = tj_core::project_hash::from_path(&cwd)?;
let events_path = tj_core::paths::events_dir()?.join(format!("{project_hash}.jsonl"));
std::fs::create_dir_all(events_path.parent().unwrap())?;
let event_type = parse_event_type(&r#type)?;
let mut event = tj_core::event::Event::new(
&task_id,
event_type,
tj_core::event::Author::User,
tj_core::event::Source::Cli,
text,
);
event.corrects = corrects;
event.supersedes = supersedes;
let mut writer = tj_core::storage::JsonlWriter::open(&events_path)?;
writer.append(&event)?;
writer.flush_durable()?;
println!("{}", event.event_id);
}
Commands::Close { task_id, reason } => {
let cwd = std::env::current_dir()?;
let project_hash = tj_core::project_hash::from_path(&cwd)?;
let events_path = tj_core::paths::events_dir()?.join(format!("{project_hash}.jsonl"));
let mut event = tj_core::event::Event::new(
&task_id,
tj_core::event::EventType::Close,
tj_core::event::Author::User,
tj_core::event::Source::Cli,
reason.clone().unwrap_or_else(|| "(closed)".into()),
);
if let Some(r) = reason {
event.meta = serde_json::json!({"reason": r});
}
let mut writer = tj_core::storage::JsonlWriter::open(&events_path)?;
writer.append(&event)?;
writer.flush_durable()?;
println!("{}", event.event_id);
}
Commands::EventCorrect {
corrects,
task,
text,
} => {
let cwd = std::env::current_dir()?;
let project_hash = tj_core::project_hash::from_path(&cwd)?;
let events_path = tj_core::paths::events_dir()?.join(format!("{project_hash}.jsonl"));
std::fs::create_dir_all(events_path.parent().unwrap())?;
let mut event = tj_core::event::Event::new(
&task,
tj_core::event::EventType::Correction,
tj_core::event::Author::User,
tj_core::event::Source::Cli,
text,
);
event.corrects = Some(corrects);
let mut writer = tj_core::storage::JsonlWriter::open(&events_path)?;
writer.append(&event)?;
writer.flush_durable()?;
println!("{}", event.event_id);
}
Commands::InstallHooks { scope, uninstall } => {
let settings_path = match scope.as_str() {
"user" => {
let home =
std::env::var_os("HOME").ok_or_else(|| anyhow::anyhow!("HOME not set"))?;
std::path::PathBuf::from(home)
.join(".claude")
.join("settings.json")
}
"project" => std::env::current_dir()?
.join(".claude")
.join("settings.json"),
other => anyhow::bail!("unknown scope: {other}"),
};
if let Some(p) = settings_path.parent() {
std::fs::create_dir_all(p)?;
}
let mut current: serde_json::Value = if settings_path.exists() {
serde_json::from_str(&std::fs::read_to_string(&settings_path)?)
.unwrap_or_else(|_| serde_json::json!({}))
} else {
serde_json::json!({})
};
let hooks_obj = current
.as_object_mut()
.ok_or_else(|| anyhow::anyhow!("settings is not a JSON object"))?;
if uninstall {
hooks_obj.remove("hooks");
} else {
let cmd = "task-journal ingest-hook --kind=$CLAUDE_HOOK_NAME --text=\"$CLAUDE_HOOK_TEXT\" --backend=cli || true";
let entries = serde_json::json!({
"UserPromptSubmit": [{ "matcher": "", "hooks": [{ "type": "command", "command": cmd }] }],
"PostToolUse": [{ "matcher": "", "hooks": [{ "type": "command", "command": cmd }] }],
"Stop": [{ "matcher": "", "hooks": [{ "type": "command", "command": cmd }] }],
});
hooks_obj.insert("hooks".into(), entries);
}
std::fs::write(&settings_path, serde_json::to_string_pretty(¤t)?)?;
println!("{}", settings_path.display());
}
Commands::Stats => {
let metrics_dir = tj_core::paths::metrics_dir()?;
let mut total = 0usize;
let mut confirmed = 0usize;
let mut suggested = 0usize;
let mut errors = 0usize;
if metrics_dir.exists() {
for entry in std::fs::read_dir(&metrics_dir)? {
let path = entry?.path();
if path.extension().and_then(|e| e.to_str()) != Some("jsonl") {
continue;
}
let body = std::fs::read_to_string(&path)?;
for line in body.lines().filter(|l| !l.trim().is_empty()) {
total += 1;
let v: serde_json::Value = match serde_json::from_str(line) {
Ok(v) => v,
Err(_) => {
errors += 1;
continue;
}
};
match v.get("status").and_then(|s| s.as_str()) {
Some("confirmed") => confirmed += 1,
Some("suggested") => suggested += 1,
_ => {}
}
}
}
}
println!("classified: {total}");
println!(" confirmed: {confirmed}");
println!(" suggested: {suggested}");
println!(" parse errors: {errors}");
if total > 0 {
let ratio = confirmed as f64 / total as f64 * 100.0;
println!(" confirmed ratio: {ratio:.1}%");
}
}
Commands::IngestHook {
kind,
text,
backend,
mock_event_type,
mock_task_id,
mock_confidence,
} => {
let cwd = std::env::current_dir()?;
let project_hash = tj_core::project_hash::from_path(&cwd)?;
let events_path = tj_core::paths::events_dir()?.join(format!("{project_hash}.jsonl"));
std::fs::create_dir_all(events_path.parent().unwrap())?;
drain_pending(
&events_path,
mock_event_type.as_deref(),
mock_task_id.as_deref(),
mock_confidence,
)?;
let author_hint = if kind.contains("UserPrompt") {
"user"
} else {
"assistant"
};
let (etype, task_id, confidence, evidence_strength, suggested_text) =
if let (Some(t), Some(tid)) =
(mock_event_type.as_deref(), mock_task_id.as_deref())
{
(
parse_event_type(t)?,
tid.to_string(),
mock_confidence.unwrap_or(1.0),
None,
None,
)
} else {
let state_path =
tj_core::paths::state_dir()?.join(format!("{project_hash}.sqlite"));
let conn = tj_core::db::open(&state_path)?;
if events_path.exists() {
tj_core::db::rebuild_state(&conn, &events_path, &project_hash)?;
}
let recent = recent_task_contexts(&conn, 5)?;
if recent.is_empty() {
return Ok(());
}
use tj_core::classifier::Classifier;
let classifier: Box<dyn Classifier> = match backend.as_str() {
"cli" => {
Box::new(tj_core::classifier::cli::ClaudeCliClassifier::default())
}
"api" => {
Box::new(tj_core::classifier::http::AnthropicClassifier::from_env()?)
}
other => {
anyhow::bail!("unknown backend: {other} (expected `cli` or `api`)")
}
};
let input = tj_core::classifier::ClassifyInput {
text: text.clone(),
author_hint: author_hint.into(),
recent_tasks: recent,
};
let out = match classifier.classify(&input) {
Ok(o) => o,
Err(e) => {
persist_pending(&events_path, &text, &e.to_string())?;
return Ok(());
}
};
let Some(tid) = out.task_id_guess else {
return Ok(());
};
(
out.event_type,
tid,
out.confidence,
out.evidence_strength,
Some(out.suggested_text),
)
};
let event_text = suggested_text.unwrap_or(text);
let mut event = tj_core::event::Event::new(
&task_id,
etype,
tj_core::event::Author::Classifier,
tj_core::event::Source::Hook,
event_text,
);
event.confidence = Some(confidence);
event.status = tj_core::classifier::decide_status(confidence);
event.evidence_strength = evidence_strength;
let mut writer = tj_core::storage::JsonlWriter::open(&events_path)?;
writer.append(&event)?;
writer.flush_durable()?;
let metrics_path = tj_core::paths::metrics_dir()?.join(format!("{project_hash}.jsonl"));
let etype_str = serde_json::to_value(etype)?
.as_str()
.unwrap_or("?")
.to_string();
let status_str = serde_json::to_value(event.status)?
.as_str()
.unwrap_or("?")
.to_string();
let _ = tj_core::classifier::telemetry::append(
&metrics_path,
&tj_core::classifier::telemetry::TelemetryRecord {
timestamp: chrono::Utc::now()
.to_rfc3339_opts(chrono::SecondsFormat::Millis, true),
project_hash: project_hash.clone(),
task_id_guess: Some(task_id.clone()),
event_type: etype_str,
confidence,
status: status_str,
error: None,
},
);
println!("{}", event.event_id);
}
Commands::Export {
format,
task,
project,
} => {
let cwd = match project {
Some(p) => std::path::PathBuf::from(p),
None => std::env::current_dir()?,
};
let project_hash = tj_core::project_hash::from_path(&cwd)?;
let events_path = tj_core::paths::events_dir()?.join(format!("{project_hash}.jsonl"));
if !events_path.exists() {
anyhow::bail!("no events file at {events_path:?}");
}
let body = std::fs::read_to_string(&events_path)?;
let all_events: Vec<tj_core::event::Event> = body
.lines()
.filter(|l| !l.trim().is_empty())
.map(serde_json::from_str)
.collect::<Result<_, _>>()?;
let events: Vec<&tj_core::event::Event> = if let Some(ref tid) = task {
all_events.iter().filter(|e| e.task_id == *tid).collect()
} else {
all_events.iter().collect()
};
if events.is_empty() {
if let Some(tid) = task {
anyhow::bail!("no events found for task {tid}");
} else {
anyhow::bail!("no events in project");
}
}
match format.as_str() {
"json" => {
let json = serde_json::to_string_pretty(&events)?;
println!("{json}");
}
"md" => {
println!("# Task Journal Export\n");
let mut tasks: std::collections::BTreeMap<
String,
Vec<&tj_core::event::Event>,
> = std::collections::BTreeMap::new();
for e in &events {
tasks.entry(e.task_id.clone()).or_default().push(e);
}
for (task_id, task_events) in &tasks {
let title = task_events
.iter()
.find(|e| e.event_type == tj_core::event::EventType::Open)
.and_then(|e| {
e.meta
.get("title")
.and_then(|v| v.as_str())
.map(String::from)
.or_else(|| Some(e.text.clone()))
})
.unwrap_or_else(|| "(untitled)".into());
let status = if task_events
.last()
.map(|e| e.event_type == tj_core::event::EventType::Close)
.unwrap_or(false)
{
"closed"
} else {
"open"
};
let created = task_events
.first()
.map(|e| e.timestamp.as_str())
.unwrap_or("?");
println!("## [{task_id}] {title}");
println!("**Status**: {status} ");
println!("**Created**: {created}\n");
println!("### Timeline");
for e in task_events {
let etype = serde_json::to_value(e.event_type)
.ok()
.and_then(|v| v.as_str().map(String::from))
.unwrap_or_else(|| "?".into());
println!("- **[{}] {}**: {}", e.timestamp, etype, e.text);
}
println!();
}
}
other => anyhow::bail!("unknown format: {other} (expected `md` or `json`)"),
}
}
Commands::Search {
query,
limit,
all_projects,
} => {
if all_projects {
let state_dir = tj_core::paths::state_dir()?;
let hashes = tj_core::db::list_all_projects(&state_dir)?;
for hash in hashes {
let path = state_dir.join(format!("{hash}.sqlite"));
let conn = match rusqlite::Connection::open(&path) {
Ok(c) => c,
Err(_) => continue,
};
let mut stmt = match conn.prepare(
"SELECT DISTINCT task_id FROM search_fts WHERE search_fts MATCH ?1 LIMIT ?2"
) {
Ok(s) => s,
Err(_) => continue,
};
let rows = match stmt.query_map(rusqlite::params![&query, limit as i64], |r| {
r.get::<_, String>(0)
}) {
Ok(r) => r,
Err(_) => continue,
};
for id in rows.flatten() {
println!("{hash}\t{id}");
}
}
} else {
let cwd = std::env::current_dir()?;
let project_hash = tj_core::project_hash::from_path(&cwd)?;
let events_path =
tj_core::paths::events_dir()?.join(format!("{project_hash}.jsonl"));
let state_path =
tj_core::paths::state_dir()?.join(format!("{project_hash}.sqlite"));
let conn = tj_core::db::open(&state_path)?;
if events_path.exists() {
tj_core::db::rebuild_state(&conn, &events_path, &project_hash)?;
}
let mut stmt = conn.prepare(
"SELECT DISTINCT task_id FROM search_fts WHERE search_fts MATCH ?1 LIMIT ?2",
)?;
let ids: Vec<String> = stmt
.query_map(rusqlite::params![query, limit as i64], |r| {
r.get::<_, String>(0)
})?
.collect::<Result<_, _>>()?;
for id in ids {
println!("{id}");
}
}
}
Commands::Ui { project } => {
let project_path = match project {
Some(p) => std::path::PathBuf::from(p),
None => std::env::current_dir()?,
};
let mut app = tui::app::App::new(&project_path)?;
if app.session_list.sessions.is_empty() {
eprintln!("No Claude Code sessions found for: {}", project_path.display());
return Ok(());
}
app.run()?;
}
Commands::Backfill {
dry_run,
limit,
project,
} => {
use tj_core::session::{discovery, extractor, parser};
let project_path = match project {
Some(p) => std::path::PathBuf::from(p),
None => std::env::current_dir()?,
};
let project_hash = tj_core::project_hash::from_path(&project_path)?;
let events_dir = tj_core::paths::events_dir()?;
let events_path = events_dir.join(format!("{project_hash}.jsonl"));
let proj_dir = discovery::find_project_dir(&project_path)?;
let proj_dir = match proj_dir {
Some(d) => d,
None => {
eprintln!(
"No Claude Code sessions found for: {}",
project_path.display()
);
eprintln!(
"Looked in: {}",
discovery::projects_dir()
.map(|p| p.display().to_string())
.unwrap_or_else(|_| "?".into())
);
return Ok(());
}
};
let mut sessions = discovery::list_sessions(&proj_dir)?;
if let Some(max) = limit {
sessions.truncate(max);
}
if sessions.is_empty() {
eprintln!("No session JSONL files found in: {}", proj_dir.display());
return Ok(());
}
eprintln!(
"Found {} session(s) for {}",
sessions.len(),
project_path.display()
);
let already_imported = if events_path.exists() {
let content = std::fs::read_to_string(&events_path).unwrap_or_default();
sessions
.iter()
.filter_map(|p| p.file_stem().and_then(|s| s.to_str()).map(String::from))
.filter(|sid| content.contains(sid))
.collect::<std::collections::HashSet<_>>()
} else {
std::collections::HashSet::new()
};
let mut total_tasks = 0;
let mut total_events = 0;
for session_path in &sessions {
let session_id = session_path
.file_stem()
.and_then(|s| s.to_str())
.unwrap_or("?")
.to_string();
if already_imported.contains(&session_id) {
eprintln!(" ⊘ {} — already imported, skipping", &session_id[..8.min(session_id.len())]);
continue;
}
let parsed = match parser::parse_session(session_path) {
Ok(p) => p,
Err(e) => {
eprintln!(
" ✗ {} — parse error: {}",
&session_id[..8.min(session_id.len())],
e
);
continue;
}
};
let task = match extractor::extract_from_session(&parsed) {
Some(t) => t,
None => {
eprintln!(
" ⊘ {} — too small ({} msgs), skipping",
&session_id[..8.min(session_id.len())],
parsed.user_message_count()
);
continue;
}
};
if dry_run {
eprintln!(
" ▸ {} → task {} \"{}\" ({} events)",
&session_id[..8.min(session_id.len())],
task.task_id,
task.title.chars().take(60).collect::<String>(),
task.events.len()
);
for ev in &task.events {
let etype = serde_json::to_value(ev.event_type)
.ok()
.and_then(|v| v.as_str().map(String::from))
.unwrap_or_else(|| "?".into());
eprintln!(
" {:12} {}",
etype,
ev.text.chars().take(80).collect::<String>()
);
}
} else {
std::fs::create_dir_all(&events_dir)?;
let mut writer = tj_core::storage::JsonlWriter::open(&events_path)?;
for event in &task.events {
writer.append(event)?;
}
writer.flush_durable()?;
eprintln!(
" ✓ {} → {} \"{}\" ({} events)",
&session_id[..8.min(session_id.len())],
task.task_id,
task.title.chars().take(60).collect::<String>(),
task.events.len()
);
}
total_tasks += 1;
total_events += task.events.len();
}
if dry_run {
eprintln!("\nDry run: would create {total_tasks} task(s) with {total_events} event(s).");
eprintln!("Run without --dry-run to import.");
} else {
eprintln!("\nImported {total_tasks} task(s) with {total_events} event(s).");
}
}
}
Ok(())
}
/// Fetch up to `limit` open tasks (most recently active first), each paired
/// with summaries of its three latest events, for use as classifier context.
///
/// # Errors
/// Returns any SQLite error from preparing or running the queries.
fn recent_task_contexts(
    conn: &rusqlite::Connection,
    limit: usize,
) -> anyhow::Result<Vec<tj_core::classifier::TaskContext>> {
    let mut stmt = conn.prepare(
        "SELECT task_id, title FROM tasks WHERE status='open' ORDER BY last_event_at DESC LIMIT ?1",
    )?;
    let task_rows: Vec<(String, String)> = stmt
        .query_map(rusqlite::params![limit as i64], |r| {
            Ok((r.get::<_, String>(0)?, r.get::<_, String>(1)?))
        })?
        .collect::<Result<_, _>>()?;
    // Prepare the per-task event query once; the original re-parsed the same
    // SQL on every loop iteration.
    let mut e_stmt = conn.prepare(
        "SELECT ei.type, sf.text FROM events_index ei
         LEFT JOIN search_fts sf ON sf.event_id = ei.event_id
         WHERE ei.task_id=?1 ORDER BY ei.timestamp DESC LIMIT 3",
    )?;
    let mut out = Vec::with_capacity(task_rows.len());
    for (task_id, title) in task_rows {
        // Each summary is "[type] <first 80 chars of text>"; the LEFT JOIN
        // means text may be NULL, hence the Option + unwrap_or_default.
        let last_events: Vec<String> = e_stmt
            .query_map(rusqlite::params![task_id], |r| {
                let ty: String = r.get(0)?;
                let txt: Option<String> = r.get(1)?;
                Ok(format!(
                    "[{ty}] {}",
                    txt.unwrap_or_default().chars().take(80).collect::<String>()
                ))
            })?
            .collect::<Result<_, _>>()?;
        out.push(tj_core::classifier::TaskContext {
            task_id,
            title,
            last_events,
        });
    }
    Ok(out)
}
/// Queue a hook payload whose classification failed, so a later invocation
/// can retry it via `drain_pending`.
///
/// `events_path` is `<root>/events/<hash>.jsonl`; the payload is written to
/// `<root>/pending/<ulid>.json` with the original text, the error string,
/// and a queued-at timestamp.
///
/// # Errors
/// Fails if `events_path` is too shallow to have a grandparent, or on any
/// filesystem/serialization error.
fn persist_pending(events_path: &std::path::Path, text: &str, err: &str) -> anyhow::Result<()> {
    // Checked grandparent lookup; the original `.unwrap()` chain would panic
    // on a pathological path instead of returning an error.
    let root = events_path
        .parent()
        .and_then(|p| p.parent())
        .ok_or_else(|| anyhow::anyhow!("cannot locate pending dir from {events_path:?}"))?;
    let pending_dir = root.join("pending");
    std::fs::create_dir_all(&pending_dir)?;
    let id = ulid::Ulid::new().to_string();
    let payload = serde_json::json!({"text": text, "error": err, "queued_at": chrono::Utc::now().to_rfc3339()});
    std::fs::write(
        pending_dir.join(format!("{id}.json")),
        serde_json::to_string_pretty(&payload)?,
    )?;
    Ok(())
}
/// Replay payloads queued by `persist_pending`, then delete them.
///
/// Each `*.json` file in `<root>/pending` is read, and — only when mock
/// classification inputs are supplied (the test path) — turned into an event
/// appended to `events_path`. Without mocks the queued files are simply
/// removed; real re-classification of drained payloads is not attempted here.
///
/// # Errors
/// Fails on a too-shallow `events_path`, unreadable/unparseable pending
/// files, or any write error.
fn drain_pending(
    events_path: &std::path::Path,
    mock_etype: Option<&str>,
    mock_tid: Option<&str>,
    mock_conf: Option<f64>,
) -> anyhow::Result<()> {
    // Checked grandparent lookup; the original `.unwrap()` chain would panic
    // on a pathological path instead of returning an error.
    let root = events_path
        .parent()
        .and_then(|p| p.parent())
        .ok_or_else(|| anyhow::anyhow!("cannot locate pending dir from {events_path:?}"))?;
    let pending_dir = root.join("pending");
    if !pending_dir.exists() {
        return Ok(());
    }
    for entry in std::fs::read_dir(&pending_dir)? {
        let entry = entry?;
        if entry.path().extension().and_then(|e| e.to_str()) != Some("json") {
            continue;
        }
        let body = std::fs::read_to_string(entry.path())?;
        let v: serde_json::Value = serde_json::from_str(&body)?;
        let text = v
            .get("text")
            .and_then(|x| x.as_str())
            .unwrap_or("")
            .to_string();
        if !text.is_empty() {
            if let (Some(t), Some(tid)) = (mock_etype, mock_tid) {
                let mut event = tj_core::event::Event::new(
                    tid,
                    parse_event_type(t)?,
                    tj_core::event::Author::Classifier,
                    tj_core::event::Source::Hook,
                    text,
                );
                event.confidence = mock_conf;
                event.status = tj_core::classifier::decide_status(mock_conf.unwrap_or(1.0));
                let mut writer = tj_core::storage::JsonlWriter::open(events_path)?;
                writer.append(&event)?;
                writer.flush_durable()?;
            }
        }
        // Remove the queued file whether or not it produced an event.
        std::fs::remove_file(entry.path())?;
    }
    Ok(())
}
fn parse_event_type(s: &str) -> anyhow::Result<tj_core::event::EventType> {
use tj_core::event::EventType::*;
Ok(match s {
"open" => Open,
"hypothesis" => Hypothesis,
"finding" => Finding,
"evidence" => Evidence,
"decision" => Decision,
"rejection" => Rejection,
"constraint" => Constraint,
"correction" => Correction,
"reopen" => Reopen,
"supersede" => Supersede,
"close" => Close,
"redirect" => Redirect,
other => anyhow::bail!("unknown event type: {other}"),
})
}