use std::fmt::Write as _;
use std::path::{Path, PathBuf};
use std::process::Command;
use anyhow::Result;
use clap::{Parser, Subcommand};
use soma_studio_core::AppConfig;
use soma_studio_server::{
IndexedSourceFileRow, StudioStorage, embedded_web_asset_count, embedded_web_shell_available,
prepare_server,
};
// Command-line surface of the `soma-studio` binary.
// NOTE: plain `//` comments used deliberately — `///` would change clap's help text.
#[derive(Debug, Parser)]
#[command(name = "soma-studio")]
#[command(about = "Local-first browser studio powered by Ranvier and SvelteKit")]
struct Cli {
    // The subcommand selected by the user (serve / init / doctor).
    #[command(subcommand)]
    command: Commands,
}
// Subcommands supported by the CLI. `//` comments keep clap help output unchanged.
#[derive(Debug, Subcommand)]
enum Commands {
    // Prepare and run the local server.
    Serve {
        // When set, skip opening the bootstrap URL in the default browser.
        #[arg(long)]
        no_open: bool,
    },
    // Create the runtime directories from the environment configuration.
    Init,
    // Print a diagnostic report about configuration, storage, and artifacts.
    Doctor,
}
/// Binary entry point: installs tracing, parses the CLI, dispatches the subcommand.
#[tokio::main]
async fn main() -> Result<()> {
    // Honor RUST_LOG-style env filtering; default to info globally with
    // debug-level logs for this application's own crates.
    tracing_subscriber::fmt()
        .with_env_filter(
            tracing_subscriber::EnvFilter::try_from_default_env()
                .unwrap_or_else(|_| "info,soma_studio=debug".into()),
        )
        .init();
    let cli = Cli::parse();
    match cli.command {
        Commands::Serve { no_open } => serve(no_open).await?,
        Commands::Init => init()?,
        Commands::Doctor => doctor().await?,
    }
    Ok(())
}
/// Prepare and run the studio server, optionally opening the bootstrap URL in
/// the user's default browser first. Blocks until the server exits.
async fn serve(no_open: bool) -> Result<()> {
    let config = AppConfig::from_env()?;
    let server = prepare_server(config).await?;
    println!("Soma Studio bootstrap URL:");
    println!("{}", server.bootstrap_url());
    if !no_open {
        // Best effort: a browser-launch failure must not stop the server.
        let _ = open_in_browser(server.bootstrap_url());
    }
    server.run().await
}
/// Create the runtime directories described by the environment configuration
/// and print where each one lives.
fn init() -> Result<()> {
    let config = AppConfig::from_env()?;
    config.ensure_directories()?;
    println!("Initialized Soma Studio directories");
    // Table-driven so labels and paths stay visually aligned in one place.
    let locations = [
        ("data", &config.data_dir),
        ("derived", &config.derived_dir),
        ("assets", &config.user_assets_dir),
    ];
    for (label, dir) in locations {
        println!(" {label}: {}", dir.display());
    }
    Ok(())
}
/// Print the doctor report for the current environment configuration.
async fn doctor() -> Result<()> {
    let config = AppConfig::from_env()?;
    // The report already ends each check with a newline, so use print!.
    print!("{}", build_doctor_report(&config).await);
    Ok(())
}
/// One line of the doctor report: a labelled status plus free-form detail.
#[derive(Debug, Clone, PartialEq, Eq)]
struct DoctorCheck {
    // Fixed label printed in the left-hand report column.
    label: &'static str,
    // One of "ok", "warn", "missing", "error", set by the constructors.
    status: &'static str,
    // Human-readable explanation for the status.
    detail: String,
}
impl DoctorCheck {
    /// Shared constructor backing the status-specific helpers below; keeps
    /// the struct literal in one place instead of four near-identical copies.
    fn with_status(label: &'static str, status: &'static str, detail: impl Into<String>) -> Self {
        Self {
            label,
            status,
            detail: detail.into(),
        }
    }
    /// Check passed.
    fn ok(label: &'static str, detail: impl Into<String>) -> Self {
        Self::with_status(label, "ok", detail)
    }
    /// Check passed with caveats worth surfacing to the user.
    fn warn(label: &'static str, detail: impl Into<String>) -> Self {
        Self::with_status(label, "warn", detail)
    }
    /// A required file, directory, or record was not found.
    fn missing(label: &'static str, detail: impl Into<String>) -> Self {
        Self::with_status(label, "missing", detail)
    }
    /// The check itself could not be completed.
    fn error(label: &'static str, detail: impl Into<String>) -> Self {
        Self::with_status(label, "error", detail)
    }
}
/// Assemble the full doctor report: configuration checks first, then the
/// storage-backed checks, rendered as one aligned line per check.
async fn build_doctor_report(config: &AppConfig) -> String {
    let mut checks = config_doctor_checks(config);
    checks.extend(storage_doctor_checks(config).await);
    let mut report = String::new();
    // Writing to a String cannot fail in practice; expect documents the invariant.
    writeln!(&mut report, "Soma Studio doctor report").expect("write report header");
    for check in checks {
        // Columns: "label:" padded to 20, status padded to 7, free-form detail.
        writeln!(
            &mut report,
            " {:<20} {:<7} {}",
            format!("{}:", check.label),
            check.status,
            check.detail
        )
        .expect("write report line");
    }
    report
}
/// Checks that need only the on-disk configuration (no database access):
/// bind address, runtime directories, web assets, notebook notes, derived
/// artifacts, and the persisted search index status.
fn config_doctor_checks(config: &AppConfig) -> Vec<DoctorCheck> {
    let mut checks = vec![
        match config.bind_socket_addr() {
            Ok(addr) => DoctorCheck::ok("bind", addr.to_string()),
            Err(error) => DoctorCheck::error("bind", error.to_string()),
        },
        dir_check("project root", &config.project_root),
        dir_check("data dir", &config.data_dir),
        dir_check("derived dir", &config.derived_dir),
        dir_check("notebook root", &config.notebook_dir),
        dir_check("assets dir", &config.user_assets_dir),
    ];
    // Web assets can come from disk or from the embedded bundle.
    checks.extend(web_asset_checks(
        &config.web_build_dir,
        &config.web_shell_file,
        embedded_web_shell_available(),
        embedded_web_asset_count(),
    ));
    checks.push(notebook_notes_check(&config.notebook_dir));
    checks.push(derived_artifacts_check(
        &config.derived_dir,
        &config.notebook_dir,
    ));
    checks.push(search_index_check(config));
    checks
}
/// Checks that require opening the studio storage: schema version, provider
/// selection and tests, source roots, ingest status, and artifact/corpus
/// health for the indexed source files. When the database file does not
/// exist yet, every dependent check is reported as a warning instead.
async fn storage_doctor_checks(config: &AppConfig) -> Vec<DoctorCheck> {
    if !config.db_path.exists() {
        return vec![
            DoctorCheck::missing("db", config.db_path.display().to_string()),
            DoctorCheck::warn("db schema", "not available until db is created"),
            DoctorCheck::warn("provider selection", "not available until db is created"),
            DoctorCheck::warn("provider tests", "not available until db is created"),
            DoctorCheck::warn("source roots", "not available until db is created"),
            DoctorCheck::warn("ingest status", "not available until db is created"),
            DoctorCheck::warn("search corpus", "not available until db is created"),
        ];
    }
    let storage = match StudioStorage::open(config).await {
        Ok(storage) => storage,
        Err(error) => {
            // Without storage, none of the remaining checks can run.
            return vec![DoctorCheck::error(
                "db",
                format!("failed to open {}: {error}", config.db_path.display()),
            )];
        }
    };
    let mut checks = vec![DoctorCheck::ok(
        "db",
        format!("opened {}", config.db_path.display()),
    )];
    checks.push(match storage.schema_status().await {
        Ok(status) if status.is_current() => DoctorCheck::ok(
            "db schema",
            format!(
                "version {} of {}",
                status.current_version, status.expected_version
            ),
        ),
        Ok(status) => DoctorCheck::warn(
            "db schema",
            format!(
                "version {} of {}, migration required",
                status.current_version, status.expected_version
            ),
        ),
        Err(error) => DoctorCheck::error("db schema", error.to_string()),
    });
    let provider_selection = storage.load_provider_selection().await;
    // Capture the provider/model pair before the match below consumes the
    // selection; it is Some only when both provider and model are set.
    let selected_provider_model = provider_selection.as_ref().ok().and_then(|selection| {
        Some((
            selection.selected_provider.clone()?,
            selection.selected_model_id.clone()?,
        ))
    });
    checks.push(match provider_selection {
        Ok(selection) => match (selection.selected_provider, selection.selected_model_id) {
            (Some(provider), Some(model)) => {
                DoctorCheck::ok("provider selection", format!("{provider}/{model}"))
            }
            (Some(provider), None) => DoctorCheck::warn(
                "provider selection",
                format!("{provider} selected, no model"),
            ),
            (None, _) => DoctorCheck::warn("provider selection", "no provider selected"),
        },
        Err(error) => DoctorCheck::error("provider selection", error.to_string()),
    });
    checks.push(match storage.list_provider_statuses().await {
        Ok(statuses) if statuses.is_empty() => {
            DoctorCheck::warn("provider tests", "no stored provider test results")
        }
        Ok(statuses) => provider_tests_check(&statuses),
        Err(error) => DoctorCheck::error("provider tests", error.to_string()),
    });
    checks.push(match storage.list_source_roots().await {
        Ok(source_roots) if source_roots.is_empty() => {
            DoctorCheck::warn("source roots", "no source roots registered")
        }
        Ok(source_roots) => {
            DoctorCheck::ok("source roots", format!("{} registered", source_roots.len()))
        }
        Err(error) => DoctorCheck::error("source roots", error.to_string()),
    });
    checks.push(match storage.load_ingest_status().await {
        Ok(status) => DoctorCheck::ok(
            "ingest status",
            format!(
                "{} source files, {} indexed text files, running: {}",
                status.total_source_files, status.indexed_text_files, status.running
            ),
        ),
        Err(error) => DoctorCheck::error("ingest status", error.to_string()),
    });
    match storage.list_indexed_source_files().await {
        Ok(files) => {
            checks.push(source_root_artifacts_check(
                &config.derived_dir,
                &files,
                selected_provider_model,
            ));
            checks.push(search_corpus_check(
                &config.derived_dir,
                &config.notebook_dir,
                &files,
            ));
        }
        Err(error) => {
            // Surface the same failure under both dependent check labels.
            let detail = error.to_string();
            checks.push(DoctorCheck::error("source-root artifacts", detail.clone()));
            checks.push(DoctorCheck::error("search corpus", detail));
        }
    }
    checks
}
/// Summarize stored provider test results: ok only when every provider has a
/// recorded passing test; otherwise warn with pass/fail/unknown counts.
fn provider_tests_check(statuses: &[soma_studio_server::ProviderStatusRow]) -> DoctorCheck {
    // Single pass over the rows; anything without a recorded result is "unknown".
    let mut passed = 0usize;
    let mut failed = 0usize;
    for status in statuses {
        match status.last_test_ok {
            Some(true) => passed += 1,
            Some(false) => failed += 1,
            None => {}
        }
    }
    let unknown = statuses.len().saturating_sub(passed + failed);
    let detail = format!("{passed} passed, {failed} failed, {unknown} unknown");
    if failed == 0 && unknown == 0 {
        DoctorCheck::ok("provider tests", detail)
    } else {
        DoctorCheck::warn("provider tests", detail)
    }
}
/// Report a directory as ok when present on disk, missing otherwise.
fn dir_check(label: &'static str, path: &Path) -> DoctorCheck {
    let detail = path.display().to_string();
    match path.is_dir() {
        true => DoctorCheck::ok(label, detail),
        false => DoctorCheck::missing(label, detail),
    }
}
/// Report a regular file as ok when present on disk, missing otherwise.
fn file_check(label: &'static str, path: &Path) -> DoctorCheck {
    let detail = path.display().to_string();
    match path.is_file() {
        true => DoctorCheck::ok(label, detail),
        false => DoctorCheck::missing(label, detail),
    }
}
/// Produce the "web build dir" and "web shell" checks. Preference order:
/// an on-disk shell file, then the embedded bundle, then report whatever is
/// (not) on disk.
fn web_asset_checks(
    web_build_dir: &Path,
    web_shell_file: &Path,
    embedded_shell_available: bool,
    embedded_asset_count: usize,
) -> Vec<DoctorCheck> {
    // On-disk shell present: plain directory/file checks suffice.
    if web_shell_file.is_file() {
        return vec![
            dir_check("web build dir", web_build_dir),
            file_check("web shell", web_shell_file),
        ];
    }
    // No embedded fallback either: report the on-disk state as-is.
    if !embedded_shell_available {
        return vec![
            dir_check("web build dir", web_build_dir),
            file_check("web shell", web_shell_file),
        ];
    }
    // Embedded assets can serve the UI even without a build directory, so a
    // missing build dir is only a warning here.
    let build_dir_check = match web_build_dir.is_dir() {
        true => DoctorCheck::ok("web build dir", web_build_dir.display().to_string()),
        false => DoctorCheck::warn(
            "web build dir",
            format!(
                "{} missing; {embedded_asset_count} embedded assets available",
                web_build_dir.display()
            ),
        ),
    };
    vec![
        build_dir_check,
        DoctorCheck::ok(
            "web shell",
            format!("embedded spa.html available ({embedded_asset_count} assets)"),
        ),
    ]
}
/// Count notes under the notebook root and report the markdown/typst split.
/// Warns when the directory exists but contains no notes.
fn notebook_notes_check(notebook_dir: &Path) -> DoctorCheck {
    if !notebook_dir.is_dir() {
        return DoctorCheck::missing("notebook notes", "notebook root is missing");
    }
    match count_notebook_notes(notebook_dir) {
        Ok((total, markdown, typst)) if total > 0 => DoctorCheck::ok(
            "notebook notes",
            format!("{total} notes: {markdown} markdown, {typst} typst"),
        ),
        Ok(_) => DoctorCheck::warn("notebook notes", "no .md or .typ notes found"),
        Err(error) => DoctorCheck::error("notebook notes", error),
    }
}
/// Counters gathered while scanning the derived directory and cross-checking
/// notebook notes against their derived artifacts.
#[derive(Debug, Default)]
struct DerivedArtifactSummary {
    // Total regular files seen under the derived directory.
    file_count: usize,
    // .json artifacts that failed to read or parse.
    corrupt_json: usize,
    // Notebook index/chunk artifacts that do not exist on disk.
    missing_notebook_artifacts: usize,
    // Notebook index/chunk artifacts older than their inputs.
    stale_notebook_artifacts: usize,
    // A few example paths for the report line.
    samples: DiagnosticSamples,
}
/// Counters for derived artifacts belonging to indexed source-root files.
#[derive(Debug, Default)]
struct SourceRootArtifactSummary {
    // Number of files the database reports as indexed.
    indexed_files: usize,
    // Source files recorded in the database that no longer exist on disk.
    source_missing: usize,
    text_missing: usize,
    text_stale: usize,
    chunk_missing: usize,
    chunk_stale: usize,
    chunk_corrupt: usize,
    embedding_missing: usize,
    embedding_stale: usize,
    embedding_corrupt: usize,
    // Embedding artifact written by a different provider/model than selected,
    // or missing its provider/model metadata entirely.
    embedding_mismatch: usize,
    // False when no provider/model is selected; embedding counters are then
    // not populated and the report says so explicitly.
    embedding_checked: bool,
    // A few example paths for the report line.
    samples: DiagnosticSamples,
}
/// Counters describing how much of the persisted chunk corpus is ready to
/// feed search, split between notebook notes and source-root files.
#[derive(Debug, Default)]
struct SearchCorpusSummary {
    notebook_notes: usize,
    source_files: usize,
    // Chunks that exist, parse, are fresh, and are non-empty.
    notebook_ready_chunks: usize,
    source_ready_chunks: usize,
    notebook_chunk_missing: usize,
    notebook_chunk_stale: usize,
    notebook_chunk_corrupt: usize,
    notebook_chunk_empty: usize,
    // Source files recorded in the database that no longer exist on disk.
    source_missing: usize,
    source_chunk_missing: usize,
    source_chunk_stale: usize,
    source_chunk_corrupt: usize,
    source_chunk_empty: usize,
    // A few example paths for the report line.
    samples: DiagnosticSamples,
}
/// Cap on how many concrete example paths a summary records.
const DIAGNOSTIC_SAMPLE_LIMIT: usize = 3;

/// Bounded collection of human-readable diagnostic examples: keeps the first
/// few and merely counts the rest so report lines stay short.
#[derive(Debug, Default)]
struct DiagnosticSamples {
    items: Vec<String>,
    omitted: usize,
}

impl DiagnosticSamples {
    /// Record a sample, or bump the omitted counter once the limit is hit.
    fn push(&mut self, sample: impl Into<String>) {
        if self.items.len() >= DIAGNOSTIC_SAMPLE_LIMIT {
            self.omitted += 1;
        } else {
            self.items.push(sample.into());
        }
    }
    /// True when no samples were recorded at all.
    fn is_empty(&self) -> bool {
        self.items.is_empty()
    }
    /// Render as "a; b; c; N more omitted".
    fn detail(&self) -> String {
        let mut rendered = self.items.join("; ");
        if self.omitted > 0 {
            if !rendered.is_empty() {
                rendered.push_str("; ");
            }
            rendered.push_str(&format!("{} more omitted", self.omitted));
        }
        rendered
    }
}
/// Validate derived artifacts on disk: counts files, flags corrupt JSON, and
/// cross-checks notebook notes against their index/chunk artifacts.
fn derived_artifacts_check(derived_dir: &Path, notebook_dir: &Path) -> DoctorCheck {
    if !derived_dir.is_dir() {
        return DoctorCheck::missing("derived artifacts", "derived directory is missing");
    }
    match summarize_derived_artifacts(derived_dir, notebook_dir) {
        Ok(summary) if summary.has_no_artifacts() => {
            DoctorCheck::warn("derived artifacts", "no derived artifact files found")
        }
        Ok(summary) if summary.has_diagnostics() => {
            DoctorCheck::warn("derived artifacts", summary.detail())
        }
        Ok(summary) => DoctorCheck::ok("derived artifacts", summary.detail()),
        Err(error) => DoctorCheck::error("derived artifacts", error),
    }
}
/// Report on derived text/chunk/embedding artifacts for the indexed source
/// files. `provider_model` enables the embedding checks when both parts are
/// known; otherwise embeddings are skipped.
fn source_root_artifacts_check(
    derived_dir: &Path,
    files: &[IndexedSourceFileRow],
    provider_model: Option<(String, String)>,
) -> DoctorCheck {
    if files.is_empty() {
        return DoctorCheck::warn(
            "source-root artifacts",
            "no indexed source files to inspect",
        );
    }
    let summary = summarize_source_root_artifacts(derived_dir, files, provider_model.as_ref());
    if summary.has_diagnostics() {
        DoctorCheck::warn("source-root artifacts", summary.detail())
    } else {
        DoctorCheck::ok("source-root artifacts", summary.detail())
    }
}
/// Report the persisted search index status as computed by the server crate.
/// A ready index surfaces counts and timestamps; otherwise the diagnostics
/// (when present) explain why the index is unusable.
fn search_index_check(config: &AppConfig) -> DoctorCheck {
    let status = soma_studio_server::search_index_status(config);
    let detail = if status.ready {
        format!(
            "{} documents, {} chunks, schema {}, tokenizer {}, rebuilt {}, synced {}",
            status.document_count,
            status.chunk_count,
            status.schema_version,
            status.tokenizer_profile,
            // Timestamps may be unset on a freshly created index.
            status.last_rebuilt_at.as_deref().unwrap_or("unknown"),
            status.last_synced_at.as_deref().unwrap_or("unknown")
        )
    } else if status.diagnostics.is_empty() {
        "search index is not ready".to_string()
    } else {
        status.diagnostics.join("; ")
    };
    if status.ready {
        DoctorCheck::ok("search index", detail)
    } else {
        DoctorCheck::warn("search index", detail)
    }
}
/// Report how many persisted chunks are ready to feed the search corpus,
/// combining notebook notes and indexed source files.
fn search_corpus_check(
    derived_dir: &Path,
    notebook_dir: &Path,
    files: &[IndexedSourceFileRow],
) -> DoctorCheck {
    if !derived_dir.is_dir() {
        return DoctorCheck::missing("search corpus", "derived directory is missing");
    }
    match summarize_search_corpus(derived_dir, notebook_dir, files) {
        // Zero ready chunks with nothing wrong: warn so an empty corpus is noticed.
        Ok(summary) if summary.ready_chunks() == 0 && !summary.has_diagnostics() => {
            DoctorCheck::warn("search corpus", summary.detail())
        }
        Ok(summary) if summary.has_diagnostics() => {
            DoctorCheck::warn("search corpus", summary.detail())
        }
        Ok(summary) => DoctorCheck::ok("search corpus", summary.detail()),
        Err(error) => DoctorCheck::error("search corpus", error),
    }
}
/// Count notebook notes under `root`, returning (total, markdown, typst).
/// Errors are the directory-walk error strings from `visit_files`.
fn count_notebook_notes(root: &Path) -> std::result::Result<(usize, usize, usize), String> {
    let mut markdown = 0;
    let mut typst = 0;
    visit_files(
        root,
        &mut |path| match path.extension().and_then(|extension| extension.to_str()) {
            Some("md") => markdown += 1,
            Some("typ") => typst += 1,
            _ => {}
        },
    )?;
    // The total is exactly the sum of the two recognized kinds.
    Ok((markdown + typst, markdown, typst))
}
/// Walk the derived directory (counting files and corrupt JSON) and, when the
/// notebook root exists, verify each note's derived artifacts.
/// Returns an error string when a directory walk fails.
fn summarize_derived_artifacts(
    derived_dir: &Path,
    notebook_dir: &Path,
) -> std::result::Result<DerivedArtifactSummary, String> {
    let mut summary = DerivedArtifactSummary::default();
    visit_files(derived_dir, &mut |path| {
        summary.file_count += 1;
        if is_json_file(path) && !is_readable_json(path) {
            summary.corrupt_json += 1;
            let artifact = display_relative_path(derived_dir, path);
            push_diagnostic_sample(&mut summary.samples, format!("corrupt json {artifact}"));
        }
    })?;
    if notebook_dir.is_dir() {
        visit_files(notebook_dir, &mut |path| {
            if is_notebook_note(path) {
                inspect_notebook_artifacts(&mut summary, derived_dir, notebook_dir, path);
            }
        })?;
    }
    Ok(summary)
}
/// Inspect every indexed source file and aggregate artifact health counters.
/// Embedding artifacts are only validated when a provider/model is selected.
fn summarize_source_root_artifacts(
    derived_dir: &Path,
    files: &[IndexedSourceFileRow],
    provider_model: Option<&(String, String)>,
) -> SourceRootArtifactSummary {
    let mut summary = SourceRootArtifactSummary::default();
    summary.indexed_files = files.len();
    summary.embedding_checked = provider_model.is_some();
    for file in files.iter() {
        inspect_source_root_artifacts(&mut summary, derived_dir, file, provider_model);
    }
    summary
}
/// Aggregate chunk readiness for both notebook notes and source-root files.
/// Returns an error string when walking the notebook directory fails.
fn summarize_search_corpus(
    derived_dir: &Path,
    notebook_dir: &Path,
    files: &[IndexedSourceFileRow],
) -> std::result::Result<SearchCorpusSummary, String> {
    let mut summary = SearchCorpusSummary {
        source_files: files.len(),
        ..SearchCorpusSummary::default()
    };
    if notebook_dir.is_dir() {
        visit_files(notebook_dir, &mut |path| {
            if is_notebook_note(path) {
                inspect_notebook_search_corpus(&mut summary, derived_dir, notebook_dir, path);
            }
        })?;
    }
    for file in files {
        inspect_source_root_search_corpus(&mut summary, derived_dir, file);
    }
    Ok(summary)
}
impl DerivedArtifactSummary {
    /// True when the walk found nothing at all (and raised no diagnostics).
    fn has_no_artifacts(&self) -> bool {
        self.file_count == 0 && !self.has_diagnostics()
    }
    /// Any counter that should downgrade the check to a warning.
    fn has_diagnostics(&self) -> bool {
        self.corrupt_json > 0
            || self.missing_notebook_artifacts > 0
            || self.stale_notebook_artifacts > 0
    }
    /// Comma-separated human-readable summary; zero counts are omitted.
    fn detail(&self) -> String {
        let mut parts = vec![format!("{} files", self.file_count)];
        if self.missing_notebook_artifacts > 0 {
            parts.push(format!(
                "{} notebook index/chunk artifacts missing",
                self.missing_notebook_artifacts
            ));
        }
        if self.stale_notebook_artifacts > 0 {
            parts.push(format!(
                "{} notebook index/chunk artifacts stale",
                self.stale_notebook_artifacts
            ));
        }
        if self.corrupt_json > 0 {
            parts.push(format!("{} corrupt json artifacts", self.corrupt_json));
        }
        push_samples_detail(&mut parts, &self.samples);
        parts.join(", ")
    }
}
impl SourceRootArtifactSummary {
    /// Any counter that should downgrade the check to a warning.
    fn has_diagnostics(&self) -> bool {
        self.source_missing > 0
            || self.text_missing > 0
            || self.text_stale > 0
            || self.chunk_missing > 0
            || self.chunk_stale > 0
            || self.chunk_corrupt > 0
            || self.embedding_missing > 0
            || self.embedding_stale > 0
            || self.embedding_corrupt > 0
            || self.embedding_mismatch > 0
    }
    /// Comma-separated summary; zero counts are omitted, and embedding
    /// counters only appear when a provider/model selection enabled them.
    fn detail(&self) -> String {
        let mut parts = vec![format!("{} indexed files", self.indexed_files)];
        push_count(&mut parts, self.source_missing, "source files missing");
        push_count(&mut parts, self.text_missing, "text artifacts missing");
        push_count(&mut parts, self.text_stale, "text artifacts stale");
        push_count(&mut parts, self.chunk_missing, "chunk artifacts missing");
        push_count(&mut parts, self.chunk_stale, "chunk artifacts stale");
        push_count(&mut parts, self.chunk_corrupt, "chunk artifacts corrupt");
        if self.embedding_checked {
            push_count(
                &mut parts,
                self.embedding_missing,
                "embedding artifacts missing",
            );
            push_count(
                &mut parts,
                self.embedding_stale,
                "embedding artifacts stale",
            );
            push_count(
                &mut parts,
                self.embedding_corrupt,
                "embedding artifacts corrupt",
            );
            push_count(
                &mut parts,
                self.embedding_mismatch,
                "embedding artifacts for another provider/model",
            );
        } else {
            parts.push("embeddings not checked without provider/model selection".to_string());
        }
        push_samples_detail(&mut parts, &self.samples);
        parts.join(", ")
    }
}
impl SearchCorpusSummary {
    /// Chunks that are present, parseable, fresh, and non-empty.
    fn ready_chunks(&self) -> usize {
        self.notebook_ready_chunks + self.source_ready_chunks
    }
    /// Any counter that should downgrade the check to a warning.
    fn has_diagnostics(&self) -> bool {
        self.notebook_chunk_missing > 0
            || self.notebook_chunk_stale > 0
            || self.notebook_chunk_corrupt > 0
            || self.notebook_chunk_empty > 0
            || self.source_missing > 0
            || self.source_chunk_missing > 0
            || self.source_chunk_stale > 0
            || self.source_chunk_corrupt > 0
            || self.source_chunk_empty > 0
    }
    /// Comma-separated summary; zero counts are omitted.
    fn detail(&self) -> String {
        let mut parts = vec![
            format!(
                "{} persisted search chunks ready: {} notebook, {} source-root",
                self.ready_chunks(),
                self.notebook_ready_chunks,
                self.source_ready_chunks
            ),
            format!("{} notes", self.notebook_notes),
            format!("{} indexed source files", self.source_files),
        ];
        push_count(
            &mut parts,
            self.notebook_chunk_missing,
            "notebook chunk artifacts missing",
        );
        push_count(
            &mut parts,
            self.notebook_chunk_stale,
            "notebook chunk artifacts stale",
        );
        push_count(
            &mut parts,
            self.notebook_chunk_corrupt,
            "notebook chunk artifacts corrupt",
        );
        push_count(
            &mut parts,
            self.notebook_chunk_empty,
            "notebook chunk artifacts empty",
        );
        push_count(&mut parts, self.source_missing, "source files missing");
        push_count(
            &mut parts,
            self.source_chunk_missing,
            "source-root chunk artifacts missing",
        );
        push_count(
            &mut parts,
            self.source_chunk_stale,
            "source-root chunk artifacts stale",
        );
        push_count(
            &mut parts,
            self.source_chunk_corrupt,
            "source-root chunk artifacts corrupt",
        );
        push_count(
            &mut parts,
            self.source_chunk_empty,
            "source-root chunk artifacts empty",
        );
        push_samples_detail(&mut parts, &self.samples);
        parts.join(", ")
    }
}
/// Append "<count> <label>" to `parts`, skipping zero counts entirely.
fn push_count(parts: &mut Vec<String>, count: usize, label: &str) {
    if count == 0 {
        return;
    }
    parts.push(format!("{count} {label}"));
}
/// Append a trailing "samples: …" part when any diagnostics were captured.
fn push_samples_detail(parts: &mut Vec<String>, samples: &DiagnosticSamples) {
    if samples.is_empty() {
        return;
    }
    parts.push(format!("samples: {}", samples.detail()));
}
/// Thin naming wrapper over `DiagnosticSamples::push` used by the inspect_*
/// helpers so call sites read uniformly.
fn push_diagnostic_sample(samples: &mut DiagnosticSamples, sample: impl Into<String>) {
    samples.push(sample);
}
/// Check one notebook note's derived artifacts (text index + chunk json),
/// recording missing/stale counts and sample paths on `summary`.
fn inspect_notebook_artifacts(
    summary: &mut DerivedArtifactSummary,
    derived_dir: &Path,
    notebook_dir: &Path,
    note_path: &Path,
) {
    // Notes outside the notebook root cannot be mapped to artifact paths.
    let Ok(relative) = note_path.strip_prefix(notebook_dir) else {
        return;
    };
    let index = notebook_index_artifact_path(derived_dir, relative);
    if !index.is_file() {
        summary.missing_notebook_artifacts += 1;
        push_diagnostic_sample(
            &mut summary.samples,
            format!("missing notebook index {}", display_path(relative)),
        );
    } else if !file_is_at_least_as_new(&index, note_path) {
        summary.stale_notebook_artifacts += 1;
        push_diagnostic_sample(
            &mut summary.samples,
            format!("stale notebook index {}", display_path(relative)),
        );
    }
    let chunks = notebook_chunk_artifact_path(derived_dir, relative);
    if !chunks.is_file() {
        summary.missing_notebook_artifacts += 1;
        push_diagnostic_sample(
            &mut summary.samples,
            format!("missing notebook chunks {}", display_path(relative)),
        );
        return;
    }
    // Chunks derive from the index when it is current; otherwise compare
    // against the note itself so a stale index does not mask stale chunks.
    let chunk_baseline = if file_is_at_least_as_new(&index, note_path) {
        index.as_path()
    } else {
        note_path
    };
    if !file_is_at_least_as_new(&chunks, chunk_baseline) {
        summary.stale_notebook_artifacts += 1;
        push_diagnostic_sample(
            &mut summary.samples,
            format!("stale notebook chunks {}", display_path(relative)),
        );
    }
}
/// Check one indexed source file's derived artifacts: extracted text, chunk
/// json, and (when a provider/model is selected) the embedding artifact.
/// All findings are accumulated on `summary`.
fn inspect_source_root_artifacts(
    summary: &mut SourceRootArtifactSummary,
    derived_dir: &Path,
    file: &IndexedSourceFileRow,
    provider_model: Option<&(String, String)>,
) {
    let source_label = format!(
        "{}/{}",
        file.source_root_id,
        display_path(Path::new(&file.relative_path))
    );
    let source = Path::new(&file.absolute_path);
    if !source.is_file() {
        // Without the source file, freshness comparisons are meaningless.
        summary.source_missing += 1;
        push_diagnostic_sample(
            &mut summary.samples,
            format!("missing source {source_label}"),
        );
        return;
    }
    let relative = Path::new(&file.relative_path);
    let text = source_root_text_artifact_path(derived_dir, &file.source_root_id, relative);
    let text_current = text.is_file() && file_is_at_least_as_new(&text, source);
    if !text.is_file() {
        summary.text_missing += 1;
        push_diagnostic_sample(
            &mut summary.samples,
            format!("missing text artifact {source_label}"),
        );
    } else if !text_current {
        summary.text_stale += 1;
        push_diagnostic_sample(
            &mut summary.samples,
            format!("stale text artifact {source_label}"),
        );
    }
    let chunks = source_root_chunk_artifact_path(derived_dir, &file.source_root_id, relative);
    // Chunks derive from the text artifact when it is current; otherwise
    // compare directly against the source file.
    let chunk_baseline = if text_current { text.as_path() } else { source };
    let chunk_current = chunks.is_file() && file_is_at_least_as_new(&chunks, chunk_baseline);
    if !chunks.is_file() {
        summary.chunk_missing += 1;
        push_diagnostic_sample(
            &mut summary.samples,
            format!("missing chunk artifact {source_label}"),
        );
    } else {
        if !chunk_current {
            summary.chunk_stale += 1;
            push_diagnostic_sample(
                &mut summary.samples,
                format!("stale chunk artifact {source_label}"),
            );
        }
        if read_chunk_texts(&chunks).is_err() {
            summary.chunk_corrupt += 1;
            push_diagnostic_sample(
                &mut summary.samples,
                format!("corrupt chunk artifact {source_label}"),
            );
        }
    }
    // Embedding checks only make sense for the currently selected provider.
    let Some((provider, model_id)) = provider_model else {
        return;
    };
    let embedding =
        source_root_embedding_artifact_path(derived_dir, &file.source_root_id, relative);
    if !embedding.is_file() {
        summary.embedding_missing += 1;
        push_diagnostic_sample(
            &mut summary.samples,
            format!("missing embedding artifact {source_label}"),
        );
        return;
    }
    // Stale when its chunk input is stale, or when it predates the chunks.
    if !chunk_current || !file_is_at_least_as_new(&embedding, &chunks) {
        summary.embedding_stale += 1;
        push_diagnostic_sample(
            &mut summary.samples,
            format!("stale embedding artifact {source_label}"),
        );
    }
    match read_embedding_metadata(&embedding) {
        Ok(Some((artifact_provider, artifact_model))) => {
            if &artifact_provider != provider || &artifact_model != model_id {
                summary.embedding_mismatch += 1;
                push_diagnostic_sample(
                    &mut summary.samples,
                    format!("embedding provider/model mismatch {source_label}"),
                );
            }
        }
        Ok(None) => {
            // Vectors parsed but provider/model metadata is absent; counted
            // as a mismatch.
            summary.embedding_mismatch += 1;
            push_diagnostic_sample(
                &mut summary.samples,
                format!("embedding metadata missing {source_label}"),
            );
        }
        Err(_) => {
            summary.embedding_corrupt += 1;
            push_diagnostic_sample(
                &mut summary.samples,
                format!("corrupt embedding artifact {source_label}"),
            );
        }
    }
}
/// Tally one notebook note's contribution to the search corpus: chunks must
/// exist, parse, be non-empty, and be at least as new as their inputs for
/// their items to count as ready.
fn inspect_notebook_search_corpus(
    summary: &mut SearchCorpusSummary,
    derived_dir: &Path,
    notebook_dir: &Path,
    note_path: &Path,
) {
    summary.notebook_notes += 1;
    // Notes outside the notebook root cannot be mapped to artifact paths.
    let Ok(relative) = note_path.strip_prefix(notebook_dir) else {
        return;
    };
    let chunks = notebook_chunk_artifact_path(derived_dir, relative);
    if !chunks.is_file() {
        summary.notebook_chunk_missing += 1;
        push_diagnostic_sample(
            &mut summary.samples,
            format!("missing notebook chunks {}", display_path(relative)),
        );
        return;
    }
    // Compare against the index artifact when it is current, else the note.
    let index = notebook_index_artifact_path(derived_dir, relative);
    let chunk_baseline = if file_is_at_least_as_new(&index, note_path) {
        index.as_path()
    } else {
        note_path
    };
    let chunk_current = file_is_at_least_as_new(&chunks, chunk_baseline);
    if !chunk_current {
        summary.notebook_chunk_stale += 1;
        push_diagnostic_sample(
            &mut summary.samples,
            format!("stale notebook chunks {}", display_path(relative)),
        );
    }
    match read_chunk_texts(&chunks) {
        Ok(items) if items.is_empty() => {
            summary.notebook_chunk_empty += 1;
            push_diagnostic_sample(
                &mut summary.samples,
                format!("empty notebook chunks {}", display_path(relative)),
            );
        }
        // Only fresh chunks count towards the ready total.
        Ok(items) if chunk_current => {
            summary.notebook_ready_chunks += items.len();
        }
        Ok(_) => {}
        Err(_) => {
            summary.notebook_chunk_corrupt += 1;
            push_diagnostic_sample(
                &mut summary.samples,
                format!("corrupt notebook chunks {}", display_path(relative)),
            );
        }
    }
}
/// Tally one indexed source file's contribution to the search corpus: its
/// chunk artifact must exist, parse, be non-empty, and be at least as new as
/// the source file for its items to count as ready.
fn inspect_source_root_search_corpus(
    summary: &mut SearchCorpusSummary,
    derived_dir: &Path,
    file: &IndexedSourceFileRow,
) {
    let source_label = format!(
        "{}/{}",
        file.source_root_id,
        display_path(Path::new(&file.relative_path))
    );
    let source = Path::new(&file.absolute_path);
    if !source.is_file() {
        // Without the source file, freshness comparisons are meaningless.
        summary.source_missing += 1;
        push_diagnostic_sample(
            &mut summary.samples,
            format!("missing source {source_label}"),
        );
        return;
    }
    let relative = Path::new(&file.relative_path);
    let chunks = source_root_chunk_artifact_path(derived_dir, &file.source_root_id, relative);
    if !chunks.is_file() {
        summary.source_chunk_missing += 1;
        push_diagnostic_sample(
            &mut summary.samples,
            format!("missing source-root chunks {source_label}"),
        );
        return;
    }
    let chunk_current = file_is_at_least_as_new(&chunks, source);
    if !chunk_current {
        summary.source_chunk_stale += 1;
        push_diagnostic_sample(
            &mut summary.samples,
            format!("stale source-root chunks {source_label}"),
        );
    }
    match read_chunk_texts(&chunks) {
        Ok(items) if items.is_empty() => {
            summary.source_chunk_empty += 1;
            push_diagnostic_sample(
                &mut summary.samples,
                format!("empty source-root chunks {source_label}"),
            );
        }
        // Only fresh chunks count towards the ready total.
        Ok(items) if chunk_current => {
            summary.source_ready_chunks += items.len();
        }
        Ok(_) => {}
        Err(_) => {
            summary.source_chunk_corrupt += 1;
            push_diagnostic_sample(
                &mut summary.samples,
                format!("corrupt source-root chunks {source_label}"),
            );
        }
    }
}
/// Location of the extracted-text index artifact for a notebook note:
/// the note's relative path under `notebook-index`, with a .txt extension.
fn notebook_index_artifact_path(derived_dir: &Path, relative: &Path) -> PathBuf {
    let mut index_path = derived_dir.join("notebook-index");
    index_path.push(relative);
    index_path.set_extension("txt");
    index_path
}
/// Location of the chunk artifact for a notebook note: the note's relative
/// path under `notebook-chunks`, with a .json extension.
fn notebook_chunk_artifact_path(derived_dir: &Path, relative: &Path) -> PathBuf {
    let mut chunk_path = derived_dir.join("notebook-chunks");
    chunk_path.push(relative);
    chunk_path.set_extension("json");
    chunk_path
}
/// Location of the extracted-text artifact for a source-root file:
/// `<derived>/source-root-text/<root-id>/<relative>` with a .txt extension.
fn source_root_text_artifact_path(
    derived_dir: &Path,
    source_root_id: &str,
    relative: &Path,
) -> PathBuf {
    let mut text_path = derived_dir.join("source-root-text");
    text_path.push(source_root_id);
    text_path.push(relative);
    text_path.set_extension("txt");
    text_path
}
/// Location of the chunk artifact for a source-root file:
/// `<derived>/source-root-chunks/<root-id>/<relative>` with a .json extension.
fn source_root_chunk_artifact_path(
    derived_dir: &Path,
    source_root_id: &str,
    relative: &Path,
) -> PathBuf {
    let mut chunk_path = derived_dir.join("source-root-chunks");
    chunk_path.push(source_root_id);
    chunk_path.push(relative);
    chunk_path.set_extension("json");
    chunk_path
}
/// Location of the embedding artifact for a source-root file:
/// `<derived>/source-root-embeddings/<root-id>/<relative>` with a .json extension.
fn source_root_embedding_artifact_path(
    derived_dir: &Path,
    source_root_id: &str,
    relative: &Path,
) -> PathBuf {
    let mut embedding_path = derived_dir.join("source-root-embeddings");
    embedding_path.push(source_root_id);
    embedding_path.push(relative);
    embedding_path.set_extension("json");
    embedding_path
}
/// True for files the studio treats as notebook notes (.md or .typ).
fn is_notebook_note(path: &Path) -> bool {
    let extension = path.extension().and_then(|extension| extension.to_str());
    extension == Some("md") || extension == Some("typ")
}
/// True when the path's extension is exactly "json".
fn is_json_file(path: &Path) -> bool {
    path.extension().and_then(|extension| extension.to_str()) == Some("json")
}
/// Best-effort check that the file exists, is valid UTF-8, and parses as JSON.
fn is_readable_json(path: &Path) -> bool {
    let Ok(content) = std::fs::read_to_string(path) else {
        return false;
    };
    serde_json::from_str::<serde_json::Value>(&content).is_ok()
}
/// Parse a chunk artifact — a JSON array of objects, each carrying a string
/// "text" field — returning the texts in order, or a descriptive error on
/// the first unreadable/malformed item.
fn read_chunk_texts(path: &Path) -> std::result::Result<Vec<String>, String> {
    let content =
        std::fs::read_to_string(path).map_err(|error| format!("failed to read json: {error}"))?;
    let items = serde_json::from_str::<Vec<serde_json::Value>>(&content)
        .map_err(|error| format!("failed to parse chunk json: {error}"))?;
    let mut texts = Vec::with_capacity(items.len());
    for item in items {
        let text = item
            .get("text")
            .and_then(|value| value.as_str())
            .ok_or_else(|| "chunk item missing text string".to_string())?;
        texts.push(text.to_string());
    }
    Ok(texts)
}
/// Validate an embedding artifact and extract its (provider, model_id) pair.
/// Returns Ok(None) when the vectors are valid but provider/model metadata
/// is absent; Err(..) for unreadable/unparsable content or non-numeric
/// vectors.
fn read_embedding_metadata(path: &Path) -> std::result::Result<Option<(String, String)>, String> {
    let content =
        std::fs::read_to_string(path).map_err(|error| format!("failed to read json: {error}"))?;
    let value = serde_json::from_str::<serde_json::Value>(&content)
        .map_err(|error| format!("failed to parse json: {error}"))?;
    let vectors = value
        .get("vectors")
        .and_then(|value| value.as_array())
        .ok_or_else(|| "embedding artifact missing vectors array".to_string())?;
    if vectors.iter().any(|vector| !is_numeric_vector(vector)) {
        return Err("embedding artifact contains a non-numeric vector".to_string());
    }
    // Metadata is optional: valid vectors without provider/model yield None.
    let Some(provider) = value.get("provider").and_then(|value| value.as_str()) else {
        return Ok(None);
    };
    let Some(model_id) = value.get("model_id").and_then(|value| value.as_str()) else {
        return Ok(None);
    };
    Ok(Some((provider.to_string(), model_id.to_string())))
}
/// True when `value` is a JSON array whose elements are all numbers.
fn is_numeric_vector(value: &serde_json::Value) -> bool {
    match value.as_array() {
        Some(items) => items.iter().all(serde_json::Value::is_number),
        None => false,
    }
}
/// Compare modification times: true when `artifact` is no older than
/// `source`. Any metadata failure (missing file, platform without mtime)
/// conservatively counts as stale.
fn file_is_at_least_as_new(artifact: &Path, source: &Path) -> bool {
    let modified = |path: &Path| path.metadata().and_then(|metadata| metadata.modified());
    match (modified(artifact), modified(source)) {
        (Ok(artifact_modified), Ok(source_modified)) => artifact_modified >= source_modified,
        _ => false,
    }
}
/// Render `path` relative to `root` when possible, otherwise as-is.
fn display_relative_path(root: &Path, path: &Path) -> String {
    match path.strip_prefix(root) {
        Ok(relative) => display_path(relative),
        Err(_) => display_path(path),
    }
}

/// Lossy, forward-slash-normalized rendering used in report lines so output
/// is stable across platforms.
fn display_path(path: &Path) -> String {
    path.to_string_lossy().replace('\\', "/")
}
/// Depth-first walk of `root`, invoking `on_file` for every regular file.
/// Symlinks are skipped entirely, which also prevents directory cycles.
/// Errors are rendered as strings that include the offending path.
fn visit_files<F>(root: &Path, on_file: &mut F) -> std::result::Result<(), String>
where
    F: FnMut(&Path),
{
    for entry in std::fs::read_dir(root)
        .map_err(|error| format!("failed to read {}: {error}", root.display()))?
    {
        let entry =
            entry.map_err(|error| format!("failed to read {} entry: {error}", root.display()))?;
        let file_type = entry
            .file_type()
            .map_err(|error| format!("failed to inspect {}: {error}", entry.path().display()))?;
        if file_type.is_symlink() {
            continue;
        }
        if file_type.is_dir() {
            visit_files(&entry.path(), on_file)?;
        } else if file_type.is_file() {
            on_file(&entry.path());
        }
    }
    Ok(())
}
/// Launch the platform's default browser on `url` without waiting for it.
/// Exactly one cfg branch compiles per target; the trailing Ok covers
/// targets matching none of them (hence the unreachable_code allowance).
fn open_in_browser(url: &str) -> Result<()> {
    #[cfg(target_os = "windows")]
    {
        // `start` treats its first quoted argument as a window title, so an
        // explicit empty string is passed before the URL.
        Command::new("cmd").args(["/C", "start", "", url]).spawn()?;
        return Ok(());
    }
    #[cfg(target_os = "macos")]
    {
        Command::new("open").arg(url).spawn()?;
        return Ok(());
    }
    #[cfg(all(unix, not(target_os = "macos")))]
    {
        Command::new("xdg-open").arg(url).spawn()?;
        return Ok(());
    }
    #[allow(unreachable_code)]
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    use soma_studio_core::ProviderSelectionResponse;
    use std::path::PathBuf;

    /// An empty temp dir should flag every runtime path as missing while
    /// still emitting every report section header.
    #[tokio::test]
    async fn doctor_report_marks_missing_runtime_paths() {
        let temp_dir = unique_temp_dir("missing");
        std::fs::create_dir_all(&temp_dir).expect("temp dir");
        let config = test_config(&temp_dir);
        let report = build_doctor_report(&config).await;
        assert!(report.contains("data dir:"));
        assert!(report.contains("missing"));
        assert!(report.contains("web shell:"));
        assert!(report.contains("db schema:"));
        assert!(report.contains("provider tests:"));
        assert!(report.contains("not available until db is created"));
        assert!(report.contains("search index:"));
        assert!(report.contains("search corpus:"));
        let _ = std::fs::remove_dir_all(temp_dir);
    }

    /// With directories, a web shell, notes, and storage rows in place the
    /// report should surface provider, schema, notebook, and search state.
    #[tokio::test]
    async fn doctor_report_includes_notebook_and_storage_state() {
        let temp_dir = unique_temp_dir("ready");
        let config = test_config(&temp_dir);
        config.ensure_directories().expect("directories");
        std::fs::create_dir_all(&config.web_build_dir).expect("web build dir");
        std::fs::write(&config.web_shell_file, "<main>Soma Studio</main>").expect("web shell");
        std::fs::write(config.notebook_dir.join("daily.md"), "# Daily").expect("markdown note");
        std::fs::write(config.notebook_dir.join("report.typ"), "= Report").expect("typst note");
        let storage = StudioStorage::open(&config).await.expect("storage");
        storage
            .save_provider_selection(&ProviderSelectionResponse {
                selected_provider: Some("ollama".to_string()),
                selected_model_id: Some("embed-model".to_string()),
            })
            .await
            .expect("provider selection");
        storage
            .record_provider_test("ollama", true, "provider ok")
            .await
            .expect("provider test");
        let report = build_doctor_report(&config).await;
        assert!(report.contains("provider selection:"));
        assert!(report.contains("ollama/embed-model"));
        assert!(report.contains("provider tests:"));
        assert!(report.contains("1 passed, 0 failed, 0 unknown"));
        assert!(report.contains("db schema:"));
        assert!(report.contains(&format!(
            "version {0} of {0}",
            soma_studio_server::STORAGE_SCHEMA_VERSION
        )));
        assert!(report.contains("notebook notes:"));
        assert!(report.contains("2 notes: 1 markdown, 1 typst"));
        assert!(report.contains("web shell:"));
        assert!(report.contains("search index:"));
        assert!(report.contains("search corpus:"));
        assert!(report.contains("0 persisted search chunks ready"));
        let _ = std::fs::remove_dir_all(temp_dir);
    }

    /// A provider row that was never tested counts as "unknown" and should
    /// downgrade the check to a warning.
    #[test]
    fn provider_tests_with_unknown_results_are_warnings() {
        let check = provider_tests_check(&[soma_studio_server::ProviderStatusRow {
            provider: "ollama".to_string(),
            last_test_ok: None,
            last_test_detail: None,
            last_tested_at: None,
        }]);
        assert_eq!(check.status, "warn");
        assert_eq!(check.detail, "0 passed, 0 failed, 1 unknown");
    }

    /// When on-disk web assets are missing but embedded assets exist, the
    /// build-dir check warns while the shell check passes via the embedded
    /// spa.html.
    #[test]
    fn web_shell_check_accepts_embedded_runtime_assets() {
        let temp_dir = unique_temp_dir("embedded-web");
        let checks = web_asset_checks(
            &temp_dir.join("missing-web"),
            &temp_dir.join("missing-web").join("spa.html"),
            true,
            7,
        );
        assert_eq!(checks[0].status, "warn");
        assert!(checks[0].detail.contains("7 embedded assets available"));
        assert_eq!(checks[1].status, "ok");
        assert!(checks[1].detail.contains("embedded spa.html available"));
    }

    /// Exercises all three derived-artifact failure modes at once: missing
    /// artifacts for a new note, stale artifacts for an edited note, and a
    /// corrupt JSON embedding file.
    #[test]
    fn derived_artifacts_check_reports_missing_stale_and_corrupt_artifacts() {
        let temp_dir = unique_temp_dir("derived-diagnostics");
        let config = test_config(&temp_dir);
        config.ensure_directories().expect("directories");
        let note = config.notebook_dir.join("topic.md");
        std::fs::write(&note, "# Topic\n\nold text").expect("note");
        std::fs::write(config.notebook_dir.join("missing.typ"), "= Missing").expect("typst note");
        let relative = Path::new("topic.md");
        let index = notebook_index_artifact_path(&config.derived_dir, relative);
        let chunks = notebook_chunk_artifact_path(&config.derived_dir, relative);
        std::fs::create_dir_all(index.parent().expect("index parent")).expect("index dir");
        std::fs::create_dir_all(chunks.parent().expect("chunk parent")).expect("chunk dir");
        std::fs::write(&index, "old text").expect("index");
        std::fs::write(&chunks, r#"[{"index":0,"text":"old text"}]"#).expect("chunks");
        let corrupt = config
            .derived_dir
            .join("source-root-embeddings")
            .join("root")
            .join("topic.json");
        std::fs::create_dir_all(corrupt.parent().expect("corrupt parent")).expect("corrupt dir");
        std::fs::write(&corrupt, "{not json").expect("corrupt json");
        // Sleep so the rewritten note's mtime is strictly newer than the
        // artifacts written above, making them count as stale.
        std::thread::sleep(std::time::Duration::from_millis(30));
        std::fs::write(&note, "# Topic\n\nfresh text").expect("fresh note");
        let check = derived_artifacts_check(&config.derived_dir, &config.notebook_dir);
        assert_eq!(check.status, "warn");
        assert!(
            check
                .detail
                .contains("2 notebook index/chunk artifacts missing")
        );
        assert!(
            check
                .detail
                .contains("2 notebook index/chunk artifacts stale")
        );
        assert!(check.detail.contains("1 corrupt json artifacts"));
        assert!(check.detail.contains("samples:"));
        assert!(
            check
                .detail
                .contains("source-root-embeddings/root/topic.json")
        );
        assert!(check.detail.contains("2 more omitted"));
        let _ = std::fs::remove_dir_all(temp_dir);
    }

    /// A note with no derived artifacts at all should warn about both its
    /// missing index and chunk files.
    #[test]
    fn derived_artifacts_check_reports_missing_notebook_artifacts_without_files() {
        let temp_dir = unique_temp_dir("derived-missing-only");
        let config = test_config(&temp_dir);
        config.ensure_directories().expect("directories");
        std::fs::write(config.notebook_dir.join("draft.md"), "# Draft").expect("note");
        let check = derived_artifacts_check(&config.derived_dir, &config.notebook_dir);
        assert_eq!(check.status, "warn");
        assert!(check.detail.contains("0 files"));
        assert!(
            check
                .detail
                .contains("2 notebook index/chunk artifacts missing")
        );
        let _ = std::fs::remove_dir_all(temp_dir);
    }

    /// An indexed source file without any derived artifacts should warn
    /// about missing text/chunk artifacts; embeddings are skipped when no
    /// provider/model selection is supplied.
    #[test]
    fn source_root_artifacts_check_reports_missing_file_unit_artifacts() {
        let temp_dir = unique_temp_dir("source-root-missing-artifacts");
        let config = test_config(&temp_dir);
        config.ensure_directories().expect("directories");
        let source = temp_dir.join("source").join("topic.md");
        std::fs::create_dir_all(source.parent().expect("source parent")).expect("source dir");
        std::fs::write(&source, "# Topic").expect("source");
        let files = vec![indexed_source_file("root", "topic.md", &source)];
        let check = source_root_artifacts_check(&config.derived_dir, &files, None);
        assert_eq!(check.status, "warn");
        assert!(check.detail.contains("1 indexed files"));
        assert!(check.detail.contains("1 text artifacts missing"));
        assert!(check.detail.contains("1 chunk artifacts missing"));
        assert!(check.detail.contains("missing text artifact root/topic.md"));
        assert!(
            check
                .detail
                .contains("embeddings not checked without provider/model selection")
        );
        let _ = std::fs::remove_dir_all(temp_dir);
    }

    /// A chunk artifact with the wrong JSON shape is corrupt, and an
    /// embedding written for a different model is flagged as mismatched.
    #[test]
    fn source_root_artifacts_check_reports_corrupt_and_mismatched_embeddings() {
        let temp_dir = unique_temp_dir("source-root-corrupt-artifacts");
        let config = test_config(&temp_dir);
        config.ensure_directories().expect("directories");
        let source = temp_dir.join("source").join("topic.md");
        std::fs::create_dir_all(source.parent().expect("source parent")).expect("source dir");
        std::fs::write(&source, "# Topic").expect("source");
        let files = vec![indexed_source_file("root", "topic.md", &source)];
        let relative = Path::new("topic.md");
        let text = source_root_text_artifact_path(&config.derived_dir, "root", relative);
        let chunks = source_root_chunk_artifact_path(&config.derived_dir, "root", relative);
        let embedding = source_root_embedding_artifact_path(&config.derived_dir, "root", relative);
        std::fs::create_dir_all(text.parent().expect("text parent")).expect("text dir");
        std::fs::create_dir_all(chunks.parent().expect("chunk parent")).expect("chunk dir");
        std::fs::create_dir_all(embedding.parent().expect("embedding parent"))
            .expect("embedding dir");
        std::fs::write(&text, "Topic").expect("text");
        // Deliberately a JSON object, not the expected array of chunks.
        std::fs::write(&chunks, r#"{"index":0,"text":"Topic"}"#).expect("chunks");
        std::fs::write(
            &embedding,
            r#"{"provider":"ollama","model_id":"other","vectors":[[0.1,0.2]]}"#,
        )
        .expect("embedding");
        let check = source_root_artifacts_check(
            &config.derived_dir,
            &files,
            Some(("ollama".to_string(), "embed-model".to_string())),
        );
        assert_eq!(check.status, "warn");
        assert!(check.detail.contains("1 chunk artifacts corrupt"));
        assert!(
            check
                .detail
                .contains("corrupt chunk artifact root/topic.md")
        );
        assert!(
            check
                .detail
                .contains("1 embedding artifacts for another provider/model")
        );
        let _ = std::fs::remove_dir_all(temp_dir);
    }

    /// An embedding file whose vectors are not arrays of numbers should be
    /// reported as corrupt even when the provider/model match.
    #[test]
    fn source_root_artifacts_check_reports_malformed_embedding_vectors() {
        let temp_dir = unique_temp_dir("source-root-malformed-embedding");
        let config = test_config(&temp_dir);
        config.ensure_directories().expect("directories");
        let source = temp_dir.join("source").join("topic.md");
        std::fs::create_dir_all(source.parent().expect("source parent")).expect("source dir");
        std::fs::write(&source, "# Topic").expect("source");
        let files = vec![indexed_source_file("root", "topic.md", &source)];
        let relative = Path::new("topic.md");
        let text = source_root_text_artifact_path(&config.derived_dir, "root", relative);
        let chunks = source_root_chunk_artifact_path(&config.derived_dir, "root", relative);
        let embedding = source_root_embedding_artifact_path(&config.derived_dir, "root", relative);
        std::fs::create_dir_all(text.parent().expect("text parent")).expect("text dir");
        std::fs::create_dir_all(chunks.parent().expect("chunk parent")).expect("chunk dir");
        std::fs::create_dir_all(embedding.parent().expect("embedding parent"))
            .expect("embedding dir");
        std::fs::write(&text, "Topic").expect("text");
        std::fs::write(&chunks, r#"[{"index":0,"text":"Topic"}]"#).expect("chunks");
        std::fs::write(
            &embedding,
            r#"{"provider":"ollama","model_id":"embed-model","vectors":[{"bad":true}]}"#,
        )
        .expect("embedding");
        let check = source_root_artifacts_check(
            &config.derived_dir,
            &files,
            Some(("ollama".to_string(), "embed-model".to_string())),
        );
        assert_eq!(check.status, "warn");
        assert!(check.detail.contains("1 embedding artifacts corrupt"));
        let _ = std::fs::remove_dir_all(temp_dir);
    }

    /// Valid notebook and source-root chunk artifacts should report an
    /// "ok" corpus with the combined chunk counts.
    #[test]
    fn search_corpus_check_reports_ready_notebook_and_source_root_chunks() {
        let temp_dir = unique_temp_dir("search-corpus-ready");
        let config = test_config(&temp_dir);
        config.ensure_directories().expect("directories");
        let note = config.notebook_dir.join("topic.md");
        std::fs::write(&note, "# Topic\n\nsearch text").expect("note");
        let notebook_chunks =
            notebook_chunk_artifact_path(&config.derived_dir, Path::new("topic.md"));
        std::fs::create_dir_all(notebook_chunks.parent().expect("notebook chunk parent"))
            .expect("notebook chunk dir");
        std::fs::write(
            &notebook_chunks,
            r#"[{"index":0,"text":"search text"},{"index":1,"text":"more text"}]"#,
        )
        .expect("notebook chunks");
        let source = temp_dir.join("source").join("alpha.md");
        std::fs::create_dir_all(source.parent().expect("source parent")).expect("source dir");
        std::fs::write(&source, "# Alpha\n\nsource text").expect("source");
        let files = vec![indexed_source_file("root", "alpha.md", &source)];
        let source_chunks =
            source_root_chunk_artifact_path(&config.derived_dir, "root", Path::new("alpha.md"));
        std::fs::create_dir_all(source_chunks.parent().expect("source chunk parent"))
            .expect("source chunk dir");
        std::fs::write(&source_chunks, r#"[{"index":0,"text":"source text"}]"#)
            .expect("source chunks");
        let check = search_corpus_check(&config.derived_dir, &config.notebook_dir, &files);
        assert_eq!(check.status, "ok");
        assert!(
            check
                .detail
                .contains("3 persisted search chunks ready: 2 notebook, 1 source-root")
        );
        assert!(check.detail.contains("1 notes"));
        assert!(check.detail.contains("1 indexed source files"));
        let _ = std::fs::remove_dir_all(temp_dir);
    }

    /// Exercises the corpus check's three failure modes: a note with no
    /// chunks, a note with malformed chunks, and a source file edited after
    /// its chunks were written (stale).
    #[test]
    fn search_corpus_check_reports_missing_stale_and_corrupt_chunks() {
        let temp_dir = unique_temp_dir("search-corpus-diagnostics");
        let config = test_config(&temp_dir);
        config.ensure_directories().expect("directories");
        std::fs::write(config.notebook_dir.join("missing.md"), "# Missing").expect("missing note");
        let corrupt_note = config.notebook_dir.join("corrupt.md");
        std::fs::write(&corrupt_note, "# Corrupt").expect("corrupt note");
        let corrupt_chunks =
            notebook_chunk_artifact_path(&config.derived_dir, Path::new("corrupt.md"));
        std::fs::create_dir_all(corrupt_chunks.parent().expect("corrupt chunk parent"))
            .expect("corrupt chunk dir");
        std::fs::write(&corrupt_chunks, r#"{"index":0,"text":"bad shape"}"#)
            .expect("corrupt chunks");
        let source = temp_dir.join("source").join("alpha.md");
        std::fs::create_dir_all(source.parent().expect("source parent")).expect("source dir");
        std::fs::write(&source, "# Alpha\n\nold").expect("source");
        let source_chunks =
            source_root_chunk_artifact_path(&config.derived_dir, "root", Path::new("alpha.md"));
        std::fs::create_dir_all(source_chunks.parent().expect("source chunk parent"))
            .expect("source chunk dir");
        std::fs::write(&source_chunks, r#"[{"index":0,"text":"old"}]"#).expect("source chunks");
        // Make the source file strictly newer than its chunk artifact.
        std::thread::sleep(std::time::Duration::from_millis(30));
        std::fs::write(&source, "# Alpha\n\nfresh").expect("fresh source");
        let files = vec![indexed_source_file("root", "alpha.md", &source)];
        let check = search_corpus_check(&config.derived_dir, &config.notebook_dir, &files);
        assert_eq!(check.status, "warn");
        assert!(check.detail.contains("1 notebook chunk artifacts missing"));
        assert!(check.detail.contains("1 notebook chunk artifacts corrupt"));
        assert!(check.detail.contains("1 source-root chunk artifacts stale"));
        assert!(check.detail.contains("samples:"));
        let _ = std::fs::remove_dir_all(temp_dir);
    }

    /// Builds a storage row describing one indexed file under a source root.
    fn indexed_source_file(
        source_root_id: &str,
        relative_path: &str,
        absolute_path: &Path,
    ) -> IndexedSourceFileRow {
        IndexedSourceFileRow {
            source_root_id: source_root_id.to_string(),
            relative_path: relative_path.to_string(),
            absolute_path: absolute_path.to_string_lossy().to_string(),
        }
    }

    /// Builds an `AppConfig` rooted at `temp_dir` with an ephemeral bind
    /// address so tests never touch real user data.
    fn test_config(temp_dir: &Path) -> AppConfig {
        AppConfig {
            app_name: "Soma Studio".to_string(),
            bind_addr: "127.0.0.1:0".to_string(),
            project_root: temp_dir.to_path_buf(),
            data_dir: temp_dir.join("data"),
            derived_dir: temp_dir.join("data").join("derived"),
            notebook_dir: temp_dir.join("data").join("notebook"),
            user_assets_dir: temp_dir.join("data").join("assets"),
            db_path: temp_dir.join("data").join("soma-studio.db"),
            web_build_dir: temp_dir.join("web").join("build"),
            web_shell_file: temp_dir.join("web").join("build").join("spa.html"),
        }
    }

    /// Produces a temp-dir path unique per label, process, and nanosecond so
    /// parallel test runs cannot collide.
    fn unique_temp_dir(label: &str) -> PathBuf {
        let nanos = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .expect("system time")
            .as_nanos();
        std::env::temp_dir().join(format!(
            "soma-studio-cli-doctor-{label}-{}-{nanos}",
            std::process::id()
        ))
    }
}