use toml_edit::{Array, DocumentMut, Item, RawString, Table, Value};
/// Desired ordering of top-level config sections in emitted files; consumed
/// by `reorder_sections`. Sections not listed here keep their original
/// relative order and are appended after the canonical ones.
static CANONICAL_ORDER: &[&str] = &[
    "agent",
    "llm",
    "skills",
    "memory",
    "index",
    "tools",
    "mcp",
    "telegram",
    "discord",
    "slack",
    "a2a",
    "acp",
    "gateway",
    "daemon",
    "scheduler",
    "orchestration",
    "classifiers",
    "security",
    "vault",
    "timeouts",
    "cost",
    "observability",
    "debug",
    "logging",
    "tui",
    "agents",
    "experiments",
    "lsp",
];
/// Errors produced by the config-migration routines in this module.
#[derive(Debug, thiserror::Error)]
pub enum MigrateError {
    /// The user-supplied TOML text failed to parse.
    #[error("failed to parse input config: {0}")]
    Parse(#[from] toml_edit::TomlError),
    /// The embedded reference config failed to parse (a packaging bug, not a
    /// user error — hence no `#[from]`, it is wrapped explicitly).
    #[error("failed to parse reference config: {0}")]
    Reference(toml_edit::TomlError),
    /// The document parsed but lacked the structure a migration expected
    /// (e.g. a key that should be a table is a scalar).
    #[error("migration failed: invalid TOML structure — {0}")]
    InvalidStructure(&'static str),
}
/// Outcome of a single migration pass.
#[derive(Debug)]
pub struct MigrationResult {
    /// The full rewritten config text.
    pub output: String,
    /// Number of keys/sections that were added (as comments or real entries).
    pub added_count: usize,
    /// Identifiers of the sections/keys that were added (dotted paths).
    pub sections_added: Vec<String>,
}
/// Merges defaults that are missing from a user config into it as
/// commented-out suggestions, using an embedded reference config as the
/// source of truth.
pub struct ConfigMigrator {
    /// Text of the bundled reference config (embedded at compile time in
    /// [`ConfigMigrator::new`]).
    reference_src: &'static str,
}
impl Default for ConfigMigrator {
    /// Equivalent to [`ConfigMigrator::new`].
    fn default() -> Self {
        Self::new()
    }
}
impl ConfigMigrator {
    /// Creates a migrator backed by the bundled reference config
    /// (`config/default.toml`, embedded at compile time).
    #[must_use]
    pub fn new() -> Self {
        Self {
            reference_src: include_str!("../config/default.toml"),
        }
    }

    /// Merges sections and keys present in the reference config but missing
    /// from `user_toml` into the output as commented-out lines.
    ///
    /// Existing user values are never overwritten; additions appear as
    /// `# key = value` / `# [section]` comments so the user can opt in. The
    /// result is reordered according to [`CANONICAL_ORDER`].
    ///
    /// # Errors
    ///
    /// Returns [`MigrateError::Parse`] when `user_toml` is invalid TOML, and
    /// [`MigrateError::Reference`] when the embedded reference config is
    /// invalid.
    pub fn migrate(&self, user_toml: &str) -> Result<MigrationResult, MigrateError> {
        let reference_doc = self
            .reference_src
            .parse::<DocumentMut>()
            .map_err(MigrateError::Reference)?;
        let mut user_doc = user_toml.parse::<DocumentMut>()?;
        let mut added_count = 0usize;
        let mut sections_added: Vec<String> = Vec::new();
        for (key, ref_item) in reference_doc.as_table() {
            if ref_item.is_table() {
                let ref_table = ref_item.as_table().expect("is_table checked above");
                if user_doc.contains_key(key) {
                    // Section exists: merge missing keys into it as comments.
                    if let Some(user_table) = user_doc.get_mut(key).and_then(Item::as_table_mut) {
                        added_count += merge_table_commented(user_table, ref_table, key);
                    }
                } else {
                    // Skip sections the user already keeps as a commented block.
                    if user_toml.contains(&format!("# [{key}]")) {
                        continue;
                    }
                    let commented = commented_table_block(key, ref_table);
                    // BUGFIX: only count sections that actually produce output.
                    // Previously `added_count` was incremented unconditionally,
                    // overcounting when the rendered block was empty.
                    if !commented.is_empty() {
                        sections_added.push(key.to_owned());
                        added_count += 1;
                    }
                }
            } else if !user_doc.contains_key(key) {
                // Top-level scalar missing from the user config: queue it with
                // a marker prefix so the append loop below can tell it apart
                // from section names.
                let raw = format_commented_item(key, ref_item);
                if !raw.is_empty() {
                    sections_added.push(format!("__scalar__{key}"));
                    added_count += 1;
                }
            }
        }
        // Append the queued additions after the user's own content, then
        // reorder the whole file canonically.
        let mut output = user_doc.to_string();
        for key in &sections_added {
            if let Some(scalar_key) = key.strip_prefix("__scalar__") {
                if let Some(ref_item) = reference_doc.get(scalar_key) {
                    let raw = format_commented_item(scalar_key, ref_item);
                    if !raw.is_empty() {
                        output.push('\n');
                        output.push_str(&raw);
                        output.push('\n');
                    }
                }
            } else if let Some(ref_table) = reference_doc.get(key.as_str()).and_then(Item::as_table)
            {
                let block = commented_table_block(key, ref_table);
                if !block.is_empty() {
                    output.push('\n');
                    output.push_str(&block);
                }
            }
        }
        output = reorder_sections(&output, CANONICAL_ORDER);
        // Scalar markers are internal bookkeeping; report only real sections.
        let sections_added_clean: Vec<String> = sections_added
            .into_iter()
            .filter(|k| !k.starts_with("__scalar__"))
            .collect();
        Ok(MigrationResult {
            output,
            added_count,
            sections_added: sections_added_clean,
        })
    }
}
/// Recursively merges keys present in `ref_table` but absent from
/// `user_table`, appending them as commented-out lines to `user_table`'s
/// trailing decor.
///
/// `section_key` is the dotted path of `user_table` (e.g. `tools.shell`),
/// used to build sub-section headers. Arrays of tables in the reference are
/// deliberately skipped. Returns the number of comment lines/blocks added.
fn merge_table_commented(user_table: &mut Table, ref_table: &Table, section_key: &str) -> usize {
    let mut count = 0usize;
    for (key, ref_item) in ref_table {
        if ref_item.is_table() {
            if user_table.contains_key(key) {
                // Both sides have the sub-table: recurse with the dotted path.
                let pair = (
                    user_table.get_mut(key).and_then(Item::as_table_mut),
                    ref_item.as_table(),
                );
                if let (Some(user_sub_table), Some(ref_sub_table)) = pair {
                    let sub_key = format!("{section_key}.{key}");
                    count += merge_table_commented(user_sub_table, ref_sub_table, &sub_key);
                }
            } else if let Some(ref_sub_table) = ref_item.as_table() {
                // Missing sub-table: render it as a commented block and append
                // it to the parent's suffix decor, unless a commented header
                // for it is already present there.
                let dotted = format!("{section_key}.{key}");
                let marker = format!("# [{dotted}]");
                let existing = user_table
                    .decor()
                    .suffix()
                    .and_then(RawString::as_str)
                    .unwrap_or("");
                if !existing.contains(&marker) {
                    let block = commented_table_block(&dotted, ref_sub_table);
                    if !block.is_empty() {
                        let new_suffix = format!("{existing}\n{block}");
                        user_table.decor_mut().set_suffix(new_suffix);
                        count += 1;
                    }
                }
            }
        } else if ref_item.is_array_of_tables() {
            // Arrays of tables are intentionally not migrated as comments.
        } else {
            if !user_table.contains_key(key) {
                // Missing scalar: append a `# key = value` line to the
                // table's suffix decor (deduplicated by the helper).
                let raw_value = ref_item
                    .as_value()
                    .map(value_to_toml_string)
                    .unwrap_or_default();
                if !raw_value.is_empty() {
                    let comment_line = format!("# {key} = {raw_value}\n");
                    append_comment_to_table_suffix(user_table, &comment_line);
                    count += 1;
                }
            }
        }
    }
    count
}
/// Appends `comment_line` to `table`'s trailing decor, unless an identical
/// (trimmed) line already appears there.
fn append_comment_to_table_suffix(table: &mut Table, comment_line: &str) {
    let current: String = table
        .decor()
        .suffix()
        .and_then(RawString::as_str)
        .unwrap_or_default()
        .to_owned();
    if current.contains(comment_line.trim()) {
        return;
    }
    let combined = format!("{current}{comment_line}");
    table.decor_mut().set_suffix(combined);
}
fn format_commented_item(key: &str, item: &Item) -> String {
if let Some(val) = item.as_value() {
let raw = value_to_toml_string(val);
if !raw.is_empty() {
return format!("# {key} = {raw}\n");
}
}
String::new()
}
/// Renders `table` as a fully commented-out TOML block: a `# [section]`
/// header followed by `# key = value` lines, with nested sub-tables emitted
/// recursively as dotted sections. Arrays of tables are skipped.
///
/// Returns an empty string when the table contributes no keys, so callers can
/// use `!block.is_empty()` to decide whether anything was added.
fn commented_table_block(section_name: &str, table: &Table) -> String {
    use std::fmt::Write as _;
    let mut lines = format!("# [{section_name}]\n");
    for (key, item) in table {
        if item.is_table() {
            if let Some(sub_table) = item.as_table() {
                let sub_name = format!("{section_name}.{key}");
                let sub_block = commented_table_block(&sub_name, sub_table);
                if !sub_block.is_empty() {
                    lines.push('\n');
                    lines.push_str(&sub_block);
                }
            }
        } else if item.is_array_of_tables() {
            // Arrays of tables are intentionally not rendered as comments.
        } else if let Some(val) = item.as_value() {
            let raw = value_to_toml_string(val);
            if !raw.is_empty() {
                let _ = writeln!(lines, "# {key} = {raw}");
            }
        }
    }
    // BUGFIX: the header line is itself commented (`# [name]`), but the old
    // guard compared against `[name]`, so it never matched and empty tables
    // still produced a lone header line. Compare against the commented header.
    if lines.trim() == format!("# [{section_name}]") {
        return String::new();
    }
    lines
}
/// Serializes a TOML value to its literal text form, without any decor or
/// whitespace carried over from the source document.
///
/// Whole-number floats keep one decimal place (`1.0`) so they re-parse as
/// floats. Strings are emitted as basic strings with `\` and `"` escaped so
/// the output stays valid TOML.
fn value_to_toml_string(val: &Value) -> String {
    match val {
        Value::String(s) => {
            // BUGFIX: escape backslashes and double quotes; previously a
            // value containing `"` produced an unparseable line (this output
            // is re-used as real TOML by migrate_llm_to_providers).
            let escaped = s.value().replace('\\', "\\\\").replace('"', "\\\"");
            format!("\"{escaped}\"")
        }
        Value::Integer(i) => i.value().to_string(),
        Value::Float(f) => {
            let v = f.value();
            if v.fract() == 0.0 {
                // Keep a trailing `.0` so the value stays a float on re-parse.
                format!("{v:.1}")
            } else {
                format!("{v}")
            }
        }
        Value::Boolean(b) => b.value().to_string(),
        Value::Array(arr) => format_array(arr),
        Value::InlineTable(t) => {
            let pairs: Vec<String> = t
                .iter()
                .map(|(k, v)| format!("{k} = {}", value_to_toml_string(v)))
                .collect();
            format!("{{ {} }}", pairs.join(", "))
        }
        Value::Datetime(dt) => dt.value().to_string(),
    }
}
fn format_array(arr: &Array) -> String {
if arr.is_empty() {
return "[]".to_owned();
}
let items: Vec<String> = arr.iter().map(value_to_toml_string).collect();
format!("[{}]", items.join(", "))
}
/// Rewrites `toml_str` so its top-level sections follow `canonical_order`.
///
/// The pre-section preamble (lines before the first header) stays first.
/// Sections whose top-level name (leading `#` markers and dotted suffixes
/// stripped) matches a canonical entry are emitted in canonical order; any
/// remaining sections keep their original relative order at the end.
fn reorder_sections(toml_str: &str, canonical_order: &[&str]) -> String {
    let sections = split_into_sections(toml_str);
    if sections.is_empty() {
        return toml_str.to_owned();
    }
    // Chunk with an empty header = content before the first `[section]` line.
    let preamble_block = sections
        .iter()
        .find(|(h, _)| h.is_empty())
        .map_or("", |(_, c)| c.as_str());
    let section_map: Vec<(&str, &str)> = sections
        .iter()
        .filter(|(h, _)| !h.is_empty())
        .map(|(h, c)| (h.as_str(), c.as_str()))
        .collect();
    let mut out = String::new();
    if !preamble_block.is_empty() {
        out.push_str(preamble_block);
    }
    // Tracks which chunks have been written so duplicates aren't re-emitted.
    let mut emitted: Vec<bool> = vec![false; section_map.len()];
    for &canon in canonical_order {
        for (idx, &(header, content)) in section_map.iter().enumerate() {
            let section_name = extract_section_name(header);
            // Compare only the first dotted component, ignoring any leading
            // `#` comment markers.
            let top_level = section_name
                .split('.')
                .next()
                .unwrap_or("")
                .trim_start_matches('#')
                .trim();
            if top_level == canon && !emitted[idx] {
                out.push_str(content);
                emitted[idx] = true;
            }
        }
    }
    // Sections unknown to the canonical list are appended in original order.
    for (idx, &(_, content)) in section_map.iter().enumerate() {
        if !emitted[idx] {
            out.push_str(content);
        }
    }
    out
}
/// Extracts the section name from a (possibly commented) header line:
/// `# [llm.stt]` → `llm.stt`, `[agent]` → `agent`. Falls back to the trimmed,
/// de-commented input when no bracketed name is found.
fn extract_section_name(header: &str) -> &str {
    let mut name = header.trim();
    // Drop any number of leading `# ` markers (mirrors trim_start_matches).
    while let Some(rest) = name.strip_prefix("# ") {
        name = rest;
    }
    if let Some(body) = name.strip_prefix('[') {
        if let Some(end) = body.find(']') {
            return &body[..end];
        }
    }
    name
}
/// Splits raw TOML text into `(header, content)` chunks at every top-level
/// `[section]` line. The first chunk may carry an empty header (preamble).
/// `content` includes the header line itself and every line ends with `\n`.
fn split_into_sections(toml_str: &str) -> Vec<(String, String)> {
    let mut sections: Vec<(String, String)> = Vec::new();
    let mut header = String::new();
    let mut body = String::new();
    for line in toml_str.lines() {
        let stripped = line.trim();
        if is_top_level_section_header(stripped) {
            // Close out the previous chunk (possibly an empty preamble) and
            // start a new one at this header.
            sections.push((header.clone(), body.clone()));
            header = stripped.to_owned();
            body = format!("{line}\n");
        } else {
            body.push_str(line);
            body.push('\n');
        }
    }
    if !header.is_empty() || !body.is_empty() {
        sections.push((header, body));
    }
    sections
}
/// Returns `true` for a plain top-level `[section]` header line — i.e. not an
/// array-of-tables (`[[…]]`) and not a dotted sub-table (`[a.b]`).
fn is_top_level_section_header(line: &str) -> bool {
    let Some(rest) = line.strip_prefix('[') else {
        return false;
    };
    if rest.starts_with('[') {
        return false;
    }
    match rest.find(']') {
        Some(end) => !rest[..end].contains('.'),
        None => false,
    }
}
/// Converts the legacy single-provider `[llm]` layout (`provider = "…"` plus
/// optional `[llm.cloud]` / `[llm.openai]` / `[llm.gemini]` /
/// `[llm.orchestrator]` / `[llm.router]` / `[[llm.compatible]]` sections)
/// into the `[[llm.providers]]` array format, rewriting the entire llm
/// portion of the source text.
///
/// Returns the input unchanged when no legacy keys are present or when no
/// provider blocks could be derived.
///
/// # Errors
///
/// Fails with a parse error when `toml_src` is invalid, or when
/// `[[llm.providers]]` already coexists with legacy keys.
#[allow(
    clippy::too_many_lines,
    clippy::format_push_string,
    clippy::manual_let_else,
    clippy::op_ref,
    clippy::collapsible_if
)]
pub fn migrate_llm_to_providers(toml_src: &str) -> Result<MigrationResult, MigrateError> {
    let doc = toml_src.parse::<toml_edit::DocumentMut>()?;
    let llm = match doc.get("llm").and_then(toml_edit::Item::as_table) {
        Some(t) => t,
        None => {
            // No [llm] table at all: nothing to migrate.
            return Ok(MigrationResult {
                output: toml_src.to_owned(),
                added_count: 0,
                sections_added: Vec::new(),
            });
        }
    };
    // Presence of any of these keys marks the legacy layout.
    let has_provider_field = llm.contains_key("provider");
    let has_cloud = llm.contains_key("cloud");
    let has_openai = llm.contains_key("openai");
    let has_gemini = llm.contains_key("gemini");
    let has_orchestrator = llm.contains_key("orchestrator");
    let has_router = llm.contains_key("router");
    let has_providers = llm.contains_key("providers");
    if !has_provider_field
        && !has_cloud
        && !has_openai
        && !has_orchestrator
        && !has_router
        && !has_gemini
    {
        return Ok(MigrationResult {
            output: toml_src.to_owned(),
            added_count: 0,
            sections_added: Vec::new(),
        });
    }
    if has_providers {
        // NOTE(review): manufactures a TomlError by deliberately failing a
        // parse so the message can ride in MigrateError::Parse;
        // MigrateError::InvalidStructure would express this more directly —
        // confirm callers don't match on the Parse variant before changing.
        return Err(MigrateError::Parse(
            "cannot migrate: [[llm.providers]] already exists alongside legacy keys"
                .parse::<toml_edit::DocumentMut>()
                .unwrap_err(),
        ));
    }
    // Top-level legacy fields shared by several provider kinds below.
    let provider_str = llm
        .get("provider")
        .and_then(toml_edit::Item::as_str)
        .unwrap_or("ollama");
    let base_url = llm
        .get("base_url")
        .and_then(toml_edit::Item::as_str)
        .map(str::to_owned);
    let model = llm
        .get("model")
        .and_then(toml_edit::Item::as_str)
        .map(str::to_owned);
    let embedding_model = llm
        .get("embedding_model")
        .and_then(toml_edit::Item::as_str)
        .map(str::to_owned);
    let mut provider_blocks: Vec<String> = Vec::new();
    let mut routing: Option<String> = None;
    let mut routes_block: Option<String> = None;
    match provider_str {
        // Single local provider: carry over model/embedding/base_url.
        "ollama" => {
            let mut block = "[[llm.providers]]\ntype = \"ollama\"\n".to_owned();
            if let Some(ref m) = model {
                block.push_str(&format!("model = \"{m}\"\n"));
            }
            if let Some(ref em) = embedding_model {
                block.push_str(&format!("embedding_model = \"{em}\"\n"));
            }
            if let Some(ref u) = base_url {
                block.push_str(&format!("base_url = \"{u}\"\n"));
            }
            provider_blocks.push(block);
        }
        // Claude settings historically lived under [llm.cloud].
        "claude" => {
            let mut block = "[[llm.providers]]\ntype = \"claude\"\n".to_owned();
            if let Some(cloud) = llm.get("cloud").and_then(toml_edit::Item::as_table) {
                if let Some(m) = cloud.get("model").and_then(toml_edit::Item::as_str) {
                    block.push_str(&format!("model = \"{m}\"\n"));
                }
                if let Some(t) = cloud
                    .get("max_tokens")
                    .and_then(toml_edit::Item::as_integer)
                {
                    block.push_str(&format!("max_tokens = {t}\n"));
                }
                // Boolean flags are carried over only when explicitly true.
                if cloud
                    .get("server_compaction")
                    .and_then(toml_edit::Item::as_bool)
                    == Some(true)
                {
                    block.push_str("server_compaction = true\n");
                }
                if cloud
                    .get("enable_extended_context")
                    .and_then(toml_edit::Item::as_bool)
                    == Some(true)
                {
                    block.push_str("enable_extended_context = true\n");
                }
                if let Some(thinking) = cloud.get("thinking").and_then(toml_edit::Item::as_table) {
                    // Re-emit the thinking table inline, using each item's
                    // Display form.
                    let pairs: Vec<String> =
                        thinking.iter().map(|(k, v)| format!("{k} = {v}")).collect();
                    block.push_str(&format!("thinking = {{ {} }}\n", pairs.join(", ")));
                }
            } else if let Some(ref m) = model {
                block.push_str(&format!("model = \"{m}\"\n"));
            }
            provider_blocks.push(block);
        }
        "openai" => {
            let mut block = "[[llm.providers]]\ntype = \"openai\"\n".to_owned();
            if let Some(openai) = llm.get("openai").and_then(toml_edit::Item::as_table) {
                copy_str_field(openai, "model", &mut block);
                copy_str_field(openai, "base_url", &mut block);
                copy_int_field(openai, "max_tokens", &mut block);
                copy_str_field(openai, "embedding_model", &mut block);
                copy_str_field(openai, "reasoning_effort", &mut block);
            } else if let Some(ref m) = model {
                block.push_str(&format!("model = \"{m}\"\n"));
            }
            provider_blocks.push(block);
        }
        "gemini" => {
            let mut block = "[[llm.providers]]\ntype = \"gemini\"\n".to_owned();
            if let Some(gemini) = llm.get("gemini").and_then(toml_edit::Item::as_table) {
                copy_str_field(gemini, "model", &mut block);
                copy_int_field(gemini, "max_tokens", &mut block);
                copy_str_field(gemini, "base_url", &mut block);
                copy_str_field(gemini, "embedding_model", &mut block);
                copy_str_field(gemini, "thinking_level", &mut block);
                copy_int_field(gemini, "thinking_budget", &mut block);
                if let Some(v) = gemini
                    .get("include_thoughts")
                    .and_then(toml_edit::Item::as_bool)
                {
                    block.push_str(&format!("include_thoughts = {v}\n"));
                }
            } else if let Some(ref m) = model {
                block.push_str(&format!("model = \"{m}\"\n"));
            }
            provider_blocks.push(block);
        }
        // Each [[llm.compatible]] entry becomes its own provider block.
        "compatible" => {
            if let Some(compat_arr) = llm
                .get("compatible")
                .and_then(toml_edit::Item::as_array_of_tables)
            {
                for entry in compat_arr {
                    let mut block = "[[llm.providers]]\ntype = \"compatible\"\n".to_owned();
                    copy_str_field(entry, "name", &mut block);
                    copy_str_field(entry, "base_url", &mut block);
                    copy_str_field(entry, "model", &mut block);
                    copy_int_field(entry, "max_tokens", &mut block);
                    copy_str_field(entry, "embedding_model", &mut block);
                    provider_blocks.push(block);
                }
            }
        }
        // Orchestrator maps to task-based routing over named providers.
        "orchestrator" => {
            routing = Some("task".to_owned());
            if let Some(orch) = llm.get("orchestrator").and_then(toml_edit::Item::as_table) {
                let default_name = orch
                    .get("default")
                    .and_then(toml_edit::Item::as_str)
                    .unwrap_or("")
                    .to_owned();
                let embed_name = orch
                    .get("embed")
                    .and_then(toml_edit::Item::as_str)
                    .unwrap_or("")
                    .to_owned();
                if let Some(routes) = orch.get("routes").and_then(toml_edit::Item::as_table) {
                    // Rebuild [llm.routes] keeping only string-array entries.
                    let mut rb = "[llm.routes]\n".to_owned();
                    for (key, val) in routes {
                        if let Some(arr) = val.as_array() {
                            let items: Vec<String> = arr
                                .iter()
                                .filter_map(toml_edit::Value::as_str)
                                .map(|s| format!("\"{s}\""))
                                .collect();
                            rb.push_str(&format!("{key} = [{}]\n", items.join(", ")));
                        }
                    }
                    routes_block = Some(rb);
                }
                if let Some(providers) = orch.get("providers").and_then(toml_edit::Item::as_table) {
                    for (name, pcfg_item) in providers {
                        let Some(pcfg) = pcfg_item.as_table() else {
                            continue;
                        };
                        let ptype = pcfg
                            .get("type")
                            .and_then(toml_edit::Item::as_str)
                            .unwrap_or("ollama");
                        let mut block =
                            format!("[[llm.providers]]\nname = \"{name}\"\ntype = \"{ptype}\"\n");
                        if name == &default_name {
                            block.push_str("default = true\n");
                        }
                        if name == &embed_name {
                            block.push_str("embed = true\n");
                        }
                        copy_str_field(pcfg, "model", &mut block);
                        copy_str_field(pcfg, "base_url", &mut block);
                        copy_str_field(pcfg, "embedding_model", &mut block);
                        // Fall back to the legacy per-type sections when the
                        // orchestrator entry leaves fields unset.
                        if ptype == "claude" && !pcfg.contains_key("model") {
                            if let Some(cloud) =
                                llm.get("cloud").and_then(toml_edit::Item::as_table)
                            {
                                copy_str_field(cloud, "model", &mut block);
                                copy_int_field(cloud, "max_tokens", &mut block);
                            }
                        }
                        if ptype == "openai" && !pcfg.contains_key("model") {
                            if let Some(openai) =
                                llm.get("openai").and_then(toml_edit::Item::as_table)
                            {
                                copy_str_field(openai, "model", &mut block);
                                copy_str_field(openai, "base_url", &mut block);
                                copy_int_field(openai, "max_tokens", &mut block);
                                copy_str_field(openai, "embedding_model", &mut block);
                            }
                        }
                        if ptype == "ollama" && !pcfg.contains_key("base_url") {
                            if let Some(ref u) = base_url {
                                block.push_str(&format!("base_url = \"{u}\"\n"));
                            }
                        }
                        if ptype == "ollama" && !pcfg.contains_key("model") {
                            if let Some(ref m) = model {
                                block.push_str(&format!("model = \"{m}\"\n"));
                            }
                        }
                        if ptype == "ollama" && !pcfg.contains_key("embedding_model") {
                            if let Some(ref em) = embedding_model {
                                block.push_str(&format!("embedding_model = \"{em}\"\n"));
                            }
                        }
                        provider_blocks.push(block);
                    }
                }
            }
        }
        // Router keeps its strategy as the routing mode and expands each
        // chain entry into a provider block seeded from its legacy section.
        "router" => {
            if let Some(router) = llm.get("router").and_then(toml_edit::Item::as_table) {
                let strategy = router
                    .get("strategy")
                    .and_then(toml_edit::Item::as_str)
                    .unwrap_or("ema");
                routing = Some(strategy.to_owned());
                if let Some(chain) = router.get("chain").and_then(toml_edit::Item::as_array) {
                    for item in chain {
                        let name = item.as_str().unwrap_or_default();
                        let ptype = infer_provider_type(name, llm);
                        let mut block =
                            format!("[[llm.providers]]\nname = \"{name}\"\ntype = \"{ptype}\"\n");
                        match ptype {
                            "claude" => {
                                if let Some(cloud) =
                                    llm.get("cloud").and_then(toml_edit::Item::as_table)
                                {
                                    copy_str_field(cloud, "model", &mut block);
                                    copy_int_field(cloud, "max_tokens", &mut block);
                                }
                            }
                            "openai" => {
                                if let Some(openai) =
                                    llm.get("openai").and_then(toml_edit::Item::as_table)
                                {
                                    copy_str_field(openai, "model", &mut block);
                                    copy_str_field(openai, "base_url", &mut block);
                                    copy_int_field(openai, "max_tokens", &mut block);
                                    copy_str_field(openai, "embedding_model", &mut block);
                                } else {
                                    if let Some(ref m) = model {
                                        block.push_str(&format!("model = \"{m}\"\n"));
                                    }
                                    if let Some(ref u) = base_url {
                                        block.push_str(&format!("base_url = \"{u}\"\n"));
                                    }
                                }
                            }
                            "ollama" => {
                                if let Some(ref m) = model {
                                    block.push_str(&format!("model = \"{m}\"\n"));
                                }
                                if let Some(ref em) = embedding_model {
                                    block.push_str(&format!("embedding_model = \"{em}\"\n"));
                                }
                                if let Some(ref u) = base_url {
                                    block.push_str(&format!("base_url = \"{u}\"\n"));
                                }
                            }
                            _ => {
                                if let Some(ref m) = model {
                                    block.push_str(&format!("model = \"{m}\"\n"));
                                }
                            }
                        }
                        provider_blocks.push(block);
                    }
                }
            }
        }
        // Unknown provider string: emit it verbatim with the shared model.
        other => {
            let mut block = format!("[[llm.providers]]\ntype = \"{other}\"\n");
            if let Some(ref m) = model {
                block.push_str(&format!("model = \"{m}\"\n"));
            }
            provider_blocks.push(block);
        }
    }
    if provider_blocks.is_empty() {
        return Ok(MigrationResult {
            output: toml_src.to_owned(),
            added_count: 0,
            sections_added: Vec::new(),
        });
    }
    // Rebuild the [llm] section text: routing mode, carried-over scalar keys,
    // optional [llm.routes], then the provider blocks.
    let mut new_llm = "[llm]\n".to_owned();
    if let Some(ref r) = routing {
        new_llm.push_str(&format!("routing = \"{r}\"\n"));
    }
    for key in &[
        "response_cache_enabled",
        "response_cache_ttl_secs",
        "semantic_cache_enabled",
        "semantic_cache_threshold",
        "semantic_cache_max_candidates",
        "summary_model",
        "instruction_file",
    ] {
        if let Some(val) = llm.get(key) {
            if let Some(v) = val.as_value() {
                let raw = value_to_toml_string(v);
                if !raw.is_empty() {
                    new_llm.push_str(&format!("{key} = {raw}\n"));
                }
            }
        }
    }
    new_llm.push('\n');
    if let Some(rb) = routes_block {
        new_llm.push_str(&rb);
        new_llm.push('\n');
    }
    for block in &provider_blocks {
        new_llm.push_str(block);
        new_llm.push('\n');
    }
    let output = replace_llm_section(toml_src, &new_llm);
    Ok(MigrationResult {
        output,
        added_count: provider_blocks.len(),
        sections_added: vec!["llm.providers".to_owned()],
    })
}
/// Maps a router chain entry name to a provider `type` string.
///
/// Known names map to themselves; anything else is guessed from which legacy
/// `[llm.*]` sections exist, defaulting to `"ollama"`.
fn infer_provider_type<'a>(name: &str, llm: &'a toml_edit::Table) -> &'a str {
    match name {
        "claude" => "claude",
        "openai" => "openai",
        "gemini" => "gemini",
        "ollama" => "ollama",
        "candle" => "candle",
        _ if llm.contains_key("compatible") => "compatible",
        _ if llm.contains_key("openai") => "openai",
        _ => "ollama",
    }
}
/// Appends `key = "value"` to `out` when `table` holds a string at `key`;
/// otherwise does nothing.
fn copy_str_field(table: &toml_edit::Table, key: &str, out: &mut String) {
    use std::fmt::Write as _;
    let Some(v) = table.get(key).and_then(toml_edit::Item::as_str) else {
        return;
    };
    let _ = writeln!(out, "{key} = \"{v}\"");
}
/// Appends `key = value` to `out` when `table` holds an integer at `key`;
/// otherwise does nothing.
fn copy_int_field(table: &toml_edit::Table, key: &str, out: &mut String) {
    use std::fmt::Write as _;
    let Some(v) = table.get(key).and_then(toml_edit::Item::as_integer) else {
        return;
    };
    let _ = writeln!(out, "{key} = {v}");
}
/// Replaces every `[llm]` / `[llm.*]` / `[[llm.*]]` section in `toml_str`
/// with `new_llm_section`, splicing the replacement in once at the position
/// of the first llm section.
///
/// Line-based scanner: llm-family headers (matched by prefix, so any header
/// starting with `[llm` counts) set `skip_until_next_top`; the replacement is
/// emitted when the next non-llm top-level header — or end of input — is
/// reached.
fn replace_llm_section(toml_str: &str, new_llm_section: &str) -> String {
    let mut out = String::new();
    let mut in_llm = false;
    let mut skip_until_next_top = false;
    for line in toml_str.lines() {
        let trimmed = line.trim();
        // Plain top-level `[section]`: no dot, not an array of tables.
        let is_top_section = (trimmed.starts_with('[') && !trimmed.starts_with("[["))
            && trimmed.ends_with(']')
            && !trimmed[1..trimmed.len() - 1].contains('.');
        // Top-level array of tables `[[section]]`.
        let is_top_aot = trimmed.starts_with("[[")
            && trimmed.ends_with("]]")
            && !trimmed[2..trimmed.len() - 2].contains('.');
        // Any header in the llm family, e.g. `[llm]`, `[llm.stt]`,
        // `[[llm.providers]]`.
        let is_llm_sub = (trimmed.starts_with("[llm") || trimmed.starts_with("[[llm"))
            && (trimmed.contains(']'));
        if is_llm_sub || (in_llm && !is_top_section && !is_top_aot) {
            // Inside the llm block (header or body line): drop the line.
            in_llm = true;
            skip_until_next_top = true;
            continue;
        }
        if is_top_section || is_top_aot {
            if skip_until_next_top {
                // First non-llm top-level header after the llm block:
                // splice in the replacement before it.
                out.push_str(new_llm_section);
                skip_until_next_top = false;
            }
            in_llm = false;
        }
        if !skip_until_next_top {
            out.push_str(line);
            out.push('\n');
        }
    }
    // The llm block ran to end-of-file: append the replacement.
    if skip_until_next_top {
        out.push_str(new_llm_section);
    }
    out
}
/// Moves the legacy `[llm.stt]` `model` / `base_url` keys onto a matching
/// (or newly created) `[[llm.providers]]` entry as `stt_model` / `base_url`,
/// then rewrites `[llm.stt].provider` to reference that entry by name and
/// removes the migrated keys.
///
/// Returns the input unchanged when `[llm.stt]` carries neither `model` nor
/// `base_url`.
#[allow(clippy::too_many_lines)]
pub fn migrate_stt_to_provider(toml_src: &str) -> Result<MigrationResult, MigrateError> {
    let mut doc = toml_src.parse::<toml_edit::DocumentMut>()?;
    // Read the three legacy [llm.stt] fields up-front (immutable borrows).
    let stt_model = doc
        .get("llm")
        .and_then(toml_edit::Item::as_table)
        .and_then(|llm| llm.get("stt"))
        .and_then(toml_edit::Item::as_table)
        .and_then(|stt| stt.get("model"))
        .and_then(toml_edit::Item::as_str)
        .map(ToOwned::to_owned);
    let stt_base_url = doc
        .get("llm")
        .and_then(toml_edit::Item::as_table)
        .and_then(|llm| llm.get("stt"))
        .and_then(toml_edit::Item::as_table)
        .and_then(|stt| stt.get("base_url"))
        .and_then(toml_edit::Item::as_str)
        .map(ToOwned::to_owned);
    let stt_provider_hint = doc
        .get("llm")
        .and_then(toml_edit::Item::as_table)
        .and_then(|llm| llm.get("stt"))
        .and_then(toml_edit::Item::as_table)
        .and_then(|stt| stt.get("provider"))
        .and_then(toml_edit::Item::as_str)
        .map(ToOwned::to_owned)
        .unwrap_or_default();
    // Nothing legacy to migrate.
    if stt_model.is_none() && stt_base_url.is_none() {
        return Ok(MigrationResult {
            output: toml_src.to_owned(),
            added_count: 0,
            sections_added: Vec::new(),
        });
    }
    let stt_model = stt_model.unwrap_or_else(|| "whisper-1".to_owned());
    // Candle hints map to a local provider; everything else targets openai.
    let target_type = match stt_provider_hint.as_str() {
        "candle-whisper" | "candle" => "candle",
        _ => "openai",
    };
    let providers = doc
        .get("llm")
        .and_then(toml_edit::Item::as_table)
        .and_then(|llm| llm.get("providers"))
        .and_then(toml_edit::Item::as_array_of_tables);
    // Prefer an existing provider whose name or type matches the hint, or
    // whose type matches the computed target.
    let matching_idx = providers.and_then(|arr| {
        arr.iter().enumerate().find_map(|(i, t)| {
            let name = t
                .get("name")
                .and_then(toml_edit::Item::as_str)
                .unwrap_or("");
            let ptype = t
                .get("type")
                .and_then(toml_edit::Item::as_str)
                .unwrap_or("");
            let name_match = !stt_provider_hint.is_empty()
                && (name == stt_provider_hint || ptype == stt_provider_hint);
            let type_match = ptype == target_type;
            if name_match || type_match {
                Some(i)
            } else {
                None
            }
        })
    });
    let resolved_provider_name: String;
    if let Some(idx) = matching_idx {
        // Attach the stt fields to the matched provider entry.
        let llm_mut = doc
            .get_mut("llm")
            .and_then(toml_edit::Item::as_table_mut)
            .ok_or(MigrateError::InvalidStructure(
                "[llm] table not accessible for mutation",
            ))?;
        let providers_mut = llm_mut
            .get_mut("providers")
            .and_then(toml_edit::Item::as_array_of_tables_mut)
            .ok_or(MigrateError::InvalidStructure(
                "[[llm.providers]] array not accessible for mutation",
            ))?;
        let entry = providers_mut
            .iter_mut()
            .nth(idx)
            .ok_or(MigrateError::InvalidStructure(
                "[[llm.providers]] entry index out of range during mutation",
            ))?;
        let existing_name = entry
            .get("name")
            .and_then(toml_edit::Item::as_str)
            .map(ToOwned::to_owned);
        // Unnamed entries get a synthetic "<type>-stt" name so [llm.stt]
        // can reference them.
        let entry_name = existing_name.unwrap_or_else(|| {
            let t = entry
                .get("type")
                .and_then(toml_edit::Item::as_str)
                .unwrap_or("openai");
            format!("{t}-stt")
        });
        entry.insert("name", toml_edit::value(entry_name.clone()));
        entry.insert("stt_model", toml_edit::value(stt_model.clone()));
        // Only set base_url if the entry doesn't already configure one.
        if stt_base_url.is_some() && entry.get("base_url").is_none() {
            entry.insert(
                "base_url",
                toml_edit::value(stt_base_url.as_deref().unwrap_or_default()),
            );
        }
        resolved_provider_name = entry_name;
    } else {
        // No match: append a fresh provider entry dedicated to STT.
        let new_name = if target_type == "candle" {
            "local-whisper".to_owned()
        } else {
            "openai-stt".to_owned()
        };
        let mut new_entry = toml_edit::Table::new();
        new_entry.insert("name", toml_edit::value(new_name.clone()));
        new_entry.insert("type", toml_edit::value(target_type));
        new_entry.insert("stt_model", toml_edit::value(stt_model.clone()));
        if let Some(ref url) = stt_base_url {
            new_entry.insert("base_url", toml_edit::value(url.clone()));
        }
        let llm_mut = doc
            .get_mut("llm")
            .and_then(toml_edit::Item::as_table_mut)
            .ok_or(MigrateError::InvalidStructure(
                "[llm] table not accessible for mutation",
            ))?;
        if let Some(item) = llm_mut.get_mut("providers") {
            if let Some(arr) = item.as_array_of_tables_mut() {
                arr.push(new_entry);
            }
        } else {
            let mut arr = toml_edit::ArrayOfTables::new();
            arr.push(new_entry);
            llm_mut.insert("providers", toml_edit::Item::ArrayOfTables(arr));
        }
        resolved_provider_name = new_name;
    }
    // Point [llm.stt] at the provider and drop the migrated keys.
    if let Some(stt_table) = doc
        .get_mut("llm")
        .and_then(toml_edit::Item::as_table_mut)
        .and_then(|llm| llm.get_mut("stt"))
        .and_then(toml_edit::Item::as_table_mut)
    {
        stt_table.insert("provider", toml_edit::value(resolved_provider_name.clone()));
        stt_table.remove("model");
        stt_table.remove("base_url");
    }
    Ok(MigrationResult {
        output: doc.to_string(),
        added_count: 1,
        sections_added: vec!["llm.providers.stt_model".to_owned()],
    })
}
/// Retires `[orchestration].planner_model`: the key is removed and a
/// commented `# planner_provider = "…"` hint (carrying the old value) is
/// appended to the table's trailing decor, with a stderr warning telling the
/// user to point `planner_provider` at a `[[llm.providers]]` name.
///
/// Returns the input unchanged when `planner_model` is absent or not a
/// string.
pub fn migrate_planner_model_to_provider(toml_src: &str) -> Result<MigrationResult, MigrateError> {
    let mut doc = toml_src.parse::<toml_edit::DocumentMut>()?;
    let old_value = doc
        .get("orchestration")
        .and_then(toml_edit::Item::as_table)
        .and_then(|t| t.get("planner_model"))
        .and_then(toml_edit::Item::as_value)
        .and_then(toml_edit::Value::as_str)
        .map(ToOwned::to_owned);
    let Some(old_model) = old_value else {
        return Ok(MigrationResult {
            output: toml_src.to_owned(),
            added_count: 0,
            sections_added: Vec::new(),
        });
    };
    // Single-line comment: the old value followed by a MIGRATED marker
    // (the `\` continuation keeps it on one line at runtime).
    let commented_out = format!(
        "# planner_provider = \"{old_model}\" \
         # MIGRATED: was planner_model; update to a [[llm.providers]] name"
    );
    let orch_table = doc
        .get_mut("orchestration")
        .and_then(toml_edit::Item::as_table_mut)
        .ok_or(MigrateError::InvalidStructure(
            "[orchestration] is not a table",
        ))?;
    orch_table.remove("planner_model");
    // Append the hint after any existing trailing decor on the table.
    let decor = orch_table.decor_mut();
    let existing_suffix = decor.suffix().and_then(|s| s.as_str()).unwrap_or("");
    let new_suffix = if existing_suffix.trim().is_empty() {
        format!("\n{commented_out}\n")
    } else {
        format!("{existing_suffix}\n{commented_out}\n")
    };
    decor.set_suffix(new_suffix);
    eprintln!(
        "Migration warning: [orchestration].planner_model has been renamed to planner_provider \
         and its value commented out. `planner_provider` must reference a [[llm.providers]] \
         `name` field, not a raw model name. Update or remove the commented line."
    );
    Ok(MigrationResult {
        output: doc.to_string(),
        added_count: 1,
        sections_added: vec!["orchestration.planner_provider".to_owned()],
    })
}
/// Adds `trust_level = "trusted"` to every `[[mcp.servers]]` entry that lacks
/// it, keeping the pre-migration behavior (SSRF checks skipped for configured
/// servers). Emits a stderr notice when anything was patched.
pub fn migrate_mcp_trust_levels(toml_src: &str) -> Result<MigrationResult, MigrateError> {
    let mut doc = toml_src.parse::<toml_edit::DocumentMut>()?;
    let servers = doc
        .get_mut("mcp")
        .and_then(toml_edit::Item::as_table_mut)
        .and_then(|mcp| mcp.get_mut("servers"))
        .and_then(toml_edit::Item::as_array_of_tables_mut);
    let Some(servers) = servers else {
        // No [mcp] table or no [[mcp.servers]] array: nothing to patch.
        return Ok(MigrationResult {
            output: toml_src.to_owned(),
            added_count: 0,
            sections_added: Vec::new(),
        });
    };
    let mut added = 0usize;
    for server in servers.iter_mut() {
        if server.contains_key("trust_level") {
            continue;
        }
        server.insert(
            "trust_level",
            toml_edit::value(toml_edit::Value::from("trusted")),
        );
        added += 1;
    }
    if added > 0 {
        eprintln!(
            "Migration: added trust_level = \"trusted\" to {added} [[mcp.servers]] \
             entr{} (preserving previous SSRF-skip behavior). \
             Review and adjust trust levels as needed.",
            if added == 1 { "y" } else { "ies" }
        );
    }
    Ok(MigrationResult {
        output: doc.to_string(),
        added_count: added,
        sections_added: if added > 0 {
            vec!["mcp.servers.trust_level".to_owned()]
        } else {
            Vec::new()
        },
    })
}
/// Mirrors the legacy `[agent].max_tool_retries` / `max_retry_duration_secs`
/// values into `[tools.retry].max_attempts` / `budget_secs`, creating
/// `[tools]` and `[tools.retry]` as needed. The legacy keys are intentionally
/// left in place for compatibility, and existing `[tools.retry]` values win.
pub fn migrate_agent_retry_to_tools_retry(toml_src: &str) -> Result<MigrationResult, MigrateError> {
    let mut doc = toml_src.parse::<toml_edit::DocumentMut>()?;
    // cast_unsigned reinterprets the i64 bits as u64; a negative config value
    // becomes a huge u64, which then fails try_from below and falls back to
    // the default (2 / 30).
    let max_retries = doc
        .get("agent")
        .and_then(toml_edit::Item::as_table)
        .and_then(|t| t.get("max_tool_retries"))
        .and_then(toml_edit::Item::as_value)
        .and_then(toml_edit::Value::as_integer)
        .map(i64::cast_unsigned);
    let budget_secs = doc
        .get("agent")
        .and_then(toml_edit::Item::as_table)
        .and_then(|t| t.get("max_retry_duration_secs"))
        .and_then(toml_edit::Item::as_value)
        .and_then(toml_edit::Value::as_integer)
        .map(i64::cast_unsigned);
    if max_retries.is_none() && budget_secs.is_none() {
        return Ok(MigrationResult {
            output: toml_src.to_owned(),
            added_count: 0,
            sections_added: Vec::new(),
        });
    }
    if !doc.contains_key("tools") {
        doc.insert("tools", toml_edit::Item::Table(toml_edit::Table::new()));
    }
    let tools_table = doc
        .get_mut("tools")
        .and_then(toml_edit::Item::as_table_mut)
        .ok_or(MigrateError::InvalidStructure("[tools] is not a table"))?;
    if !tools_table.contains_key("retry") {
        tools_table.insert("retry", toml_edit::Item::Table(toml_edit::Table::new()));
    }
    let retry_table = tools_table
        .get_mut("retry")
        .and_then(toml_edit::Item::as_table_mut)
        .ok_or(MigrateError::InvalidStructure(
            "[tools.retry] is not a table",
        ))?;
    let mut added_count = 0usize;
    // Copy each legacy value only when [tools.retry] doesn't already set it.
    if let Some(retries) = max_retries
        && !retry_table.contains_key("max_attempts")
    {
        retry_table.insert(
            "max_attempts",
            toml_edit::value(i64::try_from(retries).unwrap_or(2)),
        );
        added_count += 1;
    }
    if let Some(secs) = budget_secs
        && !retry_table.contains_key("budget_secs")
    {
        retry_table.insert(
            "budget_secs",
            toml_edit::value(i64::try_from(secs).unwrap_or(30)),
        );
        added_count += 1;
    }
    if added_count > 0 {
        eprintln!(
            "Migration: [agent].max_tool_retries / max_retry_duration_secs migrated to \
             [tools.retry].max_attempts / budget_secs. Old fields preserved for compatibility."
        );
    }
    Ok(MigrationResult {
        output: doc.to_string(),
        added_count,
        sections_added: if added_count > 0 {
            vec!["tools.retry".to_owned()]
        } else {
            Vec::new()
        },
    })
}
/// Appends a commented `database_url` hint (with vault instructions) to the
/// `[memory]` table, creating the table when absent. Leaves the input
/// untouched when `database_url` is already configured.
pub fn migrate_database_url(toml_src: &str) -> Result<MigrationResult, MigrateError> {
    let mut doc = toml_src.parse::<toml_edit::DocumentMut>()?;
    let already_configured = doc
        .get("memory")
        .and_then(toml_edit::Item::as_table)
        .is_some_and(|t| t.contains_key("database_url"));
    if already_configured {
        return Ok(MigrationResult {
            output: toml_src.to_owned(),
            added_count: 0,
            sections_added: Vec::new(),
        });
    }
    if !doc.contains_key("memory") {
        doc.insert("memory", toml_edit::Item::Table(toml_edit::Table::new()));
    }
    let memory = doc
        .get_mut("memory")
        .and_then(toml_edit::Item::as_table_mut)
        .ok_or(MigrateError::InvalidStructure(
            "[memory] key exists but is not a table",
        ))?;
    let comment = "# PostgreSQL connection URL (used when binary is compiled with --features postgres).\n\
                   # Leave empty and store the actual URL in the vault:\n\
                   # zeph vault set ZEPH_DATABASE_URL \"postgres://user:pass@localhost:5432/zeph\"\n\
                   # database_url = \"\"\n";
    append_comment_to_table_suffix(memory, comment);
    Ok(MigrationResult {
        output: doc.to_string(),
        added_count: 1,
        sections_added: vec!["memory.database_url".to_owned()],
    })
}
/// Appends commented-out transactional-shell options to `[tools.shell]`.
/// Leaves the input untouched when the section is absent or `transactional`
/// is already configured.
pub fn migrate_shell_transactional(toml_src: &str) -> Result<MigrationResult, MigrateError> {
    let mut doc = toml_src.parse::<toml_edit::DocumentMut>()?;
    // Shared "no change" result for both early exits.
    let unchanged = || MigrationResult {
        output: toml_src.to_owned(),
        added_count: 0,
        sections_added: Vec::new(),
    };
    let has_shell_section = doc
        .get("tools")
        .and_then(toml_edit::Item::as_table)
        .is_some_and(|t| t.contains_key("shell"));
    if !has_shell_section {
        return Ok(unchanged());
    }
    let shell = doc
        .get_mut("tools")
        .and_then(toml_edit::Item::as_table_mut)
        .and_then(|t| t.get_mut("shell"))
        .and_then(toml_edit::Item::as_table_mut)
        .ok_or(MigrateError::InvalidStructure(
            "[tools.shell] is not a table",
        ))?;
    if shell.contains_key("transactional") {
        return Ok(unchanged());
    }
    let comment = "# Transactional shell: snapshot files before write commands, rollback on failure.\n\
                   # transactional = false\n\
                   # transaction_scope = [] # glob patterns; empty = all extracted paths\n\
                   # auto_rollback = false # rollback when exit code >= 2\n\
                   # auto_rollback_exit_codes = [] # explicit exit codes; overrides >= 2 heuristic\n\
                   # snapshot_required = false # abort if snapshot fails (default: warn and proceed)\n";
    append_comment_to_table_suffix(shell, comment);
    Ok(MigrationResult {
        output: doc.to_string(),
        added_count: 1,
        sections_added: vec!["tools.shell.transactional".to_owned()],
    })
}
/// Test helper: wraps a `&str` in a `Value::String` with default `Formatted`
/// decor.
#[cfg(test)]
fn make_formatted_str(s: &str) -> Value {
    use toml_edit::Formatted;
    Value::String(Formatted::new(s.to_owned()))
}
#[cfg(test)]
mod tests {
    use super::*;

    /// An empty config gains the canonical sections (live or commented out).
    #[test]
    fn empty_config_gets_sections_as_comments() {
        let migrator = ConfigMigrator::new();
        let result = migrator.migrate("").expect("migrate empty");
        assert!(result.added_count > 0 || !result.sections_added.is_empty());
        assert!(
            result.output.contains("[agent]") || result.output.contains("# [agent]"),
            "expected agent section in output, got:\n{}",
            result.output
        );
    }

    /// Values the user already set survive migration and are not duplicated
    /// as commented-out defaults.
    #[test]
    fn existing_values_not_overwritten() {
        let user = r#"
[agent]
name = "MyAgent"
max_tool_iterations = 5
"#;
        let migrator = ConfigMigrator::new();
        let result = migrator.migrate(user).expect("migrate");
        assert!(
            result.output.contains("name = \"MyAgent\""),
            "user value should be preserved"
        );
        assert!(
            result.output.contains("max_tool_iterations = 5"),
            "user value should be preserved"
        );
        assert!(
            !result.output.contains("# max_tool_iterations = 10"),
            "already-set key should not appear as comment"
        );
    }

    /// A key missing from an existing section is surfaced as a comment.
    #[test]
    fn missing_nested_key_added_as_comment() {
        let user = r#"
[memory]
sqlite_path = ".zeph/data/zeph.db"
"#;
        let migrator = ConfigMigrator::new();
        let result = migrator.migrate(user).expect("migrate");
        assert!(
            result.output.contains("# history_limit"),
            "missing key should be added as comment, got:\n{}",
            result.output
        );
    }

    /// Keys the reference config does not know about must not be dropped.
    #[test]
    fn unknown_user_keys_preserved() {
        let user = r#"
[agent]
name = "Test"
my_custom_key = "preserved"
"#;
        let migrator = ConfigMigrator::new();
        let result = migrator.migrate(user).expect("migrate");
        assert!(
            result.output.contains("my_custom_key = \"preserved\""),
            "custom user keys must not be removed"
        );
    }

    /// Running migration on its own output is a fixed point: the full output
    /// string must be byte-identical after a second pass.
    #[test]
    fn idempotent() {
        let migrator = ConfigMigrator::new();
        let first = migrator
            .migrate("[agent]\nname = \"Zeph\"\n")
            .expect("first migrate");
        let second = migrator.migrate(&first.output).expect("second migrate");
        assert_eq!(
            first.output, second.output,
            "idempotent: full output string must be identical after second migration pass"
        );
    }

    /// Invalid TOML surfaces as `MigrateError::Parse`.
    #[test]
    fn malformed_input_returns_error() {
        let migrator = ConfigMigrator::new();
        let err = migrator
            .migrate("[[invalid toml [[[")
            .expect_err("should error");
        assert!(
            matches!(err, MigrateError::Parse(_)),
            "expected Parse error"
        );
    }

    /// `[[table]]` array-of-tables entries pass through intact.
    #[test]
    fn array_of_tables_preserved() {
        let user = r#"
[mcp]
allowed_commands = ["npx"]
[[mcp.servers]]
id = "my-server"
command = "npx"
args = ["-y", "@modelcontextprotocol/server-filesystem", "/tmp"]
"#;
        let migrator = ConfigMigrator::new();
        let result = migrator.migrate(user).expect("migrate");
        assert!(
            result.output.contains("[[mcp.servers]]"),
            "array-of-tables entries must be preserved"
        );
        assert!(result.output.contains("id = \"my-server\""));
    }

    /// Sections are reordered into CANONICAL_ORDER: [agent] before [memory].
    #[test]
    fn canonical_ordering_applied() {
        let user = r#"
[memory]
sqlite_path = ".zeph/data/zeph.db"
[agent]
name = "Test"
"#;
        let migrator = ConfigMigrator::new();
        let result = migrator.migrate(user).expect("migrate");
        // Both sections come from the user config, so they must be present in
        // the output; a silent `if let` here would let a regression through.
        let agent_pos = result
            .output
            .find("[agent]")
            .expect("agent section must be present in output");
        let memory_pos = result
            .output
            .find("[memory]")
            .expect("memory section must be present in output");
        assert!(
            agent_pos < memory_pos,
            "agent section should precede memory section"
        );
    }

    /// `value_to_toml_string` renders scalars and arrays in TOML syntax.
    #[test]
    fn value_to_toml_string_formats_correctly() {
        use toml_edit::Formatted;
        let s = make_formatted_str("hello");
        assert_eq!(value_to_toml_string(&s), "\"hello\"");
        let i = Value::Integer(Formatted::new(42_i64));
        assert_eq!(value_to_toml_string(&i), "42");
        let b = Value::Boolean(Formatted::new(true));
        assert_eq!(value_to_toml_string(&b), "true");
        let f = Value::Float(Formatted::new(1.0_f64));
        assert_eq!(value_to_toml_string(&f), "1.0");
        let f2 = Value::Float(Formatted::new(157_f64 / 50.0));
        assert_eq!(value_to_toml_string(&f2), "3.14");
        let arr: Array = ["a", "b"].into_iter().map(make_formatted_str).collect();
        let arr_val = Value::Array(arr);
        assert_eq!(value_to_toml_string(&arr_val), r#"["a", "b"]"#);
        let empty_arr = Value::Array(Array::new());
        assert_eq!(value_to_toml_string(&empty_arr), "[]");
    }

    /// Migrating the shipped reference config must be a pure no-op.
    #[test]
    fn full_config_produces_zero_additions() {
        let reference = include_str!("../config/default.toml");
        let migrator = ConfigMigrator::new();
        let result = migrator.migrate(reference).expect("migrate reference");
        assert_eq!(
            result.added_count, 0,
            "migrating the canonical reference should add nothing (added_count = {})",
            result.added_count
        );
        assert!(
            result.sections_added.is_empty(),
            "migrating the canonical reference should report no sections_added: {:?}",
            result.sections_added
        );
    }

    /// An empty config must report at least one addition.
    #[test]
    fn empty_config_added_count_is_positive() {
        let migrator = ConfigMigrator::new();
        let result = migrator.migrate("").expect("migrate empty");
        assert!(
            result.added_count > 0,
            "empty config must report added_count > 0"
        );
    }

    /// A [security] table without [security.guardrail] gains guardrail keys.
    #[test]
    fn security_without_guardrail_gets_guardrail_commented() {
        let user = "[security]\nredact_secrets = true\n";
        let migrator = ConfigMigrator::new();
        let result = migrator.migrate(user).expect("migrate");
        assert!(
            result.output.contains("guardrail"),
            "migration must add guardrail keys for configs without [security.guardrail]: \
             got:\n{}",
            result.output
        );
    }

    /// Guard: the reference config must keep the [tools.policy] section.
    #[test]
    fn migrate_reference_contains_tools_policy() {
        let reference = include_str!("../config/default.toml");
        assert!(
            reference.contains("[tools.policy]"),
            "default.toml must contain [tools.policy] section so migrate-config can surface it"
        );
        assert!(
            reference.contains("enabled = false"),
            "tools.policy section must include enabled = false default"
        );
    }

    /// Guard: the reference config must keep the compression-probe section.
    #[test]
    fn migrate_reference_contains_probe_section() {
        let reference = include_str!("../config/default.toml");
        assert!(
            reference.contains("[memory.compression.probe]"),
            "default.toml must contain [memory.compression.probe] section comment"
        );
        assert!(
            reference.contains("hard_fail_threshold"),
            "probe section must include hard_fail_threshold default"
        );
    }

    /// No [llm] section: the LLM migration leaves the input untouched.
    #[test]
    fn migrate_llm_no_llm_section_is_noop() {
        let src = "[agent]\nname = \"Zeph\"\n";
        let result = migrate_llm_to_providers(src).expect("migrate");
        assert_eq!(result.added_count, 0);
        assert_eq!(result.output, src);
    }

    /// Configs already using [[llm.providers]] are not migrated again.
    #[test]
    fn migrate_llm_already_new_format_is_noop() {
        let src = r#"
[llm]
[[llm.providers]]
type = "ollama"
model = "qwen3:8b"
"#;
        let result = migrate_llm_to_providers(src).expect("migrate");
        assert_eq!(result.added_count, 0);
    }

    /// Legacy `provider = "ollama"` becomes a [[llm.providers]] entry.
    #[test]
    fn migrate_llm_ollama_produces_providers_block() {
        let src = r#"
[llm]
provider = "ollama"
model = "qwen3:8b"
base_url = "http://localhost:11434"
embedding_model = "nomic-embed-text"
"#;
        let result = migrate_llm_to_providers(src).expect("migrate");
        assert!(
            result.output.contains("[[llm.providers]]"),
            "should contain [[llm.providers]]:\n{}",
            result.output
        );
        assert!(
            result.output.contains("type = \"ollama\""),
            "{}",
            result.output
        );
        assert!(
            result.output.contains("model = \"qwen3:8b\""),
            "{}",
            result.output
        );
    }

    /// Legacy claude config (with [llm.cloud]) migrates its fields over.
    #[test]
    fn migrate_llm_claude_produces_providers_block() {
        let src = r#"
[llm]
provider = "claude"
[llm.cloud]
model = "claude-sonnet-4-6"
max_tokens = 8192
server_compaction = true
"#;
        let result = migrate_llm_to_providers(src).expect("migrate");
        assert!(
            result.output.contains("[[llm.providers]]"),
            "{}",
            result.output
        );
        assert!(
            result.output.contains("type = \"claude\""),
            "{}",
            result.output
        );
        assert!(
            result.output.contains("model = \"claude-sonnet-4-6\""),
            "{}",
            result.output
        );
        assert!(
            result.output.contains("server_compaction = true"),
            "{}",
            result.output
        );
    }

    /// Legacy openai config copies base_url/model into the provider entry.
    #[test]
    fn migrate_llm_openai_copies_fields() {
        let src = r#"
[llm]
provider = "openai"
[llm.openai]
base_url = "https://api.openai.com/v1"
model = "gpt-4o"
max_tokens = 4096
"#;
        let result = migrate_llm_to_providers(src).expect("migrate");
        assert!(
            result.output.contains("type = \"openai\""),
            "{}",
            result.output
        );
        assert!(
            result
                .output
                .contains("base_url = \"https://api.openai.com/v1\""),
            "{}",
            result.output
        );
    }

    /// Legacy gemini config copies its fields into the provider entry.
    #[test]
    fn migrate_llm_gemini_copies_fields() {
        let src = r#"
[llm]
provider = "gemini"
[llm.gemini]
model = "gemini-2.0-flash"
max_tokens = 8192
base_url = "https://generativelanguage.googleapis.com"
"#;
        let result = migrate_llm_to_providers(src).expect("migrate");
        assert!(
            result.output.contains("type = \"gemini\""),
            "{}",
            result.output
        );
        assert!(
            result.output.contains("model = \"gemini-2.0-flash\""),
            "{}",
            result.output
        );
    }

    /// Each [[llm.compatible]] entry becomes its own [[llm.providers]] block.
    #[test]
    fn migrate_llm_compatible_copies_multiple_entries() {
        let src = r#"
[llm]
provider = "compatible"
[[llm.compatible]]
name = "proxy-a"
base_url = "http://proxy-a:8080/v1"
model = "llama3"
max_tokens = 4096
[[llm.compatible]]
name = "proxy-b"
base_url = "http://proxy-b:8080/v1"
model = "mistral"
max_tokens = 2048
"#;
        let result = migrate_llm_to_providers(src).expect("migrate");
        let count = result.output.matches("[[llm.providers]]").count();
        assert_eq!(
            count, 2,
            "expected 2 [[llm.providers]] blocks:\n{}",
            result.output
        );
        assert!(
            result.output.contains("name = \"proxy-a\""),
            "{}",
            result.output
        );
        assert!(
            result.output.contains("name = \"proxy-b\""),
            "{}",
            result.output
        );
    }

    /// Mixing legacy `provider =` with [[llm.providers]] is rejected.
    #[test]
    fn migrate_llm_mixed_format_errors() {
        let src = r#"
[llm]
provider = "ollama"
[[llm.providers]]
type = "ollama"
"#;
        assert!(
            migrate_llm_to_providers(src).is_err(),
            "mixed format must return error"
        );
    }

    /// No [llm.stt] section: STT migration echoes the input back.
    #[test]
    fn stt_migration_no_stt_section_returns_unchanged() {
        let src = "[llm]\n\n[[llm.providers]]\ntype = \"openai\"\nname = \"quality\"\nmodel = \"gpt-5.4\"\n";
        let result = migrate_stt_to_provider(src).unwrap();
        assert_eq!(result.added_count, 0);
        assert_eq!(result.output, src);
    }

    /// [llm.stt] with neither model nor base_url has nothing to move.
    #[test]
    fn stt_migration_no_model_or_base_url_returns_unchanged() {
        let src = "[llm]\n\n[[llm.providers]]\ntype = \"openai\"\nname = \"quality\"\n\n[llm.stt]\nprovider = \"quality\"\nlanguage = \"en\"\n";
        let result = migrate_stt_to_provider(src).unwrap();
        assert_eq!(result.added_count, 0);
    }

    /// `model` moves out of [llm.stt] into the matching provider's stt_model.
    #[test]
    fn stt_migration_moves_model_to_provider_entry() {
        let src = r#"
[llm]
[[llm.providers]]
type = "openai"
name = "quality"
model = "gpt-5.4"
[llm.stt]
provider = "quality"
model = "gpt-4o-mini-transcribe"
language = "en"
"#;
        let result = migrate_stt_to_provider(src).unwrap();
        assert_eq!(result.added_count, 1);
        assert!(
            result.output.contains("stt_model"),
            "stt_model must be in output"
        );
        let doc: toml_edit::DocumentMut = result.output.parse().unwrap();
        let stt = doc
            .get("llm")
            .and_then(toml_edit::Item::as_table)
            .and_then(|l| l.get("stt"))
            .and_then(toml_edit::Item::as_table)
            .unwrap();
        assert!(
            stt.get("model").is_none(),
            "model must be removed from [llm.stt]"
        );
        assert_eq!(
            stt.get("provider").and_then(toml_edit::Item::as_str),
            Some("quality")
        );
    }

    /// When no provider matches, a new "openai-stt" entry is created.
    #[test]
    fn stt_migration_creates_new_provider_when_no_match() {
        let src = r#"
[llm]
[[llm.providers]]
type = "ollama"
name = "local"
model = "qwen3:8b"
[llm.stt]
provider = "whisper"
model = "whisper-1"
base_url = "https://api.openai.com/v1"
language = "en"
"#;
        let result = migrate_stt_to_provider(src).unwrap();
        assert!(
            result.output.contains("openai-stt"),
            "new entry name must be openai-stt"
        );
        assert!(
            result.output.contains("stt_model"),
            "stt_model must be in output"
        );
    }

    /// candle-whisper gets a dedicated "local-whisper" candle provider entry.
    #[test]
    fn stt_migration_candle_whisper_creates_candle_entry() {
        let src = r#"
[llm]
[llm.stt]
provider = "candle-whisper"
model = "openai/whisper-tiny"
language = "auto"
"#;
        let result = migrate_stt_to_provider(src).unwrap();
        assert!(
            result.output.contains("local-whisper"),
            "candle entry name must be local-whisper"
        );
        assert!(result.output.contains("candle"), "type must be candle");
    }

    /// A previously unnamed provider that receives stt_model gets a name.
    #[test]
    fn stt_migration_w2_assigns_explicit_name() {
        let src = r#"
[llm]
[[llm.providers]]
type = "openai"
model = "gpt-5.4"
[llm.stt]
provider = "openai"
model = "whisper-1"
language = "auto"
"#;
        let result = migrate_stt_to_provider(src).unwrap();
        let doc: toml_edit::DocumentMut = result.output.parse().unwrap();
        let providers = doc
            .get("llm")
            .and_then(toml_edit::Item::as_table)
            .and_then(|l| l.get("providers"))
            .and_then(toml_edit::Item::as_array_of_tables)
            .unwrap();
        let entry = providers
            .iter()
            .find(|t| t.get("stt_model").is_some())
            .unwrap();
        assert!(
            entry.get("name").is_some(),
            "migrated entry must have explicit name"
        );
    }

    /// Both `model` and `base_url` are removed from [llm.stt] after the move.
    #[test]
    fn stt_migration_removes_base_url_from_stt_table() {
        let src = r#"
[llm]
[[llm.providers]]
type = "openai"
name = "quality"
model = "gpt-5.4"
[llm.stt]
provider = "quality"
model = "whisper-1"
base_url = "https://api.openai.com/v1"
language = "en"
"#;
        let result = migrate_stt_to_provider(src).unwrap();
        let doc: toml_edit::DocumentMut = result.output.parse().unwrap();
        let stt = doc
            .get("llm")
            .and_then(toml_edit::Item::as_table)
            .and_then(|l| l.get("stt"))
            .and_then(toml_edit::Item::as_table)
            .unwrap();
        assert!(
            stt.get("model").is_none(),
            "model must be removed from [llm.stt]"
        );
        assert!(
            stt.get("base_url").is_none(),
            "base_url must be removed from [llm.stt]"
        );
    }

    /// `planner_model` is removed and replaced by a commented MIGRATED note
    /// suggesting `planner_provider` with the old value embedded.
    #[test]
    fn migrate_planner_model_to_provider_with_field() {
        let input = r#"
[orchestration]
enabled = true
planner_model = "gpt-4o"
max_tasks = 20
"#;
        let result = migrate_planner_model_to_provider(input).expect("migration must succeed");
        assert_eq!(result.added_count, 1, "added_count must be 1");
        assert!(
            !result.output.contains("planner_model = "),
            "planner_model key must be removed from output"
        );
        assert!(
            result.output.contains("# planner_provider"),
            "commented-out planner_provider entry must be present"
        );
        assert!(
            result.output.contains("gpt-4o"),
            "old value must appear in the comment"
        );
        assert!(
            result.output.contains("MIGRATED"),
            "comment must include MIGRATED marker"
        );
    }

    /// Absent `planner_model` means the planner migration is a no-op.
    #[test]
    fn migrate_planner_model_to_provider_no_op() {
        let input = r"
[orchestration]
enabled = true
max_tasks = 20
";
        let result = migrate_planner_model_to_provider(input).expect("migration must succeed");
        assert_eq!(
            result.added_count, 0,
            "added_count must be 0 when field is absent"
        );
        assert_eq!(
            result.output, input,
            "output must equal input when nothing to migrate"
        );
    }

    /// `MigrateError::InvalidStructure` Display includes marker and reason.
    #[test]
    fn migrate_error_invalid_structure_formats_correctly() {
        let err = MigrateError::InvalidStructure("test sentinel");
        assert!(
            matches!(err, MigrateError::InvalidStructure(_)),
            "variant must match"
        );
        let msg = err.to_string();
        assert!(
            msg.contains("invalid TOML structure"),
            "error message must mention 'invalid TOML structure', got: {msg}"
        );
        assert!(
            msg.contains("test sentinel"),
            "message must include reason: {msg}"
        );
    }

    /// Every [[mcp.servers]] entry without trust_level gets "trusted".
    #[test]
    fn migrate_mcp_trust_levels_adds_trusted_to_entries_without_field() {
        let src = r#"
[mcp]
allowed_commands = ["npx"]
[[mcp.servers]]
id = "srv-a"
command = "npx"
args = ["-y", "some-mcp"]
[[mcp.servers]]
id = "srv-b"
command = "npx"
args = ["-y", "other-mcp"]
"#;
        let result = migrate_mcp_trust_levels(src).expect("migrate");
        assert_eq!(
            result.added_count, 2,
            "both entries must get trust_level added"
        );
        assert!(
            result
                .sections_added
                .contains(&"mcp.servers.trust_level".to_owned()),
            "sections_added must report mcp.servers.trust_level"
        );
        let occurrences = result.output.matches("trust_level = \"trusted\"").count();
        assert_eq!(
            occurrences, 2,
            "each entry must have trust_level = \"trusted\""
        );
    }

    /// Entries that already declare trust_level keep their value.
    #[test]
    fn migrate_mcp_trust_levels_does_not_overwrite_existing_field() {
        let src = r#"
[[mcp.servers]]
id = "srv-a"
command = "npx"
trust_level = "sandboxed"
tool_allowlist = ["read_file"]
[[mcp.servers]]
id = "srv-b"
command = "npx"
"#;
        let result = migrate_mcp_trust_levels(src).expect("migrate");
        assert_eq!(
            result.added_count, 1,
            "only entry without trust_level gets updated"
        );
        assert!(
            result.output.contains("trust_level = \"sandboxed\""),
            "existing trust_level must not be overwritten"
        );
        assert!(
            result.output.contains("trust_level = \"trusted\""),
            "entry without trust_level must get trusted"
        );
    }

    /// No [mcp] section: trust-level migration is a no-op.
    #[test]
    fn migrate_mcp_trust_levels_no_mcp_section_is_noop() {
        let src = "[agent]\nname = \"Zeph\"\n";
        let result = migrate_mcp_trust_levels(src).expect("migrate");
        assert_eq!(result.added_count, 0);
        assert!(result.sections_added.is_empty());
        assert_eq!(result.output, src);
    }

    /// [mcp] without any servers: trust-level migration is a no-op.
    #[test]
    fn migrate_mcp_trust_levels_no_servers_is_noop() {
        let src = "[mcp]\nallowed_commands = [\"npx\"]\n";
        let result = migrate_mcp_trust_levels(src).expect("migrate");
        assert_eq!(result.added_count, 0);
        assert!(result.sections_added.is_empty());
        assert_eq!(result.output, src);
    }

    /// All entries already carry trust_level: nothing is added.
    #[test]
    fn migrate_mcp_trust_levels_all_entries_already_have_field_is_noop() {
        let src = r#"
[[mcp.servers]]
id = "srv-a"
trust_level = "trusted"
[[mcp.servers]]
id = "srv-b"
trust_level = "untrusted"
"#;
        let result = migrate_mcp_trust_levels(src).expect("migrate");
        assert_eq!(result.added_count, 0);
        assert!(result.sections_added.is_empty());
    }

    /// Missing database_url yields a commented-out placeholder in [memory].
    #[test]
    fn migrate_database_url_adds_comment_when_absent() {
        let src = "[memory]\nsqlite_path = \"/tmp/zeph.db\"\n";
        let result = migrate_database_url(src).expect("migrate");
        assert_eq!(result.added_count, 1);
        assert!(
            result
                .sections_added
                .contains(&"memory.database_url".to_owned())
        );
        assert!(result.output.contains("# database_url = \"\""));
    }

    /// Existing database_url means the migration leaves the config alone.
    #[test]
    fn migrate_database_url_is_noop_when_present() {
        let src = "[memory]\nsqlite_path = \"/tmp/zeph.db\"\ndatabase_url = \"postgres://localhost/zeph\"\n";
        let result = migrate_database_url(src).expect("migrate");
        assert_eq!(result.added_count, 0);
        assert!(result.sections_added.is_empty());
        assert_eq!(result.output, src);
    }

    /// The migration creates a [memory] section when the config lacks one.
    #[test]
    fn migrate_database_url_creates_memory_section_when_absent() {
        let src = "[agent]\nname = \"Zeph\"\n";
        let result = migrate_database_url(src).expect("migrate");
        assert_eq!(result.added_count, 1);
        assert!(result.output.contains("# database_url = \"\""));
    }
}