use super::*;
use crate::core::CustomProviderConfig;
use crate::defaults::{self, SyntaxHighlightingDefaults, WorkspacePathsDefaults};
use crate::ide_context::{
IdeContextProviderConfig, IdeContextProviderMode, IdeContextProvidersConfig,
};
use crate::loader::layers::ConfigLayerSource;
use serial_test::serial;
use std::fs;
use std::io::Write;
use std::sync::Arc;
use tempfile::NamedTempFile;
use vtcode_commons::reference::StaticWorkspacePaths;
/// User-level config should layer beneath the workspace config: both values
/// surface in the merged config, and the layer stack records the user layer
/// first, then the workspace layer.
#[test]
#[serial]
fn test_layered_config_loading() {
    let temp_workspace = assert_fs::TempDir::new().expect("failed to create workspace");
    let root = temp_workspace.path();
    let home = root.join("home");
    fs::create_dir_all(&home).expect("failed to create home dir");

    // User layer sets the provider; workspace layer sets the model.
    let user_path = home.join("vtcode.toml");
    fs::write(&user_path, "agent.provider = \"anthropic\"").expect("failed to write user config");
    let workspace_path = root.join("vtcode.toml");
    fs::write(
        &workspace_path,
        "agent.default_model = \"claude-haiku-4-5\"",
    )
    .expect("failed to write workspace config");

    let paths = StaticWorkspacePaths::new(root, root.join(".vtcode"));
    let defaults_provider =
        WorkspacePathsDefaults::new(Arc::new(paths)).with_home_paths(vec![user_path.clone()]);
    defaults::provider::with_config_defaults_provider_for_test(Arc::new(defaults_provider), || {
        let manager = ConfigManager::load_from_workspace(root).expect("failed to load config");
        let config = manager.config();
        // Values from both layers must coexist in the merged view.
        assert_eq!(config.agent.provider, "anthropic");
        assert_eq!(config.agent.default_model, "claude-haiku-4-5");

        // Layer ordering: user first, workspace second.
        let layers = manager.layer_stack().layers();
        assert_eq!(layers.len(), 2);
        assert!(matches!(layers[0].source, ConfigLayerSource::User { .. }));
        assert!(matches!(
            layers[1].source,
            ConfigLayerSource::Workspace { .. }
        ));
    });
}
/// An unparsable user-level layer must fail the load, and the rendered error
/// chain must name the offending file so the user can locate it.
#[test]
#[serial]
fn test_invalid_layer_is_reported_with_source_context() {
    let workspace = assert_fs::TempDir::new().expect("failed to create workspace");
    let workspace_root = workspace.path();
    let home_dir = workspace_root.join("home");
    fs::create_dir_all(&home_dir).expect("failed to create home dir");
    let user_config_path = home_dir.join("vtcode.toml");
    // Truncated table header makes the user layer invalid TOML.
    fs::write(&user_config_path, "[agent\nprovider = \"openai\"")
        .expect("failed to write invalid user config");
    let workspace_config_path = workspace_root.join("vtcode.toml");
    fs::write(&workspace_config_path, "agent.provider = \"anthropic\"")
        .expect("failed to write workspace config");
    let static_paths = StaticWorkspacePaths::new(workspace_root, workspace_root.join(".vtcode"));
    let provider = WorkspacePathsDefaults::new(Arc::new(static_paths))
        .with_home_paths(vec![user_config_path.clone()]);
    defaults::provider::with_config_defaults_provider_for_test(Arc::new(provider), || {
        // Panic directly in the Ok arm instead of mapping success to an empty
        // string and relying on a separate `is_err` assert; a plain match also
        // avoids `expect_err`, which would require Debug on the Ok type.
        let error = match ConfigManager::load_from_workspace(workspace_root) {
            Ok(_) => panic!("expected load to fail for invalid layer"),
            Err(err) => format!("{:#}", err),
        };
        assert!(
            error.contains(&user_config_path.display().to_string()),
            "error should include invalid layer path, got: {}",
            error
        );
    });
}
/// CLI overrides supplied through `ConfigBuilder` must win over the workspace
/// file and appear as a trailing runtime layer in the stack.
#[test]
#[serial]
fn test_config_builder_overrides() {
    let temp_workspace = assert_fs::TempDir::new().expect("failed to create workspace");
    let root = temp_workspace.path();
    fs::write(root.join("vtcode.toml"), "agent.provider = \"openai\"")
        .expect("failed to write workspace config");

    let paths = StaticWorkspacePaths::new(root, root.join(".vtcode"));
    let defaults_provider = WorkspacePathsDefaults::new(Arc::new(paths)).with_home_paths(vec![]);
    defaults::provider::with_config_defaults_provider_for_test(Arc::new(defaults_provider), || {
        let manager = ConfigBuilder::new()
            .workspace(root.to_path_buf())
            .cli_override(
                "agent.provider".to_string(),
                toml::Value::String("gemini".to_string()),
            )
            .cli_override(
                "agent.default_model".to_string(),
                toml::Value::String("gemini-1.5-pro".to_string()),
            )
            .build()
            .expect("failed to build config");

        // Both CLI overrides shadow the workspace file's values.
        let config = manager.config();
        assert_eq!(config.agent.provider, "gemini");
        assert_eq!(config.agent.default_model, "gemini-1.5-pro");

        // Layer ordering: workspace first, runtime overrides last.
        let layers = manager.layer_stack().layers();
        assert_eq!(layers.len(), 2);
        assert!(matches!(
            layers[0].source,
            ConfigLayerSource::Workspace { .. }
        ));
        assert!(matches!(layers[1].source, ConfigLayerSource::Runtime));
    });
}
/// `insert_dotted_key` should expand a dotted path like "a.b.c" into nested
/// tables terminating in the supplied value.
#[test]
fn test_insert_dotted_key() {
    let mut root = toml::Table::new();
    ConfigBuilder::insert_dotted_key(
        &mut root,
        "a.b.c",
        toml::Value::String("value".to_string()),
    );
    // Walk the nested tables with combinators instead of an unwrap ladder.
    let leaf = root
        .get("a")
        .and_then(toml::Value::as_table)
        .and_then(|a| a.get("b"))
        .and_then(toml::Value::as_table)
        .and_then(|b| b.get("c"))
        .and_then(toml::Value::as_str);
    assert_eq!(leaf, Some("value"));
}
/// Overlay values replace base values key-by-key while untouched base tables
/// survive the merge.
#[test]
fn test_merge_toml_values() {
    let mut merged = toml::from_str::<toml::Value>(
        r#"
[agent]
provider = "openai"
[tools]
default_policy = "prompt"
"#,
    )
    .unwrap();
    let patch = toml::from_str::<toml::Value>(
        r#"
[agent]
provider = "anthropic"
default_model = "claude-3"
"#,
    )
    .unwrap();
    merge_toml_values(&mut merged, &patch);

    // Small helper to pull a string out of `[table] key = "..."`.
    let lookup = |table: &str, key: &str| -> Option<String> {
        merged
            .get(table)
            .and_then(|section| section.get(key))
            .and_then(toml::Value::as_str)
            .map(str::to_owned)
    };
    // Overlay replaced the provider and added the model…
    assert_eq!(lookup("agent", "provider").as_deref(), Some("anthropic"));
    assert_eq!(lookup("agent", "default_model").as_deref(), Some("claude-3"));
    // …while the untouched [tools] table is preserved.
    assert_eq!(lookup("tools", "default_policy").as_deref(), Some("prompt"));
}
/// After a merge with origin tracking, the origin map should record which
/// layer supplied the winning value for each dotted key.
#[test]
fn test_merge_toml_values_with_origins_tracks_winning_layer() {
    use crate::loader::layers::ConfigLayerMetadata;
    let mut merged = toml::from_str::<toml::Value>(
        r#"
[agent]
provider = "openai"
"#,
    )
    .unwrap();
    let patch = toml::from_str::<toml::Value>(
        r#"
[agent]
provider = "anthropic"
"#,
    )
    .unwrap();
    let winning_layer = ConfigLayerMetadata {
        name: "workspace:/tmp/vtcode.toml".to_string(),
        version: "abc123".to_string(),
    };
    let mut origins = hashbrown::HashMap::new();
    merge_toml_values_with_origins(&mut merged, &patch, &mut origins, &winning_layer);

    // The patch value won…
    let provider = merged
        .get("agent")
        .and_then(|agent| agent.get("provider"))
        .and_then(toml::Value::as_str);
    assert_eq!(provider, Some("anthropic"));
    // …and the origin map attributes it to the patch's layer metadata.
    assert_eq!(origins.get("agent.provider"), Some(&winning_layer));
}
/// The default syntax highlighting config must validate, and must ship with an
/// empty language allowlist (empty means "all syntect grammars").
#[test]
#[serial]
fn syntax_highlighting_defaults_are_valid() {
    let defaults = SyntaxHighlightingConfig::default();
    defaults
        .validate()
        .expect("default syntax highlighting config should be valid");
    assert!(
        defaults.enabled_languages.is_empty(),
        "default enabled_languages should be empty to allow all syntect grammars"
    );
}
/// A zero highlight timeout in the full config must fail validation, with an
/// error message that mentions the highlight setting.
#[test]
fn vtcode_config_validation_fails_for_invalid_highlight_timeout() {
    let mut config = VTCodeConfig::default();
    config.syntax_highlighting.highlight_timeout_ms = 0;
    let error = config
        .validate()
        .expect_err("validation should fail for zero highlight timeout");
    let rendered = format!("{:#}", error);
    assert!(
        rendered.contains("highlight"),
        "expected error to mention highlight, got: {:#}",
        error
    );
}
/// Loading a file with `highlight_timeout_ms = 0` must fail, and the error's
/// Debug rendering should carry the validation context.
#[test]
fn load_from_file_rejects_invalid_syntax_highlighting() {
    let mut temp_file = NamedTempFile::new().expect("failed to create temp file");
    writeln!(
        temp_file,
        "[syntax_highlighting]\nhighlight_timeout_ms = 0\n"
    )
    .expect("failed to write temp config");
    // Match directly instead of the previous `is_err` + `err().unwrap()`
    // double-step; the Ok arm panics, which keeps the failure explicit without
    // requiring a Debug bound on the Ok type (as `expect_err` would).
    let error = match ConfigManager::load_from_file(temp_file.path()) {
        Ok(_) => panic!("expected validation error"),
        Err(err) => format!("{:?}", err),
    };
    assert!(
        error.contains("validate"),
        "expected validation context in error, got: {}",
        error
    );
}
/// Every ide_context field, including the nested per-provider toggles, must
/// survive a TOML serialize → deserialize round trip.
#[test]
fn ide_context_fields_round_trip_through_toml() {
    let mut config = VTCodeConfig::default();
    {
        // Flip every field away from its default so the round trip is meaningful.
        let ide = &mut config.ide_context;
        ide.enabled = false;
        ide.inject_into_prompt = false;
        ide.show_in_tui = false;
        ide.include_selection_text = false;
        ide.provider_mode = IdeContextProviderMode::Zed;
        ide.providers = IdeContextProvidersConfig {
            vscode_compatible: IdeContextProviderConfig { enabled: false },
            zed: IdeContextProviderConfig { enabled: true },
            generic: IdeContextProviderConfig { enabled: false },
        };
    }

    let serialized = toml::to_string(&config).expect("serialize config");
    let parsed: VTCodeConfig = toml::from_str(&serialized).expect("parse config");

    let round_tripped = &parsed.ide_context;
    assert!(!round_tripped.enabled);
    assert!(!round_tripped.inject_into_prompt);
    assert!(!round_tripped.show_in_tui);
    assert!(!round_tripped.include_selection_text);
    assert_eq!(round_tripped.provider_mode, IdeContextProviderMode::Zed);
    assert!(!round_tripped.providers.vscode_compatible.enabled);
    assert!(round_tripped.providers.zed.enabled);
    assert!(!round_tripped.providers.generic.enabled);
}
/// A custom provider entry should round trip through TOML unchanged and pass
/// config validation afterwards.
#[test]
fn custom_providers_fields_round_trip_through_toml() {
    // Build the entry up front, then attach it to a default config.
    let entry = CustomProviderConfig {
        name: "mycorp".to_string(),
        display_name: "MyCorp".to_string(),
        base_url: "https://llm.corp.example/v1".to_string(),
        api_key_env: "MYCORP_API_KEY".to_string(),
        auth: None,
        model: "gpt-5-mini".to_string(),
    };
    let mut config = VTCodeConfig::default();
    config.custom_providers.push(entry);

    let serialized = toml::to_string(&config).expect("serialize config");
    let parsed: VTCodeConfig = toml::from_str(&serialized).expect("parse config");
    parsed
        .validate()
        .expect("custom provider config should validate");

    assert_eq!(parsed.custom_providers.len(), 1);
    let provider = &parsed.custom_providers[0];
    assert_eq!(provider.name, "mycorp");
    assert_eq!(provider.display_name, "MyCorp");
    assert_eq!(provider.base_url, "https://llm.corp.example/v1");
    assert_eq!(provider.api_key_env, "MYCORP_API_KEY");
    assert_eq!(provider.model, "gpt-5-mini");
}
/// Provider-scoped prompt cache settings (retention and key mode for OpenAI)
/// should load from a TOML file.
#[test]
fn loader_loads_prompt_cache_retention_from_toml() {
    let temp = tempfile::tempdir().unwrap();
    let path = temp.path().join("vtcode.toml");
    // `fs::write` replaces the manual File::create + write_all sequence and
    // makes the former function-local `use std::fs::File; use std::io::Write;`
    // unnecessary (fs is already imported at the top of the file).
    let contents = r#"
[prompt_cache]
enabled = true
cache_friendly_prompt_shaping = true
[prompt_cache.providers.openai]
prompt_cache_retention = "24h"
prompt_cache_key_mode = "off"
"#;
    fs::write(&path, contents).unwrap();
    let manager = ConfigManager::load_from_file(&path).unwrap();
    let config = manager.config();
    assert_eq!(
        config.prompt_cache.providers.openai.prompt_cache_retention,
        Some("24h".to_string())
    );
    assert_eq!(
        config.prompt_cache.providers.openai.prompt_cache_key_mode,
        crate::core::OpenAIPromptCacheKeyMode::Off
    );
    assert!(config.prompt_cache.cache_friendly_prompt_shaping);
}
/// The `[tools.editor]` section should load from a TOML file with all three
/// fields populated.
#[test]
fn loader_loads_tools_editor_config_from_toml() {
    let temp = tempfile::tempdir().unwrap();
    let path = temp.path().join("vtcode.toml");
    // `fs::write` replaces File::create + write_all and drops the redundant
    // function-local fs/Write imports (fs is imported at the top of the file).
    let contents = r#"
[tools.editor]
enabled = true
preferred_editor = "code --wait"
suspend_tui = false
"#;
    fs::write(&path, contents).unwrap();
    let manager = ConfigManager::load_from_file(&path).unwrap();
    let config = manager.config();
    assert!(config.tools.editor.enabled);
    assert_eq!(config.tools.editor.preferred_editor, "code --wait");
    assert!(!config.tools.editor.suspend_tui);
}
/// `save_config_to_path` should edit the TOML file in place: comments in the
/// original file survive while changed values are rewritten.
/// (The former function-local `use std::io::Write;` was redundant with the
/// file-level import and has been removed.)
#[test]
fn save_config_preserves_comments() {
    let mut temp_file = NamedTempFile::new().expect("failed to create temp file");
    let config_with_comments = r#"# This is a test comment
[agent]
# Provider comment
provider = "anthropic"
default_model = "gpt-5-nano"
# Tools section comment
[tools]
default_policy = "deny"
"#;
    write!(temp_file, "{}", config_with_comments).expect("failed to write temp config");
    temp_file.flush().expect("failed to flush");

    let manager = ConfigManager::load_from_file(temp_file.path()).expect("failed to load config");
    let mut modified_config = manager.config().clone();
    modified_config.agent.default_model = "gpt-5".to_string();
    ConfigManager::save_config_to_path(temp_file.path(), &modified_config)
        .expect("failed to save config");

    let saved_content = fs::read_to_string(temp_file.path()).expect("failed to read saved config");
    // Table-driven contains-checks: each comment must survive, and the edited
    // value must be present.
    for (needle, reason) in [
        (
            "# This is a test comment",
            "top-level comment should be preserved",
        ),
        ("# Provider comment", "inline comment should be preserved"),
        (
            "# Tools section comment",
            "section comment should be preserved",
        ),
        ("gpt-5", "modified value should be present"),
    ] {
        assert!(saved_content.contains(needle), "{}", reason);
    }
}
/// A custom defaults provider should steer both the resolved config file path
/// and the syntax highlighting theme/language defaults.
#[test]
#[serial]
fn config_defaults_provider_overrides_paths_and_theme() {
    let temp_workspace = assert_fs::TempDir::new().expect("failed to create workspace");
    let root = temp_workspace.path();
    let config_dir = root.join("config-root");
    fs::create_dir_all(&config_dir).expect("failed to create config directory");

    // Seed a default config file under a non-standard name and directory.
    let config_file_name = "custom-config.toml";
    let config_path = config_dir.join(config_file_name);
    let serialized =
        toml::to_string(&VTCodeConfig::default()).expect("failed to serialize default config");
    fs::write(&config_path, serialized).expect("failed to write config file");

    let paths = StaticWorkspacePaths::new(root, &config_dir);
    let defaults_provider = WorkspacePathsDefaults::new(Arc::new(paths))
        .with_config_file_name(config_file_name)
        .with_home_paths(Vec::new())
        .with_syntax_theme("custom-theme")
        .with_syntax_languages(vec!["zig".to_string()]);
    defaults::provider::with_config_defaults_provider_for_test(Arc::new(defaults_provider), || {
        let manager =
            ConfigManager::load_from_workspace(root).expect("failed to load workspace config");
        let resolved_path = manager
            .config_path()
            .expect("config path should be resolved");
        // Canonicalize both sides before comparing — presumably to tolerate
        // symlinked temp directories; verify if this ever becomes flaky.
        let resolved_canonical =
            fs::canonicalize(resolved_path).expect("resolved config path should canonicalize");
        let expected_canonical =
            fs::canonicalize(&config_path).expect("expected config path should canonicalize");
        assert_eq!(resolved_canonical, expected_canonical);
        assert_eq!(SyntaxHighlightingDefaults::theme(), "custom-theme");
        assert_eq!(
            SyntaxHighlightingDefaults::enabled_languages(),
            vec!["zig".to_string()]
        );
    });
}
/// Saving a modified config should rewrite the file on disk: changed values
/// are persisted, values equal to their defaults are pruned, and a reload
/// observes both effects.
#[test]
#[serial]
fn save_config_updates_disk_file() {
    let temp_dir = tempfile::tempdir().unwrap();
    let root = temp_dir.path();
    let config_path = root.join("vtcode.toml");
    fs::write(
        &config_path,
        r#"
[ui]
display_mode = "minimal"
show_sidebar = false
"#,
    )
    .expect("failed to write initial config");

    let mut manager = ConfigManager::load_from_workspace(root).expect("failed to load config");
    assert_eq!(
        manager.config().ui.display_mode,
        crate::UiDisplayMode::Minimal
    );

    // Flip display_mode to a non-default and show_sidebar back to its default.
    let mut updated = manager.config().clone();
    updated.ui.display_mode = crate::UiDisplayMode::Full;
    updated.ui.show_sidebar = true;
    manager.save_config(&updated).expect("failed to save config");

    let saved_content = fs::read_to_string(&config_path).expect("failed to read saved config");
    assert!(
        saved_content.contains("display_mode = \"full\""),
        "saved config should contain full display_mode. Got:\n{}",
        saved_content
    );
    assert!(
        !saved_content.contains("show_sidebar"),
        "saved config should prune default show_sidebar. Got:\n{}",
        saved_content
    );

    // Both reload paths must observe the saved state.
    let reloaded = ConfigManager::load_from_workspace(root).expect("failed to reload config");
    assert_eq!(
        reloaded.config().ui.display_mode,
        crate::UiDisplayMode::Full,
        "reloaded config should have full display_mode"
    );
    let reloaded_from_file =
        ConfigManager::load_from_file(&config_path).expect("failed to reload from file");
    assert!(
        reloaded_from_file.config().ui.show_sidebar,
        "reloaded config should have show_sidebar = true, got: {}",
        reloaded_from_file.config().ui.show_sidebar
    );
}
/// Saving against an empty config file should emit only the non-default
/// values (sparse output) without expanding untouched sections to their
/// defaults, and the sparse file must reload correctly.
#[test]
#[serial]
fn save_config_writes_sparse_model_theme_and_mode_values() {
    let temp_dir = tempfile::tempdir().unwrap();
    let root = temp_dir.path();
    let config_path = root.join("vtcode.toml");
    fs::write(&config_path, "").expect("failed to write initial config");

    let mut manager = ConfigManager::load_from_workspace(root).expect("failed to load config");
    let mut updated = manager.config().clone();
    updated.agent.default_model = "gpt-5.4".to_string();
    updated.agent.theme = "ansi".to_string();
    updated.permissions.default_mode = crate::PermissionMode::Plan;
    manager.save_config(&updated).expect("failed to save config");

    // The three changed values (and their section headers) must be written out.
    let saved_content = fs::read_to_string(&config_path).expect("failed to read saved config");
    for expected in [
        "[agent]",
        "default_model = \"gpt-5.4\"",
        "theme = \"ansi\"",
        "[permissions]",
        "default_mode = \"plan\"",
    ] {
        assert!(saved_content.contains(expected));
    }
    // Nothing that still matches its default should be expanded.
    assert!(
        !saved_content.contains("provider = \"openai\""),
        "default agent provider should not be expanded. Got:\n{}",
        saved_content
    );
    assert!(
        !saved_content.contains("[ui]"),
        "default UI section should not be expanded. Got:\n{}",
        saved_content
    );

    let reloaded = ConfigManager::load_from_workspace(root).expect("failed to reload config");
    assert_eq!(reloaded.config().agent.default_model, "gpt-5.4");
    assert_eq!(reloaded.config().agent.theme, "ansi");
    assert_eq!(
        reloaded.config().permissions.default_mode,
        crate::PermissionMode::Plan
    );
}
/// Round-tripping a config file that contains deprecated keys through
/// `save_config` should drop those keys while keeping still-supported values.
#[test]
#[serial]
fn save_config_removes_deprecated_config_keys() {
    let temp_dir = tempfile::tempdir().unwrap();
    let root = temp_dir.path();
    let config_path = root.join("vtcode.toml");
    fs::write(
        &config_path,
        r#"
project_doc_max_bytes = 1
project_doc_fallback_filenames = ["RULES.md"]
[agent]
default_model = "gpt-5.4"
autonomous_mode = true
default_editing_mode = "plan"
[permissions]
default_mode = "plan"
allowed_tools = ["read_file"]
disallowed_tools = ["unified_exec"]
"#,
    )
    .expect("failed to write initial config");

    // Load and immediately save the unchanged snapshot back to disk.
    let mut manager = ConfigManager::load_from_workspace(root).expect("failed to load config");
    let snapshot = manager.config().clone();
    manager.save_config(&snapshot).expect("failed to save config");

    let saved_content = fs::read_to_string(&config_path).expect("failed to read saved config");
    let deprecated_keys = [
        "project_doc_max_bytes",
        "project_doc_fallback_filenames",
        "autonomous_mode",
        "default_editing_mode",
        "allowed_tools",
        "disallowed_tools",
    ];
    for removed_key in deprecated_keys {
        assert!(
            !saved_content.contains(removed_key),
            "saved config should remove deprecated key {removed_key}. Got:\n{saved_content}"
        );
    }
    // Supported values survive the rewrite.
    assert!(saved_content.contains("default_model = \"gpt-5.4\""));
    assert!(saved_content.contains("default_mode = \"plan\""));
}