use std::collections::{BTreeMap, HashMap, HashSet};
use std::rc::Rc;
use std::sync::{Mutex, OnceLock};
use crate::events::{emit_log, EventLevel};
use crate::value::{VmError, VmValue};
static PROVIDER_KEY_CACHE: OnceLock<Mutex<HashMap<String, bool>>> = OnceLock::new();
static MODEL_TIER_WARNING_CACHE: OnceLock<Mutex<HashSet<String>>> = OnceLock::new();
static PROVIDER_INFERENCE_WARNING_CACHE: OnceLock<Mutex<HashSet<String>>> = OnceLock::new();
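/// Reports whether credentials for `provider` can be resolved (see `resolve_api_key`).
/// The answer is memoized per provider for the life of the process.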
pub(crate) fn provider_key_available(provider: &str) -> bool {
let cache = PROVIDER_KEY_CACHE.get_or_init(|| Mutex::new(HashMap::new()));
let mut map = cache.lock().unwrap();
if let Some(&available) = map.get(provider) {
return available;
}
let available = resolve_api_key(provider).is_ok();
map.insert(provider.to_string(), available);
available
}
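/// Clears the per-provider key cache so tests that mutate credential env vars
/// observe fresh results from `provider_key_available`.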
#[cfg(test)]
pub(crate) fn reset_provider_key_cache() {
if let Some(cache) = PROVIDER_KEY_CACHE.get() {
cache.lock().unwrap().clear();
}
}
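/// Appends `value` to `items` unless it is empty or already present,
/// preserving insertion order.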
fn push_unique(items: &mut Vec<String>, value: impl Into<String>) {
let value = value.into();
if !value.is_empty() && !items.iter().any(|existing| existing == &value) {
items.push(value);
}
}
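/// Warns, once per unique `target`/`requested_provider`/`chosen` combination, that a
/// model tier fell back from the requested provider to a reachable one.
/// `chosen` is the `(model, provider)` pair that was actually selected.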
fn warn_model_tier_fallback(target: &str, requested_provider: Option<&str>, chosen: (&str, &str)) {
let key = format!(
"{target}|{}|{}|{}",
requested_provider.unwrap_or(""),
chosen.0,
chosen.1
);
let cache = MODEL_TIER_WARNING_CACHE.get_or_init(|| Mutex::new(HashSet::new()));
let mut guard = cache.lock().unwrap();
if !guard.insert(key) {
return;
}
drop(guard);
let requested_desc = requested_provider
.map(|p| format!("provider '{p}'"))
.unwrap_or_else(|| "the default tier mapping".to_string());
emit_log(
EventLevel::Warn,
"llm",
&format!(
"model_tier '{target}' could not use {requested_desc} in the current environment; \
falling back to reachable provider '{}' with model '{}'",
chosen.1,
chosen.0
),
BTreeMap::new(),
);
}
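/// Warns, once per `model_id`/`provider` pair, that the provider could not be
/// inferred from the model id and the configured default provider was used instead.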
fn warn_provider_default_fallback(model_id: &str, provider: &str) {
let key = format!("{model_id}|{provider}");
let cache = PROVIDER_INFERENCE_WARNING_CACHE.get_or_init(|| Mutex::new(HashSet::new()));
let mut guard = cache.lock().unwrap();
if !guard.insert(key) {
return;
}
drop(guard);
crate::events::log_warn_meta(
"llm.provider",
&format!(
"could not infer provider from model id '{model_id}'; falling back to default provider '{provider}'"
),
BTreeMap::from([
(
"model".to_string(),
serde_json::Value::String(model_id.to_string()),
),
(
"provider".to_string(),
serde_json::Value::String(provider.to_string()),
),
(
"reason".to_string(),
serde_json::Value::String("default_provider_fallback".to_string()),
),
]),
);
}
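/// Infers a provider from a raw model selector: an explicit provider from the
/// resolved model mapping wins; otherwise the configured inference rules decide,
/// warning once when only the default-provider fallback matched.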
fn infer_provider_from_model_selector(raw_model: &str, warn_on_default: bool) -> String {
use crate::llm::provider::ProviderInferenceSource;
use crate::llm_config;
let (_resolved_model, resolved_provider) = llm_config::resolve_model(raw_model);
if let Some(provider) = resolved_provider {
return provider;
}
let inference = llm_config::infer_provider_detail(raw_model);
if warn_on_default && inference.source == ProviderInferenceSource::DefaultFallback {
warn_provider_default_fallback(raw_model, &inference.provider);
}
inference.provider
}
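/// Returns the `(model, provider)` pair selected via the `HARN_LLM_MODEL` /
/// `LOCAL_LLM_MODEL` env vars, but only when credentials for the resolved
/// provider are available.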
fn env_selected_model_for_tier() -> Option<(String, String)> {
use crate::llm_config;
let selected_model = std::env::var("HARN_LLM_MODEL")
.ok()
.or_else(|| std::env::var("LOCAL_LLM_MODEL").ok())?;
let selected_provider = std::env::var("HARN_LLM_PROVIDER")
.ok()
.filter(|provider| !provider.is_empty())
.or_else(|| {
if std::env::var("LOCAL_LLM_BASE_URL").is_ok() {
Some("local".to_string())
} else {
None
}
})
.unwrap_or_else(|| llm_config::infer_provider(&selected_model));
if provider_key_available(&selected_provider) {
Some((selected_model, selected_provider))
} else {
None
}
}
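/// Builds the provider probe order: the caller's preference first, then
/// env-var hints (`HARN_LLM_PROVIDER`, `LOCAL_LLM_BASE_URL`, providers inferred
/// from model env vars), then a fixed list of common providers. `push_unique`
/// keeps the list free of duplicates and empty names.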
fn preferred_provider_order(preferred_provider: Option<&str>) -> Vec<String> {
use crate::llm_config;
let mut providers = Vec::new();
if let Some(provider) = preferred_provider {
push_unique(&mut providers, provider.to_string());
}
if let Ok(provider) = std::env::var("HARN_LLM_PROVIDER") {
push_unique(&mut providers, provider);
}
if std::env::var("LOCAL_LLM_BASE_URL").is_ok() {
push_unique(&mut providers, "local");
}
if let Ok(model) = std::env::var("HARN_LLM_MODEL") {
push_unique(&mut providers, llm_config::infer_provider(&model));
}
if let Ok(model) = std::env::var("LOCAL_LLM_MODEL") {
push_unique(&mut providers, llm_config::infer_provider(&model));
}
for provider in [
"local",
"ollama",
"openrouter",
"together",
"huggingface",
"openai",
"anthropic",
] {
push_unique(&mut providers, provider);
}
providers
}
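/// Resolves a `(model, provider)` pair for a model tier, preferring (in order)
/// the tier's mapping for `preferred_provider` when its credentials are
/// available, an env-var model override, and the first tier candidate whose
/// provider is reachable. Any provider switch triggers a one-time warning, and
/// the tier's original mapping is returned as a last resort even when its
/// credentials are missing.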
fn resolve_available_tier_model(
target: &str,
preferred_provider: Option<&str>,
) -> Option<(String, String)> {
use crate::llm_config;
let requested = llm_config::resolve_tier_model(target, preferred_provider);
if let Some((model, provider)) = requested.as_ref() {
if preferred_provider == Some(provider.as_str()) && provider_key_available(provider) {
return Some((model.clone(), provider.clone()));
}
}
if let Some((model, provider)) = env_selected_model_for_tier() {
if requested
.as_ref()
.map(|(_, requested_provider)| requested_provider != &provider)
.unwrap_or(true)
{
warn_model_tier_fallback(
target,
requested.as_ref().map(|(_, provider)| provider.as_str()),
(&model, &provider),
);
}
return Some((model, provider));
}
let candidates = llm_config::tier_candidates(target);
for provider in preferred_provider_order(preferred_provider) {
if !provider_key_available(&provider) {
continue;
}
if let Some((model, candidate_provider)) = candidates
.iter()
.find(|(_, candidate_provider)| candidate_provider == &provider)
{
if requested
.as_ref()
.map(|(_, requested_provider)| requested_provider != candidate_provider)
.unwrap_or(true)
{
warn_model_tier_fallback(
target,
requested.as_ref().map(|(_, provider)| provider.as_str()),
(model, candidate_provider),
);
}
return Some((model.clone(), candidate_provider.clone()));
}
}
if let Some((model, provider)) = requested.as_ref() {
if provider_key_available(provider) {
return Some((model.clone(), provider.clone()));
}
}
requested
}
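/// Resolves the provider for a VM LLM call. Precedence:
/// 1. the explicit `provider` option (unless set to `"auto"`, which defers to
///    the `model` option),
/// 2. the `HARN_LLM_PROVIDER` env var,
/// 3. `"local"` when `LOCAL_LLM_BASE_URL` is set alongside any model selection,
/// 4. inference from the `model` option, the `model_tier` option, then
///    `HARN_LLM_MODEL`,
/// 5. the configured default provider, falling back to `ollama` or `local`
///    when the default has no credentials.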
pub(crate) fn vm_resolve_provider(options: &Option<BTreeMap<String, VmValue>>) -> String {
use crate::llm_config;
if let Some(p) = options
.as_ref()
.and_then(|o| o.get("provider"))
.map(|v| v.display())
{
if !p.eq_ignore_ascii_case("auto") {
return p;
}
if let Some(m) = options
.as_ref()
.and_then(|o| o.get("model"))
.map(|v| v.display())
{
return infer_provider_from_model_selector(&m, true);
}
}
if let Ok(p) = std::env::var("HARN_LLM_PROVIDER") {
if !p.is_empty() {
return p;
}
}
if std::env::var("LOCAL_LLM_BASE_URL").is_ok()
&& (options.as_ref().and_then(|o| o.get("model")).is_some()
|| std::env::var("HARN_LLM_MODEL").is_ok()
|| std::env::var("LOCAL_LLM_MODEL").is_ok())
{
return "local".to_string();
}
if let Some(m) = options
.as_ref()
.and_then(|o| o.get("model"))
.map(|v| v.display())
{
return infer_provider_from_model_selector(&m, true);
}
if let Some(tier) = options
.as_ref()
.and_then(|o| o.get("model_tier"))
.map(|v| v.display())
{
if let Some((_, provider)) = resolve_available_tier_model(&tier, None) {
return provider;
}
}
if let Ok(m) = std::env::var("HARN_LLM_MODEL") {
return infer_provider_from_model_selector(&m, true);
}
let default = llm_config::default_provider();
if provider_key_available(&default) {
return default;
}
for fallback in ["ollama", "local"] {
if provider_key_available(fallback) {
return fallback.to_string();
}
}
default
}
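/// Resolves the model for a VM LLM call: the explicit `model` option wins,
/// then a reachable `model_tier` candidate, then `HARN_LLM_MODEL` when it
/// targets `provider`, then `LOCAL_LLM_MODEL` for the local provider, and
/// finally the provider's configured default model.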
pub(crate) fn vm_resolve_model(
options: &Option<BTreeMap<String, VmValue>>,
provider: &str,
) -> String {
use crate::llm_config;
if let Some(raw) = options
.as_ref()
.and_then(|o| o.get("model"))
.map(|v| v.display())
{
let (resolved, _) = llm_config::resolve_model(&raw);
return resolved;
}
if let Some(tier) = options
.as_ref()
.and_then(|o| o.get("model_tier"))
.map(|v| v.display())
{
if let Some((resolved, _)) = resolve_available_tier_model(&tier, Some(provider)) {
return resolved;
}
}
if let Ok(raw) = std::env::var("HARN_LLM_MODEL") {
let (resolved, resolved_provider) = llm_config::resolve_model(&raw);
let env_provider = std::env::var("HARN_LLM_PROVIDER").ok();
if resolved_provider.as_deref() == Some(provider)
|| (resolved_provider.is_none() && env_provider.as_deref() == Some(provider))
{
return resolved;
}
}
if provider == "local" {
if let Ok(raw) = std::env::var("LOCAL_LLM_MODEL") {
let (resolved, _) = llm_config::resolve_model(&raw);
return resolved;
}
}
llm_config::default_model_for_provider(provider)
}
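/// Builds the aggregate "no providers configured" hint, listing every auth env
/// var declared by the loaded provider config.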
pub fn no_credentials_message() -> String {
use crate::llm_config;
let mut envs: Vec<String> = Vec::new();
for name in llm_config::provider_names() {
if let Some(def) = llm_config::provider_config(&name) {
if def.auth_style == "none" {
continue;
}
for env in llm_config::auth_env_names(&def.auth_env) {
push_unique(&mut envs, env);
}
}
}
envs.sort();
let env_list = if envs.is_empty() {
"(no providers declared)".to_string()
} else {
envs.join(", ")
};
format!(
"No LLM providers configured. Set one of these env vars: {env_list} (or run a local Ollama). \
For diagnostics: `harn doctor`. For a recommended setup: `harn models recommend` (when available)."
)
}
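/// Resolves the API key for `provider`. Mock/replay modes, providers with
/// ambient credentials (`bedrock`, `vertex`), and providers declaring
/// `auth_style == "none"` short-circuit with an empty key; otherwise the
/// provider's declared auth env var(s) are consulted and a descriptive error
/// is raised when none are set. Providers absent from the config fall back to
/// the `ANTHROPIC_API_KEY` lookup.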
pub fn resolve_api_key(provider: &str) -> Result<String, VmError> {
use crate::llm_config;
if provider == "mock"
|| crate::llm::mock::cli_llm_mock_replay_active()
|| crate::llm::mock::builtin_llm_mock_active()
{
return Ok(String::new());
}
if matches!(provider, "bedrock" | "vertex") {
return Ok(String::new());
}
let selection_hint = {
let config_path = llm_config::loaded_config_path()
.map(|p| p.display().to_string())
.unwrap_or_else(|| "<built-in defaults>".to_string());
format!(
" (provider '{provider}' selected via LLM_PROVIDER / llm.toml @ {config_path}; \
set HARN_LLM_PROVIDER=mock or LLM_PROVIDER=mock for offline use)"
)
};
if let Some(pdef) = llm_config::provider_config(provider) {
if pdef.auth_style == "none" {
return Ok(String::new());
}
let aggregate_hint = no_credentials_message();
match &pdef.auth_env {
llm_config::AuthEnv::Single(env) => {
return std::env::var(env).map_err(|_| {
VmError::Thrown(VmValue::String(Rc::from(format!(
"Missing API key: set {env} environment variable{selection_hint}\n{aggregate_hint}"
))))
});
}
llm_config::AuthEnv::Multiple(envs) => {
for env in envs {
if let Ok(val) = std::env::var(env) {
if !val.is_empty() {
return Ok(val);
}
}
}
return Err(VmError::Thrown(VmValue::String(Rc::from(format!(
"Missing API key: set one of {} environment variables{selection_hint}\n{aggregate_hint}",
envs.join(", ")
)))));
}
llm_config::AuthEnv::None => return Ok(String::new()),
}
}
let aggregate_hint = no_credentials_message();
std::env::var("ANTHROPIC_API_KEY").map_err(|_| {
VmError::Thrown(VmValue::String(Rc::from(format!(
"Missing API key: set ANTHROPIC_API_KEY environment variable{selection_hint}\n{aggregate_hint}"
))))
})
}
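/// Chat endpoint and header configuration resolved for a provider, with
/// Anthropic- or OpenAI-style defaults when the provider has no explicit
/// config entry.
///
/// A minimal usage sketch (assuming an `api_key` already obtained via
/// `resolve_api_key`):
///
/// ```ignore
/// let resolved = ResolvedProvider::resolve("anthropic");
/// let req = reqwest::Client::new().post(resolved.url());
/// let req = resolved.apply_headers(req, &api_key);
/// ```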
pub(crate) struct ResolvedProvider {
pub pdef: Option<crate::llm_config::ProviderDef>,
pub is_anthropic_style: bool,
pub base_url: String,
pub endpoint: String,
}
impl ResolvedProvider {
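/// Resolves endpoint configuration for `provider`. Providers whose chat
/// endpoint contains `/messages` (or an unconfigured provider literally named
/// `"anthropic"`) get Anthropic-style defaults; everything else defaults to
/// the OpenAI-compatible `/chat/completions` shape.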
pub fn resolve(provider: &str) -> ResolvedProvider {
let pdef = crate::llm_config::provider_config(provider);
let is_anthropic_style = pdef
.as_ref()
.map(|p| p.chat_endpoint.contains("/messages"))
.unwrap_or(provider == "anthropic");
let (default_base, default_endpoint) = if is_anthropic_style {
("https://api.anthropic.com/v1", "/messages")
} else {
("https://api.openai.com/v1", "/chat/completions")
};
let base_url = pdef
.as_ref()
.map(crate::llm_config::resolve_base_url)
.unwrap_or_else(|| default_base.to_string());
let endpoint = pdef
.as_ref()
.map(|p| p.chat_endpoint.clone())
.unwrap_or_else(|| default_endpoint.to_string());
ResolvedProvider {
pdef,
is_anthropic_style,
base_url,
endpoint,
}
}
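/// Full chat URL: `base_url` joined with the chat endpoint path.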
pub fn url(&self) -> String {
format!("{}{}", self.base_url, self.endpoint)
}
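/// Applies provider auth headers plus any configured extra headers to `req`.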
pub fn apply_headers(
&self,
mut req: reqwest::RequestBuilder,
api_key: &str,
) -> reqwest::RequestBuilder {
req = crate::llm::api::apply_auth_headers(req, api_key, self.pdef.as_ref());
if let Some(p) = self.pdef.as_ref() {
for (k, v) in &p.extra_headers {
req = req.header(k.as_str(), v.as_str());
}
}
req
}
}
#[cfg(test)]
mod no_credentials_tests {
use super::no_credentials_message;
#[test]
fn message_includes_canonical_env_vars_and_doctor_hint() {
let msg = no_credentials_message();
assert!(
msg.contains("ANTHROPIC_API_KEY"),
"expected ANTHROPIC_API_KEY in: {msg}"
);
assert!(
msg.contains("OPENAI_API_KEY"),
"expected OPENAI_API_KEY in: {msg}"
);
assert!(msg.contains("harn doctor"));
assert!(msg.contains("harn models recommend"));
assert!(msg.contains("local Ollama"));
}
}
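#[cfg(test)]
mod push_unique_tests {
use super::push_unique;
// A minimal sketch of the contract that `preferred_provider_order` and
// `no_credentials_message` rely on: empty values and duplicates are skipped.
#[test]
fn skips_empty_and_duplicate_values() {
let mut items: Vec<String> = Vec::new();
push_unique(&mut items, "local");
push_unique(&mut items, "local");
push_unique(&mut items, "");
push_unique(&mut items, "ollama");
assert_eq!(items, vec!["local".to_string(), "ollama".to_string()]);
}
}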