use std::rc::Rc;
use crate::llm::daemon::{watch_state, DaemonLoopConfig};
use crate::value::{VmError, VmValue};
use super::super::agent_config::AgentLoopConfig;
use super::super::agent_tools::{
merge_agent_loop_approval_policy, merge_agent_loop_policy, normalize_native_tools_for_format,
normalize_tool_choice_for_format, normalize_tool_examples_for_format, ToolCallTracker,
};
use super::super::tools::build_tool_calling_contract_prompt;
/// RAII guard that clears the transcript observer's "current iteration"
/// marker when the agent loop unwinds — whether it finishes normally,
/// returns early via `?`, or panics.
pub(super) struct TranscriptIterationGuard;
impl Drop for TranscriptIterationGuard {
    fn drop(&mut self) {
        crate::llm::agent_observe::set_current_iteration(None);
    }
}
/// RAII guard paired with a `push_execution_policy` call in
/// `AgentLoopState::new`. `active` records whether a pop is owed; an
/// inactive guard's drop is a no-op.
pub(super) struct ExecutionPolicyGuard {
    pub(super) active: bool,
}
impl Drop for ExecutionPolicyGuard {
    fn drop(&mut self) {
        if !self.active {
            return;
        }
        crate::orchestration::pop_execution_policy();
    }
}
/// RAII guard paired with a `push_approval_policy` call in
/// `AgentLoopState::new`. `active` records whether a pop is owed; an
/// inactive guard's drop is a no-op.
pub(super) struct ApprovalPolicyGuard {
    pub(super) active: bool,
}
impl Drop for ApprovalPolicyGuard {
    fn drop(&mut self) {
        if !self.active {
            return;
        }
        crate::orchestration::pop_approval_policy();
    }
}
/// RAII guard paired with a `push_dynamic_permission_policy` call in
/// `AgentLoopState::new`. `active` records whether a pop is owed; an
/// inactive guard's drop is a no-op.
pub(super) struct DynamicPermissionPolicyGuard {
    pub(super) active: bool,
}
impl Drop for DynamicPermissionPolicyGuard {
    fn drop(&mut self) {
        if !self.active {
            return;
        }
        crate::llm::permissions::pop_dynamic_permission_policy();
    }
}
// Newtype holder so a locally-computed `VmValue` (the deferred-filtered
// tools dict in `new`) can be kept alive and borrowed alongside the
// caller-supplied `Option<&VmValue>` alternative.
struct VmValueOwned(crate::value::VmValue);
/// Returns true when a tool entry passes a skill whitelist.
///
/// A whitelist entry matches either the tool's `name` directly, or the
/// tool's non-empty `namespace` via a `namespace:<ns>` tag.
fn skill_whitelist_matches_entry(
    entry: &std::collections::BTreeMap<String, crate::value::VmValue>,
    allowed: &std::collections::BTreeSet<String>,
) -> bool {
    let name = entry.get("name").map(|v| v.display()).unwrap_or_default();
    if allowed.contains(&name) {
        return true;
    }
    // No (non-empty string) namespace means only the name could match.
    let ns = match entry.get("namespace") {
        Some(crate::value::VmValue::String(s)) if !s.is_empty() => s.to_string(),
        _ => return false,
    };
    allowed
        .iter()
        .filter_map(|a| a.strip_prefix("namespace:"))
        .any(|tag| tag == ns.as_str())
}
/// Drops deferred (`defer_loading: true`) tool entries from a tools dict,
/// keeping those that are always-loaded or already promoted by tool search.
///
/// Returns `None` when there is nothing to filter: no tools value, not a
/// dict, or no `tools` list inside it.
fn filter_deferred_from_tools_val(
    tools_val: Option<&crate::value::VmValue>,
    client: &ClientToolSearchState,
) -> Option<crate::value::VmValue> {
    use crate::value::VmValue;
    use std::rc::Rc;
    let dict = tools_val?.as_dict()?;
    let Some(VmValue::List(tools_list)) = dict.get("tools") else {
        return None;
    };
    let kept: Vec<VmValue> = tools_list
        .iter()
        .filter(|entry| {
            // Non-dict entries carry no defer flag; always keep them.
            let VmValue::Dict(d) = entry else {
                return true;
            };
            if !matches!(d.get("defer_loading"), Some(VmValue::Bool(true))) {
                return true;
            }
            // Deferred: visible only when always-loaded or promoted.
            let name = d.get("name").map(|v| v.display()).unwrap_or_default();
            client.always_loaded.contains(&name) || client.promoted_set.contains(&name)
        })
        .cloned()
        .collect();
    let mut new_dict = dict.clone();
    new_dict.insert("tools".to_string(), VmValue::List(Rc::new(kept)));
    Some(VmValue::Dict(Rc::new(new_dict)))
}
/// Client-side tool-search state: which deferred tools have been promoted
/// into the visible tool set, and their estimated token cost.
pub(super) struct ClientToolSearchState {
    /// Name of the synthetic search tool exposed to the model.
    pub(super) synthetic_name: String,
    pub(super) strategy: crate::llm::api::ToolSearchStrategy,
    pub(super) variant: crate::llm::api::ToolSearchVariant,
    /// Tools that stay visible even while marked deferred.
    // NOTE(review): this allow looks stale — the field is read by
    // `filter_deferred_from_tools_val` and the stub-listing loop in
    // `rebuild_tool_contract_prompt`; confirm whether a cfg-gated build
    // still needs it before removing.
    #[allow(dead_code)]
    pub(super) always_loaded: std::collections::BTreeSet<String>,
    /// Optional cap compared against `current_token_total` — presumably to
    /// bound promoted-schema growth; confirm at the promotion call site.
    pub(super) budget_tokens: Option<i64>,
    /// Full JSON schemas of deferred tools, keyed by tool name.
    pub(super) deferred_bodies: std::collections::BTreeMap<String, serde_json::Value>,
    /// Promotion order, used when merging promoted bodies into the prompt.
    pub(super) promoted_order: Vec<String>,
    /// Set view of promoted names for membership checks.
    pub(super) promoted_set: std::collections::BTreeSet<String>,
    /// Per-tool token estimate (see `estimate_tokens`) for promoted tools.
    pub(super) promoted_token_estimate: std::collections::BTreeMap<String, i64>,
}
impl ClientToolSearchState {
    /// Rough token estimate for a tool schema: serialized JSON length
    /// divided by 4 ("~4 chars per token" heuristic), rounded up.
    ///
    /// Uses exact integer ceiling division instead of the previous
    /// `f64`-based `ceil()`, which is both float-free and exact for any
    /// input length.
    pub(super) fn estimate_tokens(body: &serde_json::Value) -> i64 {
        let s = serde_json::to_string(body).unwrap_or_default();
        // Ceiling of len / 4 without going through floating point.
        ((s.len() + 3) / 4) as i64
    }
    /// Total estimated tokens across all currently promoted tools.
    pub(super) fn current_token_total(&self) -> i64 {
        self.promoted_token_estimate.values().copied().sum()
    }
}
/// RAII guard that clears per-session event sinks when the loop exits.
/// An empty session id disables the guard.
pub(super) struct SessionSinkGuard {
    pub(super) session_id: String,
}
impl Drop for SessionSinkGuard {
    fn drop(&mut self) {
        if self.session_id.is_empty() {
            return;
        }
        crate::agent_events::clear_session_sinks(&self.session_id);
    }
}
/// RAII guard that pops the "current session" pushed in
/// `AgentLoopState::new`. An empty session id disables the guard.
pub(super) struct CurrentSessionGuard {
    pub(super) session_id: String,
}
impl Drop for CurrentSessionGuard {
    fn drop(&mut self) {
        if self.session_id.is_empty() {
            return;
        }
        crate::agent_sessions::pop_current_session();
    }
}
/// How skills are matched against the current request.
#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)]
pub enum SkillMatchStrategy {
    #[default]
    Metadata,
    Host,
    Embedding,
}
impl SkillMatchStrategy {
    /// Parses a strategy name (case-insensitive, surrounding whitespace
    /// ignored). The empty string maps to `Metadata` silently; any other
    /// unknown name logs a warning and falls back to `Metadata`.
    pub fn parse(s: &str) -> Self {
        let normalized = s.trim().to_ascii_lowercase();
        match normalized.as_str() {
            "metadata" | "" => Self::Metadata,
            "host" => Self::Host,
            "embedding" => Self::Embedding,
            other => {
                crate::events::log_warn(
                    "agent.skill_match",
                    &format!("unknown strategy '{other}', falling back to 'metadata'"),
                );
                Self::Metadata
            }
        }
    }
    /// Canonical lowercase name for this strategy (round-trips via `parse`).
    pub fn as_str(&self) -> &'static str {
        match self {
            Self::Metadata => "metadata",
            Self::Host => "host",
            Self::Embedding => "embedding",
        }
    }
}
/// Configuration for skill matching in the agent loop.
#[derive(Clone, Debug)]
pub struct SkillMatchConfig {
    // Strategy used to match skills (metadata / host / embedding).
    pub strategy: SkillMatchStrategy,
    // Presumably the number of top-ranked skills to activate (default 1)
    // — confirm at the matching call site, which is outside this file.
    pub top_n: usize,
    // Presumably whether matched skills stay active across iterations
    // (default true) — confirm at the matching call site.
    pub sticky: bool,
}
impl Default for SkillMatchConfig {
    fn default() -> Self {
        Self {
            strategy: SkillMatchStrategy::Metadata,
            top_n: 1,
            sticky: true,
        }
    }
}
/// A skill activated for the current agent loop, built from a registry
/// entry (see `ActiveSkill::from_entry` for the exact keys read).
#[derive(Clone, Debug, Default)]
#[allow(dead_code)]
pub struct ActiveSkill {
    // Registry key `name`.
    pub name: String,
    // Registry key `description`.
    pub description: String,
    // Registry key `prompt`; `None` when absent or empty.
    pub prompt: Option<String>,
    // Registry key `when_to_use`.
    pub when_to_use: String,
    // Registry key `paths` (list of strings).
    pub paths: Vec<String>,
    // Registry key `allowed_tools`; feeds the tool whitelist in
    // `AgentLoopState::skill_allowed_tools`.
    pub allowed_tools: Vec<String>,
    // Union of registry keys `mcp` and `requires_mcp` (deduplicated).
    pub mcp_servers: Vec<String>,
    // Registry key `model`; `None` when absent or empty.
    pub model: Option<String>,
    // Registry key `effort`; `None` when absent or empty.
    pub effort: Option<String>,
    // Registry key `invocation`.
    pub invocation: String,
    // Registry keys `disable-model-invocation` / `disable_model_invocation`
    // (hyphen form checked first); defaults to false.
    pub disable_model_invocation: bool,
    // Registry keys `user-invocable` / `user_invocable`; defaults to true.
    pub user_invocable: bool,
}
impl ActiveSkill {
    /// Builds an `ActiveSkill` from a skill-registry entry; yields the
    /// all-default skill when the entry is not a dict.
    pub(crate) fn from_entry(entry: &VmValue) -> Self {
        let Some(dict) = entry.as_dict() else {
            return Self::default();
        };
        // Key-based accessors over the entry dict.
        let display = |key: &str| -> String {
            dict.get(key).map(|v| v.display()).unwrap_or_default()
        };
        let strings = |key: &str| -> Vec<String> {
            match dict.get(key) {
                Some(VmValue::List(items)) => items.iter().map(|x| x.display()).collect(),
                _ => Vec::new(),
            }
        };
        let opt_string = |key: &str| -> Option<String> {
            dict.get(key).map(|v| v.display()).filter(|s| !s.is_empty())
        };
        // First present key wins; a present key counts as true only for an
        // explicit `Bool(true)` value.
        let flag = |keys: &[&str], default: bool| -> bool {
            keys.iter()
                .find_map(|key| dict.get(*key))
                .map(|v| matches!(v, VmValue::Bool(true)))
                .unwrap_or(default)
        };
        // `mcp` plus any `requires_mcp` extras, without duplicates.
        let mut mcp_servers = strings("mcp");
        for extra in strings("requires_mcp") {
            if !mcp_servers.contains(&extra) {
                mcp_servers.push(extra);
            }
        }
        Self {
            name: display("name"),
            description: display("description"),
            prompt: opt_string("prompt"),
            when_to_use: display("when_to_use"),
            paths: strings("paths"),
            allowed_tools: strings("allowed_tools"),
            mcp_servers,
            model: opt_string("model"),
            effort: opt_string("effort"),
            invocation: display("invocation"),
            disable_model_invocation: flag(
                &["disable-model-invocation", "disable_model_invocation"],
                false,
            ),
            user_invocable: flag(&["user-invocable", "user_invocable"], true),
        }
    }
    /// True when the entry explicitly sets model invocation off via either
    /// the hyphenated or underscored key (hyphen form checked first).
    pub(crate) fn is_disabled_for_model(entry: &VmValue) -> bool {
        entry
            .as_dict()
            .and_then(|dict| {
                dict.get("disable-model-invocation")
                    .or_else(|| dict.get("disable_model_invocation"))
            })
            .is_some_and(|v| matches!(v, VmValue::Bool(true)))
    }
}
/// Re-resolves the active-skill names recorded in the session store against
/// the skill registry, so a resumed session keeps its activated skills.
///
/// Returns an empty list for anonymous sessions, a missing/non-dict
/// registry, or when the session has no recorded active-skill names. Names
/// that no longer resolve to a registry entry are silently skipped.
fn rehydrate_active_skills(
    anonymous: bool,
    session_id: &str,
    registry: Option<&crate::value::VmValue>,
) -> Vec<ActiveSkill> {
    if anonymous {
        return Vec::new();
    }
    let Some(dict) = registry.and_then(|r| r.as_dict()) else {
        return Vec::new();
    };
    // Fetch the session's active-skill names before touching the registry
    // list: the common no-active-skills case can then return without the
    // whole-list clone the previous version performed unconditionally.
    let names = crate::agent_sessions::active_skills(session_id);
    if names.is_empty() {
        return Vec::new();
    }
    let Some(crate::value::VmValue::List(skills)) = dict.get("skills") else {
        return Vec::new();
    };
    let mut out = Vec::new();
    for name in names {
        // Linear scan; registry lists are expected to be small.
        let found = skills.iter().find(|s| {
            s.as_dict()
                .and_then(|d| d.get("name"))
                .map(|v| v.display() == name)
                .unwrap_or(false)
        });
        if let Some(entry) = found {
            out.push(ActiveSkill::from_entry(entry));
        }
    }
    out
}
/// All state threaded through one agent-loop run: configuration, session
/// identity, transcript bookkeeping, iteration counters, daemon state,
/// skill state, and the RAII guards whose drops undo global pushes made in
/// `new`.
pub(super) struct AgentLoopState {
    pub(super) config: AgentLoopConfig,
    // --- session identity ---
    pub(super) session_id: String,
    pub(super) anonymous_session: bool,
    // --- prompt assembly ---
    pub(super) tool_contract_prompt: Option<String>,
    pub(super) base_system: Option<String>,
    pub(super) persistent_system_prompt: Option<String>,
    pub(super) has_tools: bool,
    // --- transcript / message bookkeeping ---
    pub(super) visible_messages: Vec<serde_json::Value>,
    pub(super) recorded_messages: Vec<serde_json::Value>,
    pub(super) transcript_events: Vec<crate::value::VmValue>,
    pub(super) transcript_summary: Option<String>,
    pub(super) total_text: String,
    pub(super) last_iteration_text: String,
    // --- task ledger & loop detection ---
    pub(super) task_ledger: crate::llm::ledger::TaskLedger,
    pub(super) ledger_done_rejections: usize,
    pub(super) loop_tracker: ToolCallTracker,
    pub(super) loop_detect_enabled: bool,
    // --- iteration counters / pacing ---
    pub(super) total_iterations: usize,
    pub(super) resumed_iterations: usize,
    pub(super) consecutive_text_only: usize,
    pub(super) consecutive_single_tool_turns: usize,
    pub(super) idle_backoff_ms: u64,
    pub(super) last_run_exit_code: Option<i32>,
    // --- tool-usage tracking ---
    pub(super) all_tools_used: Vec<String>,
    pub(super) successful_tools_used: Vec<String>,
    pub(super) rejected_tools: Vec<String>,
    pub(super) permission_session_grants: std::collections::BTreeSet<String>,
    pub(super) deferred_user_messages: Vec<String>,
    // --- daemon mode ---
    pub(super) daemon_state: String,
    pub(super) daemon_snapshot_path: Option<String>,
    pub(super) daemon_watch_state: std::collections::BTreeMap<String, u64>,
    pub(super) native_text_tool_fallbacks: usize,
    pub(super) native_text_tool_fallback_rejections: usize,
    pub(super) final_status: &'static str,
    pub(super) loop_start: std::time::Instant,
    pub(super) bridge: Option<Rc<crate::bridge::HostBridge>>,
    // --- loop configuration (copied out of `config` in `new`) ---
    pub(super) tool_format: String,
    pub(super) done_sentinel: String,
    pub(super) break_unless_phase: Option<String>,
    pub(super) max_iterations: usize,
    pub(super) max_nudges: usize,
    pub(super) tool_retries: usize,
    pub(super) tool_backoff_ms: u64,
    pub(super) exit_when_verified: bool,
    pub(super) persistent: bool,
    pub(super) daemon: bool,
    pub(super) auto_compact: Option<crate::orchestration::AutoCompactConfig>,
    pub(super) daemon_config: DaemonLoopConfig,
    pub(super) custom_nudge: Option<String>,
    pub(super) tool_search_client: Option<ClientToolSearchState>,
    // --- skills ---
    pub(crate) skill_registry: Option<VmValue>,
    pub(crate) skill_match: SkillMatchConfig,
    pub(crate) working_files: Vec<String>,
    pub(crate) active_skills: Vec<ActiveSkill>,
    pub(crate) loaded_skills: Vec<ActiveSkill>,
    pub(super) native_tools_snapshot: Option<Vec<serde_json::Value>>,
    pub(crate) rehydrated_from_session: bool,
    // --- RAII guards; struct fields drop in declaration order ---
    pub(super) _permission_guard: DynamicPermissionPolicyGuard,
    pub(super) _approval_guard: ApprovalPolicyGuard,
    pub(super) _policy_guard: ExecutionPolicyGuard,
    pub(super) _sink_guard: SessionSinkGuard,
    pub(super) _current_session_guard: CurrentSessionGuard,
    pub(super) _iteration_guard: TranscriptIterationGuard,
}
impl AgentLoopState {
/// Persists the current transcript (visible messages, events, summary)
/// into the session store. No-op for anonymous sessions.
pub(super) fn sync_session_store(&self) {
    if self.anonymous_session {
        return;
    }
    // Arguments are positional: session id, summary, (unused slot), visible
    // messages, events, extra events, status. NOTE(review): confirm the
    // `None` / `Vec::new()` slots against `transcript_to_vm_with_events`.
    let transcript_vm = crate::llm::helpers::transcript_to_vm_with_events(
        Some(self.session_id.clone()),
        self.transcript_summary.clone(),
        None,
        &self.visible_messages,
        self.transcript_events.clone(),
        Vec::new(),
        Some("active"),
    );
    crate::agent_sessions::store_transcript(&self.session_id, transcript_vm);
}
/// Union of `allowed_tools` across all active and loaded skills.
///
/// An empty result means no skill restricts tooling; callers (see
/// `skill_scoped_tools_val` / `rebuild_scoped_native_tools`) treat that —
/// and a literal `"*"` entry — as "all tools allowed".
pub(crate) fn skill_allowed_tools(&self) -> std::collections::BTreeSet<String> {
    // The previous `.filter(|s| !s.allowed_tools.is_empty())` pre-pass was
    // redundant: flat-mapping an empty list contributes nothing.
    self.active_skills
        .iter()
        .chain(self.loaded_skills.iter())
        .flat_map(|s| s.allowed_tools.iter().cloned())
        .collect()
}
/// Active skills merged with loaded skills for prompt construction: a
/// loaded skill replaces an active one of the same name in place; other
/// loaded skills are appended, preserving order.
pub(crate) fn prompt_active_skills(&self) -> Vec<ActiveSkill> {
    let mut merged = self.active_skills.clone();
    for loaded in &self.loaded_skills {
        match merged.iter().position(|skill| skill.name == loaded.name) {
            Some(idx) => merged[idx] = loaded.clone(),
            None => merged.push(loaded.clone()),
        }
    }
    merged
}
/// Applies the skill whitelist to a tools dict, returning a filtered copy.
///
/// Returns `None` when no filtering applies: no tools value, not a dict,
/// no whitelist, a `"*"` wildcard entry, or no `tools` list inside.
pub(crate) fn skill_scoped_tools_val(
    &self,
    tools_val: Option<&crate::value::VmValue>,
) -> Option<crate::value::VmValue> {
    use crate::value::VmValue;
    use std::rc::Rc;
    let dict = tools_val?.as_dict()?;
    let allowed = self.skill_allowed_tools();
    if allowed.is_empty() || allowed.iter().any(|a| a == "*") {
        return None;
    }
    let Some(VmValue::List(tools_list)) = dict.get("tools") else {
        return None;
    };
    // Non-dict entries can never match a whitelist, so they are dropped.
    let kept: Vec<VmValue> = tools_list
        .iter()
        .filter(|entry| match entry {
            VmValue::Dict(d) => skill_whitelist_matches_entry(d, &allowed),
            _ => false,
        })
        .cloned()
        .collect();
    let mut scoped = dict.clone();
    scoped.insert("tools".to_string(), VmValue::List(Rc::new(kept)));
    Some(VmValue::Dict(Rc::new(scoped)))
}
/// Recomputes `opts.native_tools` from the saved snapshot, restricted to
/// the current skill whitelist.
///
/// With no whitelist (or a `"*"` wildcard) the full snapshot is restored.
/// `__harn_`-prefixed tools and `load_skill` always survive filtering, and
/// `namespace:<ns>` whitelist tags match a tool's namespace.
pub(super) fn rebuild_scoped_native_tools(&self, opts: &mut crate::llm::api::LlmCallOptions) {
    let Some(snapshot) = self.native_tools_snapshot.as_ref() else {
        return;
    };
    let allowed = self.skill_allowed_tools();
    if allowed.is_empty() || allowed.iter().any(|a| a == "*") {
        opts.native_tools = Some(snapshot.clone());
        return;
    }
    // A field may live at the top level or nested under `function`.
    let lookup = |entry: &serde_json::Value, key: &str| -> Option<String> {
        entry
            .get(key)
            .and_then(|v| v.as_str())
            .or_else(|| {
                entry
                    .get("function")
                    .and_then(|f| f.get(key))
                    .and_then(|v| v.as_str())
            })
            .map(str::to_string)
    };
    let namespace_allowed = |ns: &str| {
        allowed
            .iter()
            .any(|a| a.strip_prefix("namespace:").is_some_and(|tag| tag == ns))
    };
    let filtered: Vec<serde_json::Value> = snapshot
        .iter()
        .filter(|entry| {
            let name = lookup(entry, "name").unwrap_or_default();
            if name.starts_with("__harn_") || name == "load_skill" || allowed.contains(&name) {
                return true;
            }
            lookup(entry, "namespace").is_some_and(|ns| namespace_allowed(&ns))
        })
        .cloned()
        .collect();
    opts.native_tools = Some(filtered);
}
pub(super) fn rebuild_tool_contract_prompt(
&self,
opts: &crate::llm::api::LlmCallOptions,
) -> Option<String> {
let client = self.tool_search_client.as_ref()?;
if !self.has_tools {
return None;
}
let filtered = filter_deferred_from_tools_val(self.config_tools_val(opts).as_ref(), client);
let tools_owned;
let tools_val_borrow: Option<&crate::value::VmValue> = match (filtered, opts.tools.as_ref())
{
(Some(v), _) => {
tools_owned = Some(v);
tools_owned.as_ref()
}
(None, opt) => opt,
};
let native_tools_for_prompt = self.rebuild_native_tools_for_prompt(opts);
let mut prompt = crate::llm::tools::build_tool_calling_contract_prompt(
tools_val_borrow,
native_tools_for_prompt.as_deref(),
&self.tool_format,
self.config
.turn_policy
.as_ref()
.is_some_and(|policy| policy.require_action_or_yield),
self.config.tool_examples.as_deref(),
!self.config.task_ledger.is_empty(),
);
if let Some(client_cfg) = opts.tool_search.as_ref().filter(|c| c.include_stub_listing) {
let mut stub_lines = Vec::new();
for (name, body) in &client_cfg.deferred_bodies {
if client.promoted_set.contains(name) || client.always_loaded.contains(name) {
continue; }
let description = body
.get("description")
.and_then(|v| v.as_str())
.or_else(|| {
body.get("function")
.and_then(|f| f.get("description"))
.and_then(|v| v.as_str())
})
.unwrap_or("")
.split(['\n', '.'])
.next()
.unwrap_or("")
.trim();
if description.is_empty() {
stub_lines.push(format!("- `{name}`"));
} else {
stub_lines.push(format!("- `{name}` — {description}"));
}
}
if !stub_lines.is_empty() {
prompt.push_str(&format!(
"\n\n## Tools available via `{search_name}` (deferred)\n\n\
Call `{search_name}` with a query to surface any of:\n\n{list}\n",
search_name = client.synthetic_name,
list = stub_lines.join("\n"),
));
}
}
Some(prompt)
}
/// Returns a clone of the caller-supplied tools value.
///
/// NOTE(review): `&self` is currently unused — this indirection presumably
/// exists so the prompt-rebuild path has a single place to source the
/// tools value (e.g. for a future config-level override). Confirm before
/// inlining it away.
fn config_tools_val(
    &self,
    opts: &crate::llm::api::LlmCallOptions,
) -> Option<crate::value::VmValue> {
    opts.tools.clone()
}
/// Assembles the native-tool schema list for the rebuilt contract prompt:
/// the current native tools (or, failing that, the synthetic search-tool
/// schema), plus the bodies of promoted deferred tools not already present,
/// appended in promotion order.
fn rebuild_native_tools_for_prompt(
    &self,
    opts: &crate::llm::api::LlmCallOptions,
) -> Option<Vec<serde_json::Value>> {
    let client = self.tool_search_client.as_ref()?;
    // Seed the list: existing native tools win; otherwise fall back to the
    // synthetic search tool schema when tool search is configured.
    let mut merged: Vec<serde_json::Value> =
        match (opts.native_tools.as_ref(), opts.tool_search.as_ref()) {
            (Some(native), _) => native.clone(),
            (None, Some(cfg)) => vec![crate::llm::tools::build_client_search_tool_schema(
                &opts.provider,
                cfg,
            )],
            (None, None) => Vec::new(),
        };
    // Names already present (top-level or under `function`), so promoted
    // bodies are not duplicated.
    let seen: std::collections::BTreeSet<String> = merged
        .iter()
        .filter_map(|t| {
            t.get("name")
                .and_then(|v| v.as_str())
                .or_else(|| {
                    t.get("function")
                        .and_then(|f| f.get("name"))
                        .and_then(|v| v.as_str())
                })
                .map(String::from)
        })
        .collect();
    for name in &client.promoted_order {
        if seen.contains(name) {
            continue;
        }
        if let Some(body) = client.deferred_bodies.get(name) {
            merged.push(body.clone());
        }
    }
    Some(merged)
}
/// Builds the full agent-loop state from mutable call options and the loop
/// config. `opts` is mutated in place (messages merged with prior session
/// state, native tools normalized, tool choice normalized).
///
/// Statement order matters throughout: each global push is immediately
/// paired with its RAII guard so any early `?` return still pops, and tool
/// normalization must precede contract-prompt construction.
pub(super) fn new(
    opts: &mut crate::llm::api::LlmCallOptions,
    config: AgentLoopConfig,
) -> Result<Self, VmError> {
    crate::llm::agent_observe::reset_transcript_dedup();
    // Guard created first so the observer's iteration marker is cleared on
    // every exit path.
    let _iteration_guard = TranscriptIterationGuard;
    let bridge = super::current_host_bridge();
    // Copy scalar/cloneable loop settings out of `config` up front.
    let max_iterations = config.max_iterations;
    let persistent = config.persistent;
    let max_nudges = config.max_nudges;
    let config_skill_registry = config.skill_registry.clone();
    let config_skill_match = config.skill_match.clone();
    let config_working_files = config.working_files.clone();
    let custom_nudge = config.nudge.clone();
    let done_sentinel = config
        .done_sentinel
        .clone()
        .unwrap_or_else(|| "##DONE##".to_string());
    let break_unless_phase = config.break_unless_phase.clone();
    let tool_retries = config.tool_retries;
    let tool_backoff_ms = config.tool_backoff_ms;
    let tool_format = config.tool_format.clone();
    let mut transcript_summary = opts.transcript_summary.clone();
    // Session resolution: an empty configured id means an anonymous,
    // non-persisted session with a generated id.
    let (session_id, anonymous_session) = if config.session_id.trim().is_empty() {
        (format!("agent_session_{}", uuid::Uuid::now_v7()), true)
    } else {
        let resolved = crate::agent_sessions::open_or_create(Some(config.session_id.clone()));
        (resolved, false)
    };
    if !anonymous_session {
        // Restore prior session state; restored history goes before the
        // caller's new messages.
        let prior = crate::agent_sessions::prompt_state_json(&session_id);
        if transcript_summary.is_none() {
            transcript_summary = prior.summary.clone();
        }
        if !prior.messages.is_empty() {
            let caller_msgs = std::mem::take(&mut opts.messages);
            opts.messages = prior.messages;
            opts.messages.extend(caller_msgs);
        }
    }
    let _sink_guard = SessionSinkGuard {
        session_id: session_id.clone(),
    };
    if !session_id.is_empty() {
        crate::agent_sessions::push_current_session(session_id.clone());
    }
    let _current_session_guard = CurrentSessionGuard {
        session_id: session_id.clone(),
    };
    let auto_compact = config.auto_compact.clone();
    let daemon = config.daemon;
    let daemon_config = config.daemon_config.clone();
    let exit_when_verified = config.exit_when_verified;
    let last_run_exit_code: Option<i32> = None;
    let loop_detect_enabled = config.loop_detect_warn > 0;
    let loop_tracker = ToolCallTracker::new(
        config.loop_detect_warn,
        config.loop_detect_block,
        config.loop_detect_skip,
    );
    // Execution / approval / permission policies: each push is paired with
    // a guard whose `active` flag records whether a pop is owed on drop.
    let effective_policy = merge_agent_loop_policy(config.policy.clone())?;
    if let Some(ref policy) = effective_policy {
        crate::orchestration::push_execution_policy(policy.clone());
    }
    let _policy_guard = ExecutionPolicyGuard {
        active: effective_policy.is_some(),
    };
    let effective_approval_policy =
        merge_agent_loop_approval_policy(config.approval_policy.clone());
    if let Some(ref policy) = effective_approval_policy {
        crate::orchestration::push_approval_policy(policy.clone());
    }
    let _approval_guard = ApprovalPolicyGuard {
        active: effective_approval_policy.is_some(),
    };
    let effective_permissions = config.permissions.clone();
    if let Some(ref permissions) = effective_permissions {
        crate::llm::permissions::push_dynamic_permission_policy(permissions.clone());
    }
    let _permission_guard = DynamicPermissionPolicyGuard {
        active: effective_permissions.is_some(),
    };
    // With a non-empty skill registry, expose the `load_skill` native tool
    // (prepended, unless a tool of that name already exists).
    let has_skill_registry = config_skill_registry
        .as_ref()
        .and_then(|value| value.as_dict())
        .and_then(|dict| dict.get("skills"))
        .and_then(|value| match value {
            crate::value::VmValue::List(skills) => Some(skills),
            _ => None,
        })
        .is_some_and(|skills| !skills.is_empty());
    if has_skill_registry {
        let schema = crate::llm::tools::build_load_skill_tool_schema(&opts.provider);
        match opts.native_tools.as_mut() {
            Some(native_tools) => {
                // Name may be top-level or nested under `function`.
                let already_present = native_tools.iter().any(|tool| {
                    tool.get("name").and_then(|v| v.as_str()).or_else(|| {
                        tool.get("function")
                            .and_then(|function| function.get("name"))
                            .and_then(|v| v.as_str())
                    }) == Some("load_skill")
                });
                if !already_present {
                    native_tools.insert(0, schema);
                }
            }
            None => opts.native_tools = Some(vec![schema]),
        }
    }
    let tools_owned = opts.tools.clone();
    let tools_val = tools_owned.as_ref();
    // Client-side tool search runs when explicitly requested, or in `Auto`
    // mode when the provider cannot defer-load tools itself.
    let tool_search_client = opts
        .tool_search
        .as_ref()
        .filter(|cfg| {
            cfg.mode == crate::llm::api::ToolSearchMode::Client
                || (cfg.mode == crate::llm::api::ToolSearchMode::Auto
                    && !crate::llm::provider::provider_supports_defer_loading(
                        &opts.provider,
                        &opts.model,
                    ))
        })
        .map(|cfg| ClientToolSearchState {
            synthetic_name: cfg.effective_name().to_string(),
            strategy: cfg.effective_strategy(),
            variant: cfg.variant,
            always_loaded: cfg.always_loaded.iter().cloned().collect(),
            budget_tokens: cfg.budget_tokens,
            deferred_bodies: cfg.deferred_bodies.clone(),
            promoted_order: Vec::new(),
            promoted_set: std::collections::BTreeSet::new(),
            promoted_token_estimate: std::collections::BTreeMap::new(),
        });
    // The contract prompt uses the pre-normalization native tools; the call
    // options carry the format-normalized version, which is also
    // snapshotted for later skill-scoped rebuilds.
    let native_tools_for_prompt = opts.native_tools.clone();
    opts.native_tools =
        normalize_native_tools_for_format(&tool_format, opts.native_tools.clone());
    let native_tools_snapshot = opts.native_tools.clone();
    opts.tool_choice = normalize_tool_choice_for_format(
        &opts.provider,
        &tool_format,
        opts.native_tools.as_deref(),
        opts.tool_choice.clone(),
        config.turn_policy.as_ref(),
    );
    // When client-side search owns loading, hide deferred tools from the
    // prompt; `VmValueOwned` keeps the filtered value alive for borrowing.
    let tools_val_for_prompt = tool_search_client
        .as_ref()
        .and_then(|client| filter_deferred_from_tools_val(tools_val, client))
        .map(VmValueOwned);
    let tools_val_borrow = tools_val_for_prompt.as_ref().map(|v| &v.0).or(tools_val);
    let rendered_schemas = crate::llm::tools::collect_tool_schemas(
        tools_val_borrow,
        native_tools_for_prompt.as_deref(),
    );
    let has_tools = !rendered_schemas.is_empty();
    let base_system = opts.system.clone();
    let tool_examples =
        normalize_tool_examples_for_format(&tool_format, config.tool_examples.clone());
    // Build the tool-calling contract prompt, appending a stub listing of
    // deferred tools when client-side search applies.
    let tool_contract_prompt = if has_tools {
        let mut prompt = build_tool_calling_contract_prompt(
            tools_val_borrow,
            native_tools_for_prompt.as_deref(),
            &tool_format,
            config
                .turn_policy
                .as_ref()
                .is_some_and(|policy| policy.require_action_or_yield),
            tool_examples.as_deref(),
            !config.task_ledger.is_empty(),
        );
        if let Some(client_cfg) = opts.tool_search.as_ref().filter(|c| {
            c.include_stub_listing
                && (c.mode == crate::llm::api::ToolSearchMode::Client
                    || (c.mode == crate::llm::api::ToolSearchMode::Auto
                        && !crate::llm::provider::provider_supports_defer_loading(
                            &opts.provider,
                            &opts.model,
                        )))
        }) {
            let mut stub_lines = Vec::new();
            for (name, body) in &client_cfg.deferred_bodies {
                // First line or sentence of the description, if any.
                let description = body
                    .get("description")
                    .and_then(|v| v.as_str())
                    .or_else(|| {
                        body.get("function")
                            .and_then(|f| f.get("description"))
                            .and_then(|v| v.as_str())
                    })
                    .unwrap_or("")
                    .split(['\n', '.'])
                    .next()
                    .unwrap_or("")
                    .trim();
                if description.is_empty() {
                    stub_lines.push(format!("- `{name}`"));
                } else {
                    stub_lines.push(format!("- `{name}` — {description}"));
                }
            }
            if !stub_lines.is_empty() {
                prompt.push_str(&format!(
                    "\n\n## Tools available via `{search_name}` (deferred)\n\n\
                    Call `{search_name}` with a query to surface any of:\n\n{list}\n",
                    search_name = client_cfg.effective_name(),
                    list = stub_lines.join("\n"),
                ));
            }
        }
        Some(prompt)
    } else {
        None
    };
    let allow_done_sentinel = config
        .turn_policy
        .as_ref()
        .map(|policy| policy.allow_done_sentinel)
        .unwrap_or(true);
    // How the model signals completion: native format (or no tools) uses a
    // bare sentinel in text; other formats use a <done> block.
    let done_instruction = if tool_format == "native" || !has_tools {
        format!("include `{done_sentinel}` exactly once in assistant text")
    } else {
        format!("emit `<done>{done_sentinel}</done>` as its own top-level block")
    };
    // Persistent loops get a keep-working system-prompt suffix, varying on
    // verification mode and whether the done sentinel is allowed.
    let persistent_system_prompt = if persistent {
        let progress_instruction = if has_tools {
            "Take action with tool calls — do not stop to explain."
        } else {
            "Solve the request directly in assistant text — do not stop early to explain or summarize."
        };
        if exit_when_verified {
            if allow_done_sentinel {
                Some(format!(
                    "\n\nKeep working until the current request is complete. {progress_instruction} {} only after the current request passes verification.",
                    done_instruction,
                ))
            } else {
                Some(format!(
                    "\n\nKeep working until the current request is complete. {progress_instruction}"
                ))
            }
        } else if allow_done_sentinel {
            Some(format!(
                "\n\nIMPORTANT: You MUST keep working until the current request is complete. \
                {progress_instruction} \
                When the requested work is complete, {}.",
                done_instruction
            ))
        } else {
            Some(format!(
                "\n\nIMPORTANT: You MUST keep working until the current request is complete. \
                {progress_instruction}"
            ))
        }
    } else {
        None
    };
    let mut visible_messages = opts.messages.clone();
    let mut recorded_messages = opts.messages.clone();
    for message in &opts.messages {
        crate::llm::agent_observe::emit_message_event(message);
    }
    // Mutable loop state, seeded with defaults; the daemon-resume branch
    // below may overwrite the `mut` ones from a snapshot.
    let mut total_text = String::new();
    let mut last_iteration_text = String::new();
    let consecutive_text_only = 0usize;
    let consecutive_single_tool_turns = 0usize;
    let task_ledger = config.task_ledger.clone();
    let ledger_done_rejections = 0usize;
    let mut all_tools_used: Vec<String> = Vec::new();
    let successful_tools_used: Vec<String> = Vec::new();
    let mut rejected_tools: Vec<String> = Vec::new();
    let mut deferred_user_messages: Vec<String> = Vec::new();
    let mut total_iterations = 0usize;
    let final_status = "done";
    let loop_start = std::time::Instant::now();
    let mut transcript_events: Vec<crate::value::VmValue> = Vec::new();
    let mut idle_backoff_ms = 100u64;
    let mut daemon_state = if daemon {
        "active".to_string()
    } else {
        "done".to_string()
    };
    let mut daemon_snapshot_path: Option<String> = None;
    let mut daemon_watch_state = watch_state(&daemon_config.watch_paths);
    let native_text_tool_fallbacks = 0usize;
    let native_text_tool_fallback_rejections = 0usize;
    let mut resumed_iterations = 0usize;
    // Re-bound as mutable so the resume branch can overwrite it.
    let mut last_run_exit_code = last_run_exit_code;
    if daemon {
        if let Some(path) = daemon_config.resume_path.as_deref() {
            // Resume from a persisted snapshot; its contents replace the
            // defaults initialized above.
            let snapshot = crate::llm::daemon::load_snapshot(path)?;
            daemon_state = snapshot.daemon_state.clone();
            visible_messages = snapshot.visible_messages;
            recorded_messages = snapshot.recorded_messages;
            transcript_summary = snapshot.transcript_summary;
            transcript_events = snapshot
                .transcript_events
                .iter()
                .map(crate::stdlib::json_to_vm_value)
                .collect();
            total_text = snapshot.total_text;
            last_iteration_text = snapshot.last_iteration_text;
            all_tools_used = snapshot.all_tools_used;
            rejected_tools = snapshot.rejected_tools;
            deferred_user_messages = snapshot.deferred_user_messages;
            resumed_iterations = snapshot.total_iterations;
            total_iterations = resumed_iterations;
            // Floor of 1 keeps the idle backoff from degenerating to 0.
            idle_backoff_ms = snapshot.idle_backoff_ms.max(1);
            last_run_exit_code = snapshot.last_run_exit_code;
            daemon_watch_state = if snapshot.watch_state.is_empty() {
                watch_state(&daemon_config.watch_paths)
            } else {
                snapshot.watch_state
            };
            daemon_snapshot_path = Some(path.to_string());
        } else if let Some(path) = daemon_config.effective_persist_path() {
            daemon_snapshot_path = Some(path.to_string());
        }
    }
    // Restore previously activated skills for a resumed (named) session.
    let rehydrated_active_skills = rehydrate_active_skills(
        anonymous_session,
        &session_id,
        config_skill_registry.as_ref(),
    );
    Ok(Self {
        config,
        session_id,
        anonymous_session,
        tool_contract_prompt,
        base_system,
        persistent_system_prompt,
        has_tools,
        visible_messages,
        recorded_messages,
        transcript_events,
        transcript_summary,
        total_text,
        last_iteration_text,
        task_ledger,
        ledger_done_rejections,
        loop_tracker,
        loop_detect_enabled,
        total_iterations,
        resumed_iterations,
        consecutive_text_only,
        consecutive_single_tool_turns,
        idle_backoff_ms,
        last_run_exit_code,
        all_tools_used,
        successful_tools_used,
        rejected_tools,
        permission_session_grants: std::collections::BTreeSet::new(),
        deferred_user_messages,
        daemon_state,
        daemon_snapshot_path,
        daemon_watch_state,
        native_text_tool_fallbacks,
        native_text_tool_fallback_rejections,
        final_status,
        loop_start,
        bridge,
        tool_format,
        done_sentinel,
        break_unless_phase,
        max_iterations,
        max_nudges,
        tool_retries,
        tool_backoff_ms,
        exit_when_verified,
        persistent,
        daemon,
        auto_compact,
        daemon_config,
        custom_nudge,
        tool_search_client,
        rehydrated_from_session: !rehydrated_active_skills.is_empty(),
        active_skills: rehydrated_active_skills,
        loaded_skills: Vec::new(),
        skill_registry: config_skill_registry,
        skill_match: config_skill_match,
        working_files: config_working_files,
        native_tools_snapshot,
        _permission_guard,
        _approval_guard,
        _policy_guard,
        _sink_guard,
        _current_session_guard,
        _iteration_guard,
    })
}
}