use crate::command::chat::api::{build_request_with_tools, create_openai_client};
use crate::command::chat::app::AskRequest;
use crate::command::chat::hook::HookManager;
use crate::command::chat::permission::JcliConfig;
use crate::command::chat::storage::{ChatMessage, ModelProvider, ToolCallItem};
use crate::command::chat::tools::ToolRegistry;
use crate::command::chat::tools::background::BackgroundManager;
use crate::command::chat::tools::task::TaskManager;
use crate::util::log::write_info_log;
use std::sync::{
Arc, Mutex,
atomic::{AtomicBool, AtomicU64, Ordering},
mpsc,
};
/// Live view of a single sub-agent, shared between the sub-agent's worker
/// and the [`SubAgentTracker`] so callers can inspect in-flight runs.
pub struct SubAgentSnapshot {
    /// Unique id of the form `sub_NNNN`, minted by `SubAgentTracker::register`.
    pub id: String,
    /// Human-readable task description supplied at registration.
    pub description: String,
    /// Static mode label supplied at registration.
    pub mode: &'static str,
    /// `true` while the sub-agent runs; expected to be cleared by the owner
    /// when the run ends (`gc_finished` and `snapshot_running` key off it).
    pub is_running: Arc<AtomicBool>,
    /// System prompt published by the running sub-agent.
    pub system_prompt: Arc<Mutex<String>>,
    /// Conversation history published by the running sub-agent.
    pub messages: Arc<Mutex<Vec<ChatMessage>>>,
}
/// Registry of sub-agent snapshots, with a monotonically increasing counter
/// used to mint `sub_NNNN` ids. Safe to share behind an `Arc`.
pub struct SubAgentTracker {
    // All registered snapshots, including finished ones until `gc_finished` prunes them.
    agents: Mutex<Vec<SubAgentSnapshot>>,
    // Next id sequence number; starts at 1 (see `new`).
    counter: AtomicU64,
}
/// Handles returned by `SubAgentTracker::register`, in order:
/// (sub-agent id, is-running flag, shared system prompt, shared message history).
pub type SubAgentRegistration = (
    String,
    Arc<AtomicBool>,
    Arc<Mutex<String>>,
    Arc<Mutex<Vec<ChatMessage>>>,
);
/// Owned copy of one running sub-agent's state as produced by
/// `SubAgentTracker::snapshot_running`, in order:
/// (id, description, mode, system prompt, messages).
pub type RunningSubAgentDump = (String, String, &'static str, String, Vec<ChatMessage>);
impl SubAgentTracker {
pub fn new() -> Self {
Self {
agents: Mutex::new(Vec::new()),
counter: AtomicU64::new(1),
}
}
pub fn register(&self, description: &str, mode: &'static str) -> SubAgentRegistration {
let id = format!("sub_{:04}", self.counter.fetch_add(1, Ordering::Relaxed));
let is_running = Arc::new(AtomicBool::new(true));
let system_prompt = Arc::new(Mutex::new(String::new()));
let messages = Arc::new(Mutex::new(Vec::new()));
if let Ok(mut list) = self.agents.lock() {
list.push(SubAgentSnapshot {
id: id.clone(),
description: description.to_string(),
mode,
is_running: Arc::clone(&is_running),
system_prompt: Arc::clone(&system_prompt),
messages: Arc::clone(&messages),
});
}
(id, is_running, system_prompt, messages)
}
pub fn snapshot_running(&self) -> Vec<RunningSubAgentDump> {
let list = match self.agents.lock() {
Ok(l) => l,
Err(_) => return Vec::new(),
};
list.iter()
.filter(|s| s.is_running.load(Ordering::Relaxed))
.map(|s| {
let sp = s
.system_prompt
.lock()
.map(|x| x.clone())
.unwrap_or_default();
let msgs = s.messages.lock().map(|x| x.clone()).unwrap_or_default();
(s.id.clone(), s.description.clone(), s.mode, sp, msgs)
})
.collect()
}
pub fn gc_finished(&self) {
if let Ok(mut list) = self.agents.lock() {
list.retain(|s| s.is_running.load(Ordering::Relaxed));
}
}
}
impl Default for SubAgentTracker {
    /// Equivalent to [`SubAgentTracker::new`].
    fn default() -> Self {
        Self::new()
    }
}
/// Shared state handed to sub-agent tool execution; `Clone` is cheap because
/// every field is an `Arc` handle to the same underlying object.
#[derive(Clone)]
pub struct AgentToolShared {
    // Manages background tool processes; passed into sub-registries.
    pub background_manager: Arc<BackgroundManager>,
    // Active model/provider configuration; not read in this file.
    pub provider: Arc<Mutex<ModelProvider>>,
    // Current system prompt, if any; not read in this file.
    pub system_prompt: Arc<Mutex<Option<String>>>,
    // Permission rules (allow/deny) consulted before tool execution.
    pub jcli_config: Arc<JcliConfig>,
    // Lifecycle hooks; passed into sub-registries.
    pub hook_manager: Arc<Mutex<HookManager>>,
    // Task bookkeeping; passed into sub-registries.
    pub task_manager: Arc<TaskManager>,
    // Tool names to exclude — presumably filtered by a caller; not read here.
    pub disabled_tools: Arc<Vec<String>>,
    // Queue that forwards tool-permission prompts to the interactive user.
    pub permission_queue: Arc<crate::command::chat::permission_queue::PermissionQueue>,
    // Queue that forwards plan-approval prompts to the interactive user.
    pub plan_approval_queue: Arc<crate::command::chat::tools::plan::PlanApprovalQueue>,
    // Tracker for sub-agents spawned under this shared context.
    pub sub_agent_tracker: Arc<SubAgentTracker>,
}
impl AgentToolShared {
    /// Builds a fresh tool registry for a sub-agent run, wired to the shared
    /// managers and permission queues, and returns it together with the
    /// receiving end of the ask-request channel.
    pub fn build_sub_registry(&self) -> (ToolRegistry, mpsc::Receiver<AskRequest>) {
        let (ask_sender, ask_receiver) = mpsc::channel::<AskRequest>();
        let mut sub_registry = ToolRegistry::new(
            Vec::new(),
            ask_sender,
            self.background_manager.clone(),
            self.task_manager.clone(),
            self.hook_manager.clone(),
            crate::command::chat::compact::new_invoked_skills_map(),
        );
        // Route confirmation prompts through the shared user-facing queues.
        sub_registry.permission_queue = Some(self.permission_queue.clone());
        sub_registry.plan_approval_queue = Some(self.plan_approval_queue.clone());
        (sub_registry, ask_receiver)
    }
}
/// Creates a dedicated tokio runtime plus an OpenAI-compatible client for the
/// given provider. Returns a human-readable error string if runtime creation
/// fails; client construction itself is infallible here.
pub fn create_runtime_and_client(
    provider: &ModelProvider,
) -> Result<
    (
        tokio::runtime::Runtime,
        async_openai::Client<async_openai::config::OpenAIConfig>,
    ),
    String,
> {
    let runtime = match tokio::runtime::Runtime::new() {
        Ok(rt) => rt,
        Err(e) => return Err(format!("Failed to create async runtime: {}", e)),
    };
    Ok((runtime, create_openai_client(provider)))
}
/// Performs one blocking, non-streaming chat completion call and returns the
/// first choice. Errors (request build, transport, empty response) are
/// reported as human-readable strings.
pub fn call_llm_non_stream(
    rt: &tokio::runtime::Runtime,
    client: &async_openai::Client<async_openai::config::OpenAIConfig>,
    provider: &ModelProvider,
    messages: &[ChatMessage],
    tools: &[async_openai::types::chat::ChatCompletionTools],
    system_prompt: Option<&str>,
) -> Result<async_openai::types::chat::ChatChoice, String> {
    let request = build_request_with_tools(provider, messages, tools.to_vec(), system_prompt)
        .map_err(|e| format!("Failed to build request: {}", e))?;
    // Drive the async API call to completion on the caller-provided runtime.
    let response = rt
        .block_on(client.chat().create(request))
        .map_err(|e| format!("API request failed: {}", e))?;
    match response.choices.into_iter().next() {
        Some(choice) => Ok(choice),
        None => Err("[No response from API]".to_string()),
    }
}
/// Converts API tool-call values into plain [`ToolCallItem`]s, keeping only
/// function-style calls and skipping any other variants.
pub fn extract_tool_items(
    tool_calls: &[async_openai::types::chat::ChatCompletionMessageToolCalls],
) -> Vec<ToolCallItem> {
    let mut items = Vec::new();
    for call in tool_calls {
        match call {
            async_openai::types::chat::ChatCompletionMessageToolCalls::Function(func) => {
                items.push(ToolCallItem {
                    id: func.id.clone(),
                    name: func.function.name.clone(),
                    arguments: func.function.arguments.clone(),
                });
            }
            // Non-function tool calls are intentionally ignored.
            _ => {}
        }
    }
    items
}
/// Builds the tool-role reply message for `item` with the given content.
/// Centralizes the `ChatMessage` shape that was previously duplicated at
/// every return site.
fn tool_reply(item: &ToolCallItem, content: String) -> ChatMessage {
    ChatMessage {
        role: "tool".to_string(),
        content,
        tool_calls: None,
        tool_call_id: Some(item.id.clone()),
        images: None,
    }
}

/// Executes one tool call after enforcing the permission pipeline:
/// cancellation check → deny rules → confirmation (queued to the user when a
/// permission queue exists, otherwise refused) → execution.
///
/// Always returns a tool-role `ChatMessage`; denials and cancellations are
/// reported in the message content rather than as errors.
pub fn execute_tool_with_permission(
    item: &ToolCallItem,
    registry: &Arc<ToolRegistry>,
    jcli_config: &Arc<JcliConfig>,
    cancelled: &Arc<AtomicBool>,
    log_tag: &str,
    verbose: bool,
) -> ChatMessage {
    // Bail out immediately if the run was cancelled before this tool started.
    if cancelled.load(Ordering::Relaxed) {
        return tool_reply(item, "[Cancelled]".to_string());
    }
    // Hard deny rules always win, regardless of confirmation settings.
    if jcli_config.is_denied(&item.name, &item.arguments) {
        if verbose {
            write_info_log(log_tag, &format!("Tool denied by deny rule: {}", item.name));
        }
        return tool_reply(
            item,
            format!("Tool '{}' was denied by permission rules.", item.name),
        );
    }
    let tool_ref = registry.get(&item.name);
    // Unknown tools fall through to execute(), which reports its own error.
    let requires_confirm = tool_ref.map(|t| t.requires_confirmation()).unwrap_or(false);
    if requires_confirm && !jcli_config.is_allowed(&item.name, &item.arguments) {
        if let Some(queue) = registry.permission_queue.as_ref() {
            // Forward the confirmation request to the interactive user and
            // block until they answer (or the queue times out).
            let agent_name = crate::command::chat::teammate::current_agent_name();
            let confirm_msg = tool_ref
                .map(|t| t.confirmation_message(&item.arguments))
                .unwrap_or_else(|| format!("调用工具 {}", item.name));
            let req = crate::command::chat::permission_queue::PendingAgentPerm::new(
                agent_name,
                item.name.clone(),
                item.arguments.clone(),
                confirm_msg,
            );
            write_info_log(
                log_tag,
                &format!(
                    "Tool '{}' queued for user permission (60s timeout)",
                    item.name
                ),
            );
            if !queue.request_blocking(req) {
                write_info_log(log_tag, &format!("Tool '{}' denied by user", item.name));
                return tool_reply(
                    item,
                    format!("Tool '{}' was denied by the user.", item.name),
                );
            }
        } else {
            // No permission queue (headless sub-agent): confirmation is
            // impossible, so refuse rather than execute unconfirmed.
            if verbose {
                write_info_log(
                    log_tag,
                    &format!(
                        "Tool '{}' requires confirmation but not auto-allowed, denying",
                        item.name
                    ),
                );
            }
            return tool_reply(
                item,
                format!(
                    "Tool '{}' requires user confirmation which is not available in sub-agent mode. \
                    Add a permission rule to allow this tool automatically.",
                    item.name
                ),
            );
        }
    }
    if verbose {
        write_info_log(
            log_tag,
            &format!("Executing tool: {} args: {}", item.name, item.arguments),
        );
    }
    let result = registry.execute(&item.name, &item.arguments, cancelled);
    if verbose {
        write_info_log(
            log_tag,
            &format!(
                "Tool result: {} is_error={} len={}",
                item.name,
                result.is_error,
                result.output.len()
            ),
        );
    }
    tool_reply(item, result.output)
}