// tycode_core/chat/request.rs
use crate::agents::agent::Agent;
use crate::agents::defaults::prepare_system_prompt_and_tools;
use crate::ai::error::AiError;
use crate::ai::model::{Model, ModelCost};
use crate::ai::provider::AiProvider;
use crate::ai::tweaks::resolve_from_settings;
use crate::ai::{ContentBlock, ConversationRequest, Message, MessageRole, ModelSettings};
use crate::module::ContextBuilder;
use crate::module::Module;
use crate::module::PromptBuilder;
use crate::modules::memory::MemoryConfig;
use crate::settings::config::Settings;
use crate::settings::SettingsManager;
use crate::steering::SteeringDocuments;
use crate::tools::r#trait::ToolExecutor;
use crate::tools::registry::ToolRegistry;
use crate::tools::ToolName;
use anyhow::{bail, Result};
use std::sync::Arc;
use tracing::debug;

22/// Select the appropriate model for an agent based on settings and cost constraints.
23pub fn select_model_for_agent(
24    settings: &Settings,
25    provider: &dyn AiProvider,
26    agent_name: &str,
27) -> Result<ModelSettings, AiError> {
28    if let Some(override_model) = settings.get_agent_model(agent_name) {
29        return Ok(override_model.clone());
30    }
31
32    let quality = match agent_name {
33        "memory_summarizer" | "memory_manager" => {
34            let memory_config: MemoryConfig = settings.get_module_config("memory");
35            if agent_name == "memory_summarizer" {
36                memory_config.summarizer_cost
37            } else {
38                memory_config.recorder_cost
39            }
40        }
41        _ => settings.model_quality.unwrap_or(ModelCost::Unlimited),
42    };
43
44    let Some(mut model) = Model::select_for_cost(provider, quality) else {
45        return Err(AiError::Terminal(anyhow::anyhow!(
46            "No model available for {quality:?} in provider {}",
47            provider.name()
48        )));
49    };
50
51    if let Some(effort) = &settings.reasoning_effort {
52        model.reasoning_budget = effort.clone();
53    }
54
55    Ok(model)
56}
57
58/// Prepare an AI conversation request. This handles the work of fully
59/// assembling a request - including building the prompt (from the agent and
60/// prompt_builder), the context message (from the context_builder), selecting
61/// the correct model, etc.
62pub async fn prepare_request(
63    agent: &dyn Agent,
64    conversation: &[Message],
65    provider: &dyn AiProvider,
66    settings_manager: SettingsManager,
67    steering: &SteeringDocuments,
68    tools: Vec<Arc<dyn ToolExecutor>>,
69    prompt_builder: &PromptBuilder,
70    context_builder: &ContextBuilder,
71    modules: &[Arc<dyn Module>],
72) -> Result<(ConversationRequest, ModelSettings)> {
73    let settings = settings_manager.settings();
74    let agent_name = agent.name();
75
76    // Steering handles custom user-provided markdown files
77    // Prompt components (autonomy, style, etc.) are handled by PromptBuilder
78    let base_prompt =
79        steering.build_system_prompt(agent.core_prompt(), !settings.disable_custom_steering);
80
81    // Append prompt sections from components, filtered by agent's selection
82    let prompt_selection = agent.requested_prompt_components();
83    let filtered_content = prompt_builder.build(&settings, &prompt_selection, modules);
84    let system_prompt = format!("{}{}", base_prompt, filtered_content);
85
86    let model_settings = select_model_for_agent(&settings, provider, agent_name)?;
87
88    let allowed_tool_names: Vec<ToolName> = agent.available_tools();
89
90    let resolved_tweaks = resolve_from_settings(&settings, provider, model_settings.model);
91
92    let module_tools: Vec<Arc<dyn ToolExecutor>> = modules.iter().flat_map(|m| m.tools()).collect();
93    let all_tools: Vec<Arc<dyn ToolExecutor>> = tools.into_iter().chain(module_tools).collect();
94
95    let tool_registry = ToolRegistry::new(all_tools);
96    let available_tools = tool_registry.get_tool_definitions(&allowed_tool_names);
97
98    let context_selection = agent.requested_context_components();
99    let context_content = context_builder.build(&context_selection, modules).await;
100    let mut conversation = conversation.to_vec();
101    if conversation.is_empty() {
102        bail!("No messages to send to AI. Conversation is empty!")
103    }
104
105    if !context_content.is_empty() {
106        if let Some(last_msg) = conversation.last_mut() {
107            if last_msg.role == MessageRole::User {
108                last_msg.content.push(ContentBlock::Text(context_content));
109            }
110        }
111    }
112
113    let (final_system_prompt, final_tools) = prepare_system_prompt_and_tools(
114        &system_prompt,
115        available_tools,
116        resolved_tweaks.tool_call_style.clone(),
117    );
118
119    let request = ConversationRequest {
120        messages: conversation,
121        model: model_settings.clone(),
122        system_prompt: final_system_prompt,
123        stop_sequences: vec![],
124        tools: final_tools,
125    };
126
127    debug!(?request, "AI request");
128
129    Ok((request, model_settings))
130}