use crate::agents::agent::AgentGPT;
use crate::common::utils::{
Capability, ClientType, ContextManager, Knowledge, Persona, Planner, Reflection, Status, Task,
TaskScheduler, Tool,
};
#[cfg(feature = "cli")]
use crate::prelude::*;
use crate::traits::agent::Agent;
use auto_derive::Auto;
use std::borrow::Cow;
#[cfg(feature = "net")]
use crate::collaboration::Collaborator;
#[cfg(feature = "mem")]
use {
crate::common::memory::load_long_term_memory, crate::common::memory::long_term_memory_context,
crate::common::memory::save_long_term_memory,
};
#[cfg(feature = "oai")]
use {openai_dive::v1::models::Gpt4Model, openai_dive::v1::resources::chat::*};
#[cfg(feature = "cld")]
use anthropic_ai_sdk::types::message::{
ContentBlock, CreateMessageParams, Message as AnthMessage, MessageClient,
RequiredMessageParams, Role,
};
#[cfg(feature = "gem")]
use gems::{
chat::ChatBuilder, imagen::ImageGenBuilder, messages::Content, models::Model,
stream::StreamBuilder, traits::CTrait,
};
#[cfg(any(
feature = "co",
feature = "oai",
feature = "gem",
feature = "cld",
feature = "xai"
))]
use crate::traits::functions::ReqResponse;
#[cfg(feature = "xai")]
use x_ai::{
chat_compl::{ChatCompletionsRequestBuilder, Message as XaiMessage},
traits::ChatCompletionsFetcher,
};
#[cfg(feature = "cli")]
use {
crate::cli::session::{Session, SessionManager, SessionTask, TaskStatus as SessionTaskStatus},
crate::cli::skills::SkillStore,
crate::cli::tui::{
TaskStatus as TuiTaskStatus, create_spinner, print_agent_msg, print_error, print_section,
print_success, print_task_item, print_warning, render_markdown,
},
crate::prompts::generic::{
GENERIC_SYSTEM_PROMPT, IMPLEMENTATION_PLAN_PROMPT, LESSON_EXTRACTION_PROMPT,
REASONING_PROMPT, REFLECTION_PROMPT, TASK_EXECUTION_PROMPT, TASK_SYNTHESIS_PROMPT,
WALKTHROUGH_PROMPT,
},
colored::Colorize,
serde::{Deserialize, Serialize},
std::io::{self, BufRead, Write},
std::time::Duration,
termimad::crossterm::event::{self, Event, KeyCode},
tracing::{error, info, warn},
};
#[cfg(feature = "cli")]
#[derive(Debug, Clone, PartialEq)]
/// Phases of the generic agent's synthesize → plan → approve → execute →
/// reflect pipeline.
///
/// NOTE(review): not referenced elsewhere in this file — presumably consumed
/// by TUI/status code in another module; confirm before relying on variants.
pub enum PhaseState {
    /// No work in progress; waiting for the next user prompt.
    Idle,
    /// Decomposing the user prompt into a task list.
    Synthesizing,
    /// Generating the implementation plan.
    Planning,
    /// Blocked on the interactive plan-approval prompt.
    AwaitingApproval,
    /// Executing tasks; payload is a task index (presumably zero-based —
    /// confirm at the call site).
    Executing(usize),
    /// Running post-task reflection.
    Reflecting,
    /// The whole session finished.
    Complete,
}
#[cfg(feature = "cli")]
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type")]
/// A single action directive emitted by the LLM as part of a JSON array.
///
/// Serialized with an internal `"type"` tag, e.g.
/// `{"type": "CreateFile", "path": "src/main.rs", "content": "..."}`.
/// All paths are interpreted relative to the session workspace by
/// [`GenericAgent::run_action`].
pub enum ActionRequest {
    /// Create a directory (and any missing parents).
    CreateDir {
        path: String,
    },
    /// Create a file with the given content (parents created as needed).
    CreateFile {
        path: String,
        content: String,
    },
    /// Overwrite a file with the given content.
    WriteFile {
        path: String,
        content: String,
    },
    /// Read a file; content comes back via `ActionResult::stdout`.
    ReadFile {
        path: String,
    },
    /// Replace the first occurrence of `old_text` with `new_text`;
    /// fails if the anchor text is not present.
    PatchFile {
        path: String,
        old_text: String,
        new_text: String,
    },
    /// Append content to a file, creating it if needed.
    AppendFile {
        path: String,
        content: String,
    },
    /// List a directory's entries.
    ListDir {
        path: String,
    },
    /// Substring search within a file; matches reported as `line:content`.
    FindInFile {
        path: String,
        pattern: String,
    },
    /// Spawn a command, optionally in a subdirectory of the workspace.
    RunCommand {
        cmd: String,
        args: Vec<String>,
        cwd: Option<String>,
    },
    /// `git add -A` followed by `git commit -m <message>`.
    GitCommit {
        message: String,
    },
}
#[cfg(feature = "cli")]
#[derive(Debug, Clone)]
/// Outcome of applying one [`ActionRequest`].
pub struct ActionResult {
    /// Name of the action variant that ran (e.g. "CreateFile").
    pub action_type: String,
    /// Workspace-relative path the action touched, if any
    /// (`None` for RunCommand/GitCommit).
    pub path: Option<String>,
    /// Captured standard output (also used for ReadFile content and
    /// ListDir/FindInFile listings).
    pub stdout: String,
    /// Captured standard error, or the I/O error message on failure.
    pub stderr: String,
    /// Whether the action completed without error.
    pub success: bool,
}
#[cfg(feature = "cli")]
#[derive(Debug, Deserialize, Default)]
/// Pre-execution chain-of-thought for a task, parsed from the LLM's JSON
/// reply in [`GenericAgent::reason_about_task`].
pub struct ReasoningResult {
    /// The model's thinking about the task.
    pub thought: String,
    /// The chosen implementation approach.
    pub approach: String,
    /// Anticipated risks; absent in the JSON defaults to empty.
    #[serde(default)]
    pub risks: Vec<String>,
}
#[cfg(feature = "cli")]
#[derive(Debug, Deserialize)]
#[serde(rename_all = "snake_case")]
/// Verdict of post-task reflection; deserialized from the JSON strings
/// "success", "retry", or "skip".
pub enum ReflectionOutcome {
    /// Task is done; mark it completed.
    Success,
    /// Apply corrective actions and re-reflect (bounded by max retries).
    Retry,
    /// Give up on this task and mark it skipped.
    Skip,
}
#[cfg(feature = "cli")]
#[derive(Debug, Deserialize)]
/// Full reflection payload parsed from the LLM after a task's actions ran.
pub struct ReflectionResult {
    /// The verdict (success / retry / skip).
    pub outcome: ReflectionOutcome,
    /// The model's justification, surfaced in retry/skip messages.
    pub reasoning: String,
    /// Actions to run before the next reflection when `outcome` is Retry.
    pub corrective_actions: Vec<ActionRequest>,
}
#[cfg(feature = "cli")]
#[derive(Debug, Default, Clone, Auto)]
/// General-purpose CLI agent that turns a free-form prompt into tasks,
/// a plan, file/command actions, and a session walkthrough.
pub struct GenericAgent {
    /// Underlying AgentGPT state (the prompt is stored in its `behavior`).
    pub agent: AgentGPT,
    /// Backing LLM client.
    pub client: ClientType,
    /// When true, plans are auto-approved (no interactive confirmation).
    pub yolo: bool,
    /// Workspace directory; empty means "resolve from env / home default".
    pub workspace: String,
    /// Model id; empty falls back to "gemini-2.5-flash".
    pub model: String,
    /// Provider name; empty falls back to "gemini".
    pub provider: String,
}
#[cfg(feature = "cli")]
use {
crate::traits::functions::{AsyncFunctions, Functions},
anyhow::Result,
async_trait::async_trait,
};
#[cfg(feature = "cli")]
#[async_trait]
impl Executor for GenericAgent {
    /// Full pipeline for one user request: synthesize tasks → generate a
    /// plan → (optionally) ask for approval → execute each task with
    /// reason/act/reflect and bounded retries → verify the build → write a
    /// walkthrough → harvest lessons into the skill store.
    ///
    /// `execute == true` gates an interactive approval prompt (the CLI loop
    /// passes `!yolo`); `_browse` is unused here; `max_tries` (clamped to
    /// 1..=5) bounds per-task reflection retries.
    async fn execute<'a>(
        &'a mut self,
        task: &'a mut Task,
        execute: bool,
        _browse: bool,
        max_tries: u64,
    ) -> Result<()> {
        let prompt = task.description.to_string();
        // Nothing to do for an empty prompt.
        if prompt.is_empty() {
            return Ok(());
        }
        let max_retries = max_tries.clamp(1, 5) as u8;
        let session_mgr = SessionManager::default();
        session_mgr.ensure_dirs()?;
        // Resolve the workspace: explicit field → AUTOGPT_WORKSPACE env →
        // ~/.autogpt/workspace fallback.
        let workspace = if self.workspace.is_empty() {
            std::env::var("AUTOGPT_WORKSPACE").unwrap_or_else(|_| {
                dirs::home_dir()
                    .unwrap_or_else(|| std::path::PathBuf::from("."))
                    .join(".autogpt")
                    .join("workspace")
                    .to_string_lossy()
                    .to_string()
            })
        } else {
            self.workspace.clone()
        };
        std::fs::create_dir_all(&workspace)?;
        let workspace_path = std::path::PathBuf::from(&workspace);
        // Load previously saved lessons relevant to this prompt's domain;
        // an empty store is used if loading fails.
        let skills_dir = dirs::home_dir()
            .unwrap_or_else(|| std::path::PathBuf::from("."))
            .join(".autogpt")
            .join("skills");
        let skills = SkillStore::load_for_domain(&prompt, skills_dir.clone())
            .unwrap_or_else(|_| SkillStore::new(skills_dir.clone()));
        let skills_context = skills.to_prompt_context();
        let model = if self.model.is_empty() {
            "gemini-2.5-flash".to_string()
        } else {
            self.model.clone()
        };
        let provider = if self.provider.is_empty() {
            "gemini".to_string()
        } else {
            self.provider.clone()
        };
        let mut session = Session::new(&prompt, &workspace, &model, &provider);
        session.add_message("user", &prompt);
        task.description = format!("[AutoGPT] {prompt}").into();
        // Phase 1: decompose the request into a task list.
        print_section("🔬 Synthesizing Task List");
        let task_spinner = create_spinner("Decomposing your request into actionable tasks...");
        let tasks = match self.synthesize_tasks(&prompt, "", &skills_context).await {
            Ok(t) if !t.is_empty() => t,
            Ok(_) => {
                task_spinner.finish_and_clear();
                print_error("LLM returned an empty task list.");
                return Ok(());
            }
            Err(e) => {
                task_spinner.finish_and_clear();
                print_error(&format!("Task synthesis failed: {e}"));
                return Err(e);
            }
        };
        task_spinner.finish_and_clear();
        session.set_tasks(tasks.clone());
        session.add_message(
            "assistant",
            &tasks
                .iter()
                .enumerate()
                .map(|(i, t)| format!("{}. {}", i + 1, t.description))
                .collect::<Vec<_>>()
                .join("\n"),
        );
        session_mgr.save(&session)?;
        print_section("📋 Task Plan");
        for t in &tasks {
            print_task_item(&t.description, TuiTaskStatus::Pending);
        }
        // Phase 2: produce an implementation plan from the task list.
        print_section("🏗️ Generating Implementation Plan");
        let plan_spinner = create_spinner("Architecting a production-grade solution...");
        let plan = match self.generate_plan(&prompt, &tasks).await {
            Ok(p) => p,
            Err(e) => {
                plan_spinner.finish_and_clear();
                print_error(&format!("Plan generation failed: {e}"));
                return Err(e);
            }
        };
        plan_spinner.finish_and_clear();
        session.set_plan(&plan);
        session.add_message("assistant", &plan);
        print_section("📑 Implementation Plan");
        render_markdown(&plan);
        session_mgr.save(&session)?;
        // Phase 3: interactive approval gate (skipped when the caller
        // passes execute == false, i.e. YOLO mode).
        if execute {
            info!(
                "{} Approve this plan and begin execution? {} ",
                "?".bright_cyan().bold(),
                "(yes / no)".bright_black()
            );
            print!("> ");
            io::stdout().flush()?;
            let mut approval = String::new();
            io::stdin().lock().read_line(&mut approval)?;
            if !crate::common::utils::is_yes(approval.trim()) {
                print_warning("Plan not approved. Ready for next prompt.");
                session.add_message("user", "Plan rejected.");
                session_mgr.save(&session)?;
                return Ok(());
            }
        }
        // Phase 4: run each task with reason → act → reflect (+ retries).
        print_section("⚙️ Executing Tasks via AutoGPT");
        let tasks_snapshot = session.tasks.clone();
        let total = tasks_snapshot.len();
        for (idx, task_item) in tasks_snapshot.iter().enumerate() {
            print_task_item(&task_item.description, TuiTaskStatus::InProgress);
            session.update_task_status(idx, SessionTaskStatus::InProgress);
            session_mgr.save(&session)?;
            // NOTE(review): this slices the description by BYTE index; it
            // will panic if byte 55 falls inside a multi-byte UTF-8
            // character (emoji, accented text). Consider a char-based
            // truncation here.
            let exec_spinner = create_spinner(&format!(
                "Task {}/{}: {}",
                idx + 1,
                total,
                &task_item.description[..task_item.description.len().min(55)]
            ));
            // Descriptions of the tasks already processed, as &str refs.
            let completed_descs: Vec<String> = session
                .tasks
                .iter()
                .take(idx)
                .map(|t| t.description.clone())
                .collect();
            let completed_refs: Vec<&str> = completed_descs.iter().map(|s| s.as_str()).collect();
            let reasoning = self
                .reason_about_task(
                    task_item,
                    &plan,
                    &completed_refs,
                    idx + 1,
                    total,
                    &workspace,
                )
                .await;
            session.add_reasoning(&reasoning.thought);
            let mut results = self
                .execute_task(
                    &prompt,
                    task_item,
                    idx + 1,
                    total,
                    &plan,
                    &completed_refs,
                    &workspace_path,
                    &mut session,
                    &reasoning,
                )
                .await
                .unwrap_or_default();
            exec_spinner.finish_and_clear();
            // Reflection loop: keep applying corrective actions until the
            // LLM reports success, retries run out, or it says skip.
            let mut retry_count: u8 = 0;
            loop {
                // A failed reflection call is treated as success so it can
                // never wedge the pipeline.
                let reflection = self
                    .reflect_on_task(task_item, &results, retry_count)
                    .await
                    .unwrap_or(ReflectionResult {
                        outcome: ReflectionOutcome::Success,
                        reasoning: "Assumed success.".into(),
                        corrective_actions: vec![],
                    });
                match reflection.outcome {
                    ReflectionOutcome::Success => {
                        print_task_item(&task_item.description, TuiTaskStatus::Completed);
                        session.update_task_status(idx, SessionTaskStatus::Completed);
                        break;
                    }
                    ReflectionOutcome::Retry if retry_count < max_retries => {
                        retry_count += 1;
                        print_warning(&format!(
                            "Retry {retry_count}/{max_retries} - {}",
                            reflection.reasoning
                        ));
                        // The next reflection judges only the corrective
                        // actions' results.
                        results = Vec::new();
                        for action in &reflection.corrective_actions {
                            let r = GenericAgent::run_action(action, &workspace_path, &mut session)
                                .await;
                            results.push(r);
                        }
                    }
                    // Retry with retries exhausted, or an explicit Skip.
                    ReflectionOutcome::Retry | ReflectionOutcome::Skip => {
                        print_task_item(&task_item.description, TuiTaskStatus::Skipped);
                        session.update_task_status(idx, SessionTaskStatus::Skipped);
                        warn!(" ⊘ Skipped: {}", reflection.reasoning.bright_black());
                        break;
                    }
                }
            }
            session_mgr.save(&session)?;
        }
        // Phase 5: try to get a passing build (up to 3 attempts).
        let build_succeeded = self
            .build_and_verify(&workspace_path, &mut session, 3)
            .await;
        if !build_succeeded {
            print_warning("Build verification failed after all attempts.");
        }
        // Phase 6: compose and persist a human-readable walkthrough.
        print_section("📓 Generating Session Walkthrough");
        let wt_spinner = create_spinner("Composing walkthrough document...");
        let tasks_status = session
            .tasks
            .iter()
            .enumerate()
            .map(|(i, t)| format!("{}. {} [{}]", i + 1, t.description, t.status.as_str()))
            .collect::<Vec<_>>()
            .join("\n");
        let files_str = session
            .files_created
            .iter()
            .map(|f| format!("{} ({})", f.path, f.action))
            .collect::<Vec<_>>()
            .join("\n");
        let wt_prompt = format!(
            "{}\n\n{}",
            GENERIC_SYSTEM_PROMPT,
            WALKTHROUGH_PROMPT
                .replace("{SESSION_ID}", &session.id)
                .replace(
                    "{DATE}",
                    &session.created_at.format("%Y-%m-%d %H:%M UTC").to_string()
                )
                .replace("{MODEL}", &model)
                .replace("{WORKSPACE}", &workspace)
                .replace("{PROMPT}", &prompt)
                .replace("{TASK_LIST_WITH_STATUSES}", &tasks_status)
                .replace("{FILES_CREATED}", &files_str)
        );
        // Fall back to a locally generated walkthrough if the LLM fails.
        let walkthrough = self
            .generate(&wt_prompt)
            .await
            .unwrap_or_else(|_| SessionManager::generate_walkthrough(&session));
        wt_spinner.finish_and_clear();
        session.set_walkthrough(&walkthrough);
        session_mgr.save(&session)?;
        let wt_path = session_mgr.base_dir.join("walkthrough.md");
        std::fs::write(&wt_path, &walkthrough)?;
        // Phase 7: harvest reusable lessons into the skill store;
        // individual save failures are deliberately ignored.
        if let Some((domain, lessons, anti_patterns)) = self
            .extract_lessons(&prompt, &session.tasks, &tasks_status)
            .await
            && (!lessons.is_empty() || !anti_patterns.is_empty())
        {
            let mut store = SkillStore::new(skills_dir);
            for lesson in &lessons {
                let _ = store.save_lesson(&domain, lesson, None);
            }
            for ap in &anti_patterns {
                let _ = store.save_lesson(&domain, "", Some(ap));
            }
        }
        print_section("✅ Session Complete");
        print_success(&format!("Walkthrough → {}", wt_path.display()));
        print_success(&format!(
            "Session → {}",
            session_mgr.session_dir(&session.id).display()
        ));
        print_success(&format!("Workspace → {workspace}"));
        print_agent_msg(
            "AutoGPT",
            "All tasks complete. Ready for your next request.",
        );
        Ok(())
    }
}
#[cfg(feature = "cli")]
impl GenericAgent {
/// Ask the LLM to decompose `prompt` into an ordered task list.
///
/// Parsing is lenient: numbered lines ("1. Do X") are tried first, then a
/// JSON string array fallback. Candidates that fail the
/// [`Self::is_valid_task_desc`] heuristic are dropped. Returns an error
/// containing an excerpt of the raw output when neither form yields a
/// usable task.
async fn synthesize_tasks(
    &mut self,
    prompt: &str,
    history: &str,
    skills_context: &str,
) -> Result<Vec<SessionTask>> {
    let full_prompt = TASK_SYNTHESIS_PROMPT
        .replace("{PROMPT}", prompt)
        .replace("{HISTORY}", history)
        .replace("{SKILLS_CONTEXT}", skills_context);
    let raw = self.generate(&full_prompt).await?;
    // First pass: lines that start with an ASCII digit ("1. Task ...").
    let numbered: Vec<SessionTask> = raw
        .lines()
        .filter(|l| {
            let t = l.trim();
            !t.is_empty()
                && t.chars()
                    .next()
                    .map(|c| c.is_ascii_digit())
                    .unwrap_or(false)
        })
        .map(|l| {
            // Strip the leading "N." marker to leave just the description.
            let desc = l
                .trim()
                .trim_start_matches(|c: char| c.is_ascii_digit())
                .trim_start_matches('.')
                .trim()
                .to_string();
            SessionTask {
                description: desc,
                status: SessionTaskStatus::Pending,
            }
        })
        .filter(|t| Self::is_valid_task_desc(&t.description))
        .collect();
    if !numbered.is_empty() {
        return Ok(numbered);
    }
    // Fallback: the model may have emitted a JSON array of strings
    // (possibly wrapped in a code fence).
    let clean = crate::common::utils::strip_code_blocks(&raw);
    if let Ok(arr) = serde_json::from_str::<Vec<String>>(clean.trim()) {
        let from_json: Vec<SessionTask> = arr
            .into_iter()
            .filter(|s| Self::is_valid_task_desc(s))
            .map(|s| SessionTask {
                description: s,
                status: SessionTaskStatus::Pending,
            })
            .collect();
        if !from_json.is_empty() {
            return Ok(from_json);
        }
    }
    // BUGFIX: the previous `&raw[..raw.len().min(200)]` sliced by byte
    // index and panicked when byte 200 fell inside a multi-byte UTF-8
    // character; truncate by characters instead.
    let excerpt: String = raw.chars().take(200).collect();
    Err(anyhow::anyhow!(
        "LLM returned malformed task list. Raw output: {}",
        excerpt
    ))
}
/// Heuristic filter for LLM-produced task descriptions: a usable task
/// has at least three whitespace-separated words and is at least 15
/// bytes long.
fn is_valid_task_desc(desc: &str) -> bool {
    let word_count = desc.split_whitespace().count();
    word_count >= 3 && desc.len() >= 15
}
/// Ask the LLM for an implementation plan covering the given tasks.
///
/// The prompt template receives a working title (first 60 chars of the
/// user prompt), the original prompt, and the tasks rendered as a
/// numbered list.
async fn generate_plan(&mut self, prompt: &str, tasks: &[SessionTask]) -> Result<String> {
    // Render the tasks as "N. description", one per line.
    let mut numbered_lines = Vec::with_capacity(tasks.len());
    for (idx, item) in tasks.iter().enumerate() {
        numbered_lines.push(format!("{}. {}", idx + 1, item.description));
    }
    let task_list = numbered_lines.join("\n");
    // Char-based truncation keeps the title safe for multi-byte text.
    let title = prompt.chars().take(60).collect::<String>();
    let full_prompt = IMPLEMENTATION_PLAN_PROMPT
        .replace("{TITLE}", &title)
        .replace("{PROMPT}", prompt)
        .replace("{TASK_LIST}", &task_list);
    self.generate(&full_prompt).await
}
/// Produce a short pre-execution chain-of-thought for a single task.
///
/// Builds a prompt from the task, an excerpt of the plan (the first 10
/// lines starting where the task is mentioned, falling back to the plan's
/// head), the completed tasks, and the workspace path, then parses the
/// LLM reply as JSON. On any generation or parse failure a best-effort
/// fallback is returned (first 300 chars of the raw reply as `thought`),
/// so this method never fails the pipeline.
async fn reason_about_task(
    &mut self,
    task: &SessionTask,
    plan: &str,
    completed: &[&str],
    task_num: usize,
    task_total: usize,
    workspace: &str,
) -> ReasoningResult {
    // BUGFIX: the previous byte-index slices (`&s[..s.len().min(N)]`)
    // could panic when byte N fell inside a multi-byte UTF-8 character;
    // take a prefix on a char boundary instead.
    fn char_prefix(s: &str, max_chars: usize) -> &str {
        match s.char_indices().nth(max_chars) {
            Some((idx, _)) => &s[..idx],
            None => s,
        }
    }
    let completed_str = completed
        .iter()
        .enumerate()
        .map(|(i, t)| format!("{}. {}", i + 1, t))
        .collect::<Vec<_>>()
        .join("\n");
    // Pull up to 10 plan lines starting at the first line that mentions
    // (a prefix of) this task's description.
    let needle = char_prefix(&task.description, 30);
    let plan_excerpt: String = plan
        .lines()
        .skip_while(|l| !l.contains(needle))
        .take(10)
        .collect::<Vec<_>>()
        .join("\n");
    let full_prompt = REASONING_PROMPT
        .replace("{TASK_NUM}", &task_num.to_string())
        .replace("{TASK_TOTAL}", &task_total.to_string())
        .replace("{TASK_DESCRIPTION}", &task.description)
        .replace(
            "{PLAN_EXCERPT}",
            if plan_excerpt.is_empty() {
                // No match found: use the head of the plan instead.
                char_prefix(plan, 500)
            } else {
                &plan_excerpt
            },
        )
        .replace("{COMPLETED_TASKS}", &completed_str)
        .replace("{WORKSPACE}", workspace);
    let raw = self.generate(&full_prompt).await.unwrap_or_default();
    let clean = crate::common::utils::strip_code_blocks(&raw);
    serde_json::from_str::<ReasoningResult>(clean.trim()).unwrap_or_else(|_| ReasoningResult {
        thought: raw.chars().take(300).collect(),
        approach: String::new(),
        risks: vec![],
    })
}
/// Run one task end-to-end: build the execution prompt, ask the LLM for a
/// JSON array of [`ActionRequest`]s, and apply each action in order.
///
/// Returns the per-action results. A reply that fails to parse degrades
/// to an empty action list (and therefore an empty result vector) rather
/// than an error.
#[allow(clippy::too_many_arguments)]
async fn execute_task(
    &mut self,
    prompt: &str,
    task: &SessionTask,
    task_num: usize,
    task_total: usize,
    plan: &str,
    completed: &[&str],
    workspace: &std::path::Path,
    session: &mut Session,
    reasoning: &ReasoningResult,
) -> Result<Vec<ActionResult>> {
    // BUGFIX: previous code sliced `task.description` by byte index
    // (`[..len().min(30)]`), which panics when byte 30 is not a UTF-8
    // char boundary; take a char-boundary prefix instead.
    fn char_prefix(s: &str, max_chars: usize) -> &str {
        match s.char_indices().nth(max_chars) {
            Some((idx, _)) => &s[..idx],
            None => s,
        }
    }
    let completed_str = completed
        .iter()
        .enumerate()
        .map(|(i, t)| format!("{}. {}", i + 1, t))
        .collect::<Vec<_>>()
        .join("\n");
    // Excerpt up to 15 plan lines starting where this task is mentioned.
    let needle = char_prefix(&task.description, 30);
    let plan_excerpt: String = plan
        .lines()
        .skip_while(|l| !l.contains(needle))
        .take(15)
        .collect::<Vec<_>>()
        .join("\n");
    let reasoning_text = if reasoning.thought.is_empty() {
        String::new()
    } else {
        format!(
            "Thought: {}\nApproach: {}\nRisks: {}",
            reasoning.thought,
            reasoning.approach,
            reasoning.risks.join(", ")
        )
    };
    let execution_prompt = TASK_EXECUTION_PROMPT
        .replace("{WORKSPACE}", &workspace.to_string_lossy())
        .replace("{PROMPT}", prompt)
        .replace("{TASK_NUM}", &task_num.to_string())
        .replace("{TASK_TOTAL}", &task_total.to_string())
        .replace("{TASK_DESCRIPTION}", &task.description)
        .replace(
            "{PLAN_EXCERPT}",
            if plan_excerpt.is_empty() {
                plan
            } else {
                &plan_excerpt
            },
        )
        .replace("{COMPLETED_TASKS}", &completed_str)
        .replace("{REASONING}", &reasoning_text);
    let combined = format!(
        "{}\n\nYou are operating inside workspace: {}\n\n{}",
        GENERIC_SYSTEM_PROMPT,
        workspace.display(),
        execution_prompt
    );
    let raw = self.generate(&combined).await.unwrap_or_default();
    let clean = crate::common::utils::strip_code_blocks(&raw);
    // Unparseable output degrades to "no actions" rather than an error.
    let actions: Vec<ActionRequest> = serde_json::from_str(clean.trim()).unwrap_or_default();
    let mut results = Vec::new();
    for action in &actions {
        let result = Self::run_action(action, workspace, session).await;
        results.push(result);
    }
    Ok(results)
}
/// Apply a single [`ActionRequest`] inside `workspace` and report the
/// outcome as an [`ActionResult`]. Never returns `Err` or panics on I/O
/// failures: every failure is captured in `stderr` with `success: false`.
///
/// File-writing variants (`CreateFile`/`WriteFile`/`PatchFile`/
/// `AppendFile`) also record the touched path on `session`.
///
/// NOTE(review): `workspace.join(path)` replaces the base entirely when
/// `path` is absolute, and `..` components are not normalized — an
/// LLM-supplied path can therefore escape the workspace. Consider
/// canonicalizing and validating containment before applying actions.
pub async fn run_action(
    action: &ActionRequest,
    workspace: &std::path::Path,
    session: &mut Session,
) -> ActionResult {
    match action {
        // Create a directory (and all missing parents).
        ActionRequest::CreateDir { path } => {
            let abs = workspace.join(path);
            match std::fs::create_dir_all(&abs) {
                Ok(_) => {
                    info!(" {} {}", "📁".bright_cyan(), path.bright_blue());
                    ActionResult {
                        action_type: "CreateDir".into(),
                        path: Some(path.clone()),
                        stdout: String::new(),
                        stderr: String::new(),
                        success: true,
                    }
                }
                Err(e) => ActionResult {
                    action_type: "CreateDir".into(),
                    path: Some(path.clone()),
                    stdout: String::new(),
                    stderr: e.to_string(),
                    success: false,
                },
            }
        }
        // Create or overwrite a file; parent dirs are created best-effort.
        ActionRequest::CreateFile { path, content }
        | ActionRequest::WriteFile { path, content } => {
            let abs = workspace.join(path);
            // Preserve which variant matched for reporting purposes.
            let action_type = match action {
                ActionRequest::CreateFile { .. } => "CreateFile",
                _ => "WriteFile",
            };
            if let Some(parent) = abs.parent() {
                let _ = std::fs::create_dir_all(parent);
            }
            match std::fs::write(&abs, content) {
                Ok(_) => {
                    info!(" {} {}", "📄".bright_cyan(), path.bright_blue());
                    session.record_file(path, action_type);
                    ActionResult {
                        action_type: action_type.into(),
                        path: Some(path.clone()),
                        stdout: String::new(),
                        stderr: String::new(),
                        success: true,
                    }
                }
                Err(e) => ActionResult {
                    action_type: action_type.into(),
                    path: Some(path.clone()),
                    stdout: String::new(),
                    stderr: e.to_string(),
                    success: false,
                },
            }
        }
        // Read a file; its content is returned via `stdout`.
        ActionRequest::ReadFile { path } => {
            let abs = workspace.join(path);
            match std::fs::read_to_string(&abs) {
                Ok(content) => {
                    info!(" {} {}", "📖".bright_cyan(), path.bright_blue());
                    ActionResult {
                        action_type: "ReadFile".into(),
                        path: Some(path.clone()),
                        stdout: content,
                        stderr: String::new(),
                        success: true,
                    }
                }
                Err(e) => ActionResult {
                    action_type: "ReadFile".into(),
                    path: Some(path.clone()),
                    stdout: String::new(),
                    stderr: e.to_string(),
                    success: false,
                },
            }
        }
        // Replace the FIRST occurrence of `old_text` with `new_text`;
        // fails with guidance for the LLM if the anchor text is absent.
        ActionRequest::PatchFile {
            path,
            old_text,
            new_text,
        } => {
            let abs = workspace.join(path);
            match std::fs::read_to_string(&abs) {
                Ok(content) => {
                    if !content.contains(old_text.as_str()) {
                        return ActionResult {
                            action_type: "PatchFile".into(),
                            path: Some(path.clone()),
                            stdout: String::new(),
                            stderr: format!(
                                "patch anchor not found in {path}. \
                                 Use ReadFile first to confirm the exact text."
                            ),
                            success: false,
                        };
                    }
                    // replacen(.., 1) keeps the patch minimal and predictable.
                    let patched = content.replacen(old_text.as_str(), new_text.as_str(), 1);
                    match std::fs::write(&abs, &patched) {
                        Ok(_) => {
                            info!(" {} {}", "✏️ ".bright_cyan(), path.bright_blue());
                            session.record_file(path, "PatchFile");
                            ActionResult {
                                action_type: "PatchFile".into(),
                                path: Some(path.clone()),
                                stdout: format!("Patched {path} successfully."),
                                stderr: String::new(),
                                success: true,
                            }
                        }
                        Err(e) => ActionResult {
                            action_type: "PatchFile".into(),
                            path: Some(path.clone()),
                            stdout: String::new(),
                            stderr: e.to_string(),
                            success: false,
                        },
                    }
                }
                Err(e) => ActionResult {
                    action_type: "PatchFile".into(),
                    path: Some(path.clone()),
                    stdout: String::new(),
                    stderr: e.to_string(),
                    success: false,
                },
            }
        }
        // Append to a file, creating it (and parent dirs) if needed.
        ActionRequest::AppendFile { path, content } => {
            let abs = workspace.join(path);
            if let Some(parent) = abs.parent() {
                let _ = std::fs::create_dir_all(parent);
            }
            use std::io::Write as IoWrite;
            match std::fs::OpenOptions::new()
                .create(true)
                .append(true)
                .open(&abs)
            {
                Ok(mut file) => match file.write_all(content.as_bytes()) {
                    Ok(_) => {
                        info!(" {} {}", "➕".bright_cyan(), path.bright_blue());
                        session.record_file(path, "AppendFile");
                        ActionResult {
                            action_type: "AppendFile".into(),
                            path: Some(path.clone()),
                            stdout: String::new(),
                            stderr: String::new(),
                            success: true,
                        }
                    }
                    Err(e) => ActionResult {
                        action_type: "AppendFile".into(),
                        path: Some(path.clone()),
                        stdout: String::new(),
                        stderr: e.to_string(),
                        success: false,
                    },
                },
                Err(e) => ActionResult {
                    action_type: "AppendFile".into(),
                    path: Some(path.clone()),
                    stdout: String::new(),
                    stderr: e.to_string(),
                    success: false,
                },
            }
        }
        // List a directory; entries are sorted and dirs get a trailing '/'.
        ActionRequest::ListDir { path } => {
            let abs = if path == "." {
                workspace.to_path_buf()
            } else {
                workspace.join(path)
            };
            match std::fs::read_dir(&abs) {
                Ok(entries) => {
                    let mut lines = Vec::new();
                    for entry in entries.flatten() {
                        let name = entry.file_name().to_string_lossy().to_string();
                        let is_dir = entry.file_type().map(|t| t.is_dir()).unwrap_or(false);
                        lines.push(if is_dir { format!("{name}/") } else { name });
                    }
                    lines.sort();
                    ActionResult {
                        action_type: "ListDir".into(),
                        path: Some(path.clone()),
                        stdout: lines.join("\n"),
                        stderr: String::new(),
                        success: true,
                    }
                }
                Err(e) => ActionResult {
                    action_type: "ListDir".into(),
                    path: Some(path.clone()),
                    stdout: String::new(),
                    stderr: e.to_string(),
                    success: false,
                },
            }
        }
        // Substring search (not regex); matches reported as "line_no:line".
        ActionRequest::FindInFile { path, pattern } => {
            let abs = workspace.join(path);
            match std::fs::read_to_string(&abs) {
                Ok(content) => {
                    let matches: Vec<String> = content
                        .lines()
                        .enumerate()
                        .filter(|(_, line)| line.contains(pattern.as_str()))
                        .map(|(i, line)| format!("{}:{}", i + 1, line))
                        .collect();
                    ActionResult {
                        action_type: "FindInFile".into(),
                        path: Some(path.clone()),
                        stdout: matches.join("\n"),
                        stderr: String::new(),
                        success: true,
                    }
                }
                Err(e) => ActionResult {
                    action_type: "FindInFile".into(),
                    path: Some(path.clone()),
                    stdout: String::new(),
                    stderr: e.to_string(),
                    success: false,
                },
            }
        }
        // Spawn a command in the workspace (or `cwd` joined under it),
        // echoing truncated stdout/stderr to the log.
        ActionRequest::RunCommand { cmd, args, cwd } => {
            let working_dir = cwd
                .as_ref()
                .map(|c| workspace.join(c))
                .unwrap_or_else(|| workspace.to_path_buf());
            info!(
                " {} {} {}",
                "⚡".bright_magenta(),
                cmd.bright_cyan().bold(),
                args.join(" ").bright_white()
            );
            match tokio::process::Command::new(cmd)
                .args(args)
                .current_dir(&working_dir)
                .output()
                .await
            {
                Ok(out) => {
                    let stdout = String::from_utf8_lossy(&out.stdout).to_string();
                    let stderr = String::from_utf8_lossy(&out.stderr).to_string();
                    let success = out.status.success();
                    // Echo at most 20 stdout / 10 stderr lines to the log.
                    if !stdout.trim().is_empty() {
                        for line in stdout.lines().take(20) {
                            info!(" {}", line.bright_black());
                        }
                    }
                    if !success && !stderr.trim().is_empty() {
                        for line in stderr.lines().take(10) {
                            error!(" {}", line.bright_red());
                        }
                    }
                    ActionResult {
                        action_type: "RunCommand".into(),
                        path: None,
                        stdout,
                        stderr,
                        success,
                    }
                }
                // Spawn failure (e.g. binary not found), as opposed to a
                // non-zero exit status handled above.
                Err(e) => {
                    error!(" {} Command failed: {}", "✗".bright_red(), e);
                    ActionResult {
                        action_type: "RunCommand".into(),
                        path: None,
                        stdout: String::new(),
                        stderr: e.to_string(),
                        success: false,
                    }
                }
            }
        }
        // `git add -A` (best-effort, result ignored) then
        // `git commit -m <message>` in the workspace root.
        ActionRequest::GitCommit { message } => {
            let _ = tokio::process::Command::new("git")
                .args(["add", "-A"])
                .current_dir(workspace)
                .output()
                .await;
            match tokio::process::Command::new("git")
                .args(["commit", "-m", message])
                .current_dir(workspace)
                .output()
                .await
            {
                Ok(out) => {
                    let success = out.status.success();
                    if success {
                        info!(" {} git: {}", "🔖".bright_cyan(), message.bright_black());
                    }
                    ActionResult {
                        action_type: "GitCommit".into(),
                        path: None,
                        stdout: String::from_utf8_lossy(&out.stdout).to_string(),
                        stderr: String::from_utf8_lossy(&out.stderr).to_string(),
                        success,
                    }
                }
                Err(e) => ActionResult {
                    action_type: "GitCommit".into(),
                    path: None,
                    stdout: String::new(),
                    stderr: e.to_string(),
                    success: false,
                },
            }
        }
    }
}
/// Probe the workspace for a known project manifest and return the
/// corresponding build command, or `None` when no build system is
/// recognized (in which case build verification is skipped).
fn detect_build_system(workspace: &std::path::Path) -> Option<ActionRequest> {
    // Checked in priority order: Rust, Node, Python, Go.
    let has = |manifest: &str| workspace.join(manifest).exists();
    if has("Cargo.toml") {
        Some(ActionRequest::RunCommand {
            cmd: "cargo".into(),
            args: vec!["build".into()],
            cwd: None,
        })
    } else if has("package.json") {
        Some(ActionRequest::RunCommand {
            cmd: "npm".into(),
            args: vec!["run".into(), "build".into()],
            cwd: None,
        })
    } else if has("pyproject.toml") || has("setup.py") || has("requirements.txt") {
        // Python has no real "build"; byte-compiling catches syntax errors.
        Some(ActionRequest::RunCommand {
            cmd: "python3".into(),
            args: vec!["-m".into(), "compileall".into(), "-q".into(), ".".into()],
            cwd: None,
        })
    } else if has("go.mod") {
        Some(ActionRequest::RunCommand {
            cmd: "go".into(),
            args: vec!["build".into(), "./...".into()],
            cwd: None,
        })
    } else {
        None
    }
}
/// Detect the workspace's build system and try to get a passing build,
/// asking the LLM for fix-up actions after each failed attempt.
///
/// Returns `true` when the build passes (or no build system is detected),
/// `false` when `max_attempts` builds have all failed.
async fn build_and_verify(
    &mut self,
    workspace: &std::path::Path,
    session: &mut Session,
    max_attempts: u8,
) -> bool {
    let build_action = match Self::detect_build_system(workspace) {
        Some(a) => a,
        // Nothing recognizable to build — treat as verified.
        None => return true,
    };
    print_section("🔨 Verifying Build");
    for attempt in 0..max_attempts {
        session.increment_build_attempt();
        let result = Self::run_action(&build_action, workspace, session).await;
        if result.success {
            print_success(&format!("Build passed on attempt {}", attempt + 1));
            return true;
        }
        if attempt + 1 >= max_attempts {
            // Out of attempts — don't ask the LLM for another fix.
            break;
        }
        print_warning(&format!(
            "Build attempt {} failed. Asking LLM for fix...",
            attempt + 1
        ));
        // BUGFIX: the previous `&s[..s.len().min(800)]` byte slices could
        // panic when byte 800 fell inside a multi-byte UTF-8 character
        // (compiler output can contain arrows/box-drawing); truncate by
        // characters instead.
        let stdout_excerpt: String = result.stdout.chars().take(800).collect();
        let stderr_excerpt: String = result.stderr.chars().take(800).collect();
        let error_context = format!(
            "Build failed:\nstdout: {}\nstderr: {}",
            stdout_excerpt, stderr_excerpt
        );
        let fix_prompt = format!(
            "{}\n\nYou are operating in workspace: {}\n\n\
             The project build just failed. Analyze the error output and emit a JSON array \
             of action directives that fix the compilation/syntax errors.\n\n\
             Error context:\n{error_context}\n\n\
             Output only a valid JSON array starting with `[` and ending with `]`.",
            GENERIC_SYSTEM_PROMPT,
            workspace.display()
        );
        let raw = self.generate(&fix_prompt).await.unwrap_or_default();
        let clean = crate::common::utils::strip_code_blocks(&raw);
        // Unparseable fix output degrades to "no actions".
        let fix_actions: Vec<ActionRequest> =
            serde_json::from_str(clean.trim()).unwrap_or_default();
        for action in &fix_actions {
            let _ = Self::run_action(action, workspace, session).await;
        }
    }
    false
}
/// Ask the LLM to judge whether `task` succeeded, given the actions that
/// ran and their captured output.
///
/// Any generation or parse failure is mapped to a `Success` verdict so
/// reflection can never wedge the pipeline; this method always returns
/// `Ok`.
async fn reflect_on_task(
    &mut self,
    task: &SessionTask,
    results: &[ActionResult],
    retry_attempt: u8,
) -> Result<ReflectionResult> {
    let mut action_lines = Vec::with_capacity(results.len());
    let mut output_blocks = Vec::with_capacity(results.len());
    for r in results {
        // One-line summary of what ran: type plus optional path.
        action_lines.push(format!("- {} {:?}", r.action_type, r.path));
        // Cap captured output so the reflection prompt stays bounded.
        let stdout_head: String = r.stdout.chars().take(600).collect();
        let stderr_head: String = r.stderr.chars().take(400).collect();
        output_blocks.push(format!(
            "[{}] success={}\nstdout: {}\nstderr: {}",
            r.action_type, r.success, stdout_head, stderr_head
        ));
    }
    let full_prompt = REFLECTION_PROMPT
        .replace("{TASK_DESCRIPTION}", &task.description)
        .replace("{ACTIONS_EXECUTED}", &action_lines.join("\n"))
        .replace("{COMMAND_OUTPUTS}", &output_blocks.join("\n\n"))
        .replace("{RETRY_ATTEMPT}", &retry_attempt.to_string());
    let raw = self.generate(&full_prompt).await.unwrap_or_default();
    let clean = crate::common::utils::strip_code_blocks(&raw);
    let verdict = serde_json::from_str(clean.trim()).unwrap_or_else(|_| ReflectionResult {
        outcome: ReflectionOutcome::Success,
        reasoning: "Could not parse reflection - assuming success.".into(),
        corrective_actions: vec![],
    });
    Ok(verdict)
}
/// Distill reusable lessons from a finished session.
///
/// Returns `(domain, lessons, anti_patterns)` parsed from the LLM's JSON
/// reply, or `None` when generation or parsing fails.
async fn extract_lessons(
    &mut self,
    prompt: &str,
    tasks: &[SessionTask],
    results_summary: &str,
) -> Option<(String, Vec<String>, Vec<String>)> {
    // Expected JSON shape of the model's reply.
    #[derive(Deserialize)]
    struct LessonOutput {
        domain: String,
        #[serde(default)]
        lessons: Vec<String>,
        #[serde(default)]
        anti_patterns: Vec<String>,
    }
    // Render every task as "N. description [status]", one per line.
    let mut task_lines = Vec::with_capacity(tasks.len());
    for (idx, t) in tasks.iter().enumerate() {
        task_lines.push(format!(
            "{}. {} [{}]",
            idx + 1,
            t.description,
            t.status.as_str()
        ));
    }
    let full_prompt = LESSON_EXTRACTION_PROMPT
        .replace("{ORIGINAL_PROMPT}", prompt)
        .replace("{TASKS}", &task_lines.join("\n"))
        .replace("{RESULTS}", results_summary);
    let raw = self.generate(&full_prompt).await.ok()?;
    let clean = crate::common::utils::strip_code_blocks(&raw);
    let parsed: LessonOutput = serde_json::from_str(clean.trim()).ok()?;
    Some((parsed.domain, parsed.lessons, parsed.anti_patterns))
}
}
#[cfg(feature = "cli")]
/// Interactive REPL for the generic agent: reads prompts from stdin,
/// handles slash-commands (/help, /clear, /workspace, /status, /provider,
/// /models, /sessions), and runs each real prompt through `AutoGPT` with
/// an ESC-key interrupt watcher.
///
/// `yolo` auto-approves plans; `session_id` optionally resumes a saved
/// session's transcript before the loop starts. Loops until the user
/// types `exit`/`quit`.
///
/// BUGFIX: several `&current_provider` / `&current_model` references had
/// been corrupted into the mis-encoded token `¤t_…` (HTML-entity
/// garbling of `&curren`); restored throughout.
pub async fn run_generic_agent_loop(yolo: bool, session_id: Option<&str>) -> anyhow::Result<()> {
    use crate::cli::tui::{
        print_banner, print_greeting, print_status_bar, render_help_table, render_input_box_hint,
        render_warning_box,
    };
    print_banner();
    print_greeting();
    if yolo {
        warn!(
            "{}",
            "⚡ YOLO mode active - all plans will be auto-approved."
                .bright_yellow()
                .bold()
        );
        info!("");
    }
    let session_mgr = SessionManager::default();
    session_mgr.ensure_dirs()?;
    // Optionally replay a saved session's transcript before looping.
    if let Some(id) = session_id {
        match session_mgr.load(id) {
            Ok(session) => {
                print_section("📂 Resuming Session");
                info!(
                    " {} {} - {} tasks, {} messages",
                    "▸".bright_cyan(),
                    session.title.white().bold(),
                    session.tasks.len(),
                    session.messages.len()
                );
                info!("");
                for msg in &session.messages {
                    info!(
                        " {} {}",
                        format!("[{}]", msg.role).bright_magenta(),
                        msg.content
                            .chars()
                            .take(120)
                            .collect::<String>()
                            .bright_white()
                    );
                }
                info!("");
            }
            Err(e) => print_warning(&format!("Could not load session {id}: {e}")),
        }
    }
    let workspace = std::env::var("AUTOGPT_WORKSPACE").unwrap_or_else(|_| ".".to_string());
    std::fs::create_dir_all(&workspace)?;
    // Warn when running from $HOME so generated files don't sprawl there.
    if matches!((std::env::current_dir(), dirs::home_dir()), (Ok(cwd), Some(home)) if cwd == home) {
        render_warning_box(
            "You are running AutoGPT in your home directory.\n\
             It is recommended to run AutoGPT from a project-specific directory\n\
             so that generated files are scoped correctly.",
        );
    }
    // Provider/model selection state, kept in sync with `agent` below.
    let mut current_provider = crate::cli::models::default_provider();
    let mut current_model = crate::cli::models::default_model(&current_provider);
    let mut available_models = crate::cli::models::provider_models(&current_provider);
    let mut current_model_idx = crate::cli::models::model_index(&available_models, &current_model);
    let mut agent = GenericAgent {
        yolo,
        workspace: workspace.clone(),
        model: current_model.clone(),
        provider: current_provider.clone(),
        ..Default::default()
    };
    loop {
        if let Ok(cwd) = std::env::current_dir() {
            print_status_bar(&cwd.to_string_lossy(), &current_model, &current_provider);
        }
        render_input_box_hint();
        info!(
            "{} {}",
            ">".bright_blue().bold(),
            "Type your request, or /help for commands".bright_black()
        );
        print!("> ");
        io::stdout().flush()?;
        let stdin = io::stdin();
        let mut line = String::new();
        stdin.lock().read_line(&mut line)?;
        let input = line.trim().to_string();
        if input.is_empty() {
            print_warning("Please enter a prompt to work on.");
            continue;
        }
        if input.eq_ignore_ascii_case("exit") || input.eq_ignore_ascii_case("quit") {
            print_success("Session saved. Goodbye!");
            break;
        }
        if input.eq_ignore_ascii_case("/help") {
            render_help_table();
            continue;
        }
        if input.eq_ignore_ascii_case("/clear") {
            // ANSI: clear screen and move the cursor to row 1, col 1.
            print!("\x1B[2J\x1B[1;1H");
            io::stdout().flush()?;
            print_banner();
            print_greeting();
            continue;
        }
        if input.eq_ignore_ascii_case("/workspace") {
            info!(
                "{} {}",
                "Workspace:".bright_cyan(),
                workspace.bright_white()
            );
            continue;
        }
        if input.eq_ignore_ascii_case("/status") {
            let cwd = std::env::current_dir()
                .map(|p| p.to_string_lossy().to_string())
                .unwrap_or_default();
            info!("{} {}", "Directory:".bright_cyan(), cwd.bright_white());
            info!(
                "{} {}",
                "Model:".bright_cyan(),
                current_model.bright_magenta()
            );
            info!(
                "{} {}",
                "Provider:".bright_cyan(),
                current_provider.bright_white()
            );
            info!(
                "{} {}",
                "Workspace:".bright_cyan(),
                workspace.bright_white()
            );
            continue;
        }
        if input.eq_ignore_ascii_case("/provider") {
            let providers = ["gemini", "openai", "anthropic", "xai", "cohere"];
            info!("");
            info!("{}", "Select Provider:".bright_cyan().bold());
            for (i, p) in providers.iter().enumerate() {
                if *p == current_provider {
                    info!(
                        " {} {} {}",
                        format!("{}.", i + 1).bright_cyan(),
                        p.bright_white().bold(),
                        "(active)".bright_green()
                    );
                } else {
                    info!(
                        " {} {}",
                        format!("{}.", i + 1).bright_black(),
                        p.bright_white()
                    );
                }
            }
            info!("");
            print!("> Enter number: ");
            io::stdout().flush()?;
            let mut pick = String::new();
            io::stdin().lock().read_line(&mut pick)?;
            // Switching providers resets the model to that provider's default.
            if let Ok(n) = pick.trim().parse::<usize>()
                && n >= 1
                && n <= providers.len()
            {
                current_provider = providers[n - 1].to_string();
                agent.provider = current_provider.clone();
                available_models = crate::cli::models::provider_models(&current_provider);
                current_model = crate::cli::models::default_model(&current_provider);
                current_model_idx =
                    crate::cli::models::model_index(&available_models, &current_model);
                agent.model = current_model.clone();
                print_success(&format!("Switched to provider: {current_provider}"));
            }
            continue;
        }
        if input.eq_ignore_ascii_case("/models") {
            if available_models.is_empty() {
                print_warning(
                    "No models available for the current provider. Try setting the appropriate API key.",
                );
                continue;
            }
            let selected =
                crate::cli::tui::render_model_selector(&available_models, current_model_idx);
            current_model_idx = selected;
            current_model = available_models[selected].id.clone();
            agent.model = current_model.clone();
            print_success(&format!(
                "Model set to: {}",
                available_models[selected].display_name
            ));
            continue;
        }
        if input.eq_ignore_ascii_case("/sessions") {
            match session_mgr.list() {
                Ok(entries) if !entries.is_empty() => {
                    print_section("📁 Recent Sessions");
                    for (i, entry) in entries.iter().enumerate() {
                        info!(
                            " {} {} {} {}",
                            format!("{}.", i + 1).bright_cyan(),
                            entry.title.white().bold(),
                            format!("({}/{})", entry.completed_count, entry.task_count)
                                .bright_green(),
                            entry
                                .updated_at
                                .format("%Y-%m-%d %H:%M")
                                .to_string()
                                .bright_black()
                        );
                        info!(" {} {}", "↳".bright_black(), entry.id.bright_black());
                    }
                    info!("");
                    print!("> Enter number to resume (or press Enter to skip): ");
                    io::stdout().flush()?;
                    let mut pick = String::new();
                    io::stdin().lock().read_line(&mut pick)?;
                    let pick = pick.trim();
                    // A non-numeric or out-of-range pick silently skips.
                    if let Some(entry) = pick.parse::<usize>().ok().and_then(|n| {
                        if n >= 1 && n <= entries.len() {
                            Some(&entries[n - 1])
                        } else {
                            None
                        }
                    }) {
                        match session_mgr.load(&entry.id) {
                            Ok(s) => {
                                print_section("📂 Resumed Session");
                                info!(" {} {}", "▸".bright_cyan(), s.title.white().bold());
                                for msg in &s.messages {
                                    info!(
                                        " {} {}",
                                        format!("[{}]", msg.role).bright_magenta(),
                                        msg.content
                                            .chars()
                                            .take(120)
                                            .collect::<String>()
                                            .bright_white()
                                    );
                                }
                            }
                            Err(e) => print_error(&format!("Failed to load session: {e}")),
                        }
                    }
                }
                Ok(_) => print_warning("No previous sessions found."),
                Err(e) => print_error(&format!("Failed to list sessions: {e}")),
            }
            continue;
        }
        // Not a slash-command: treat as a work prompt and hand the agent
        // to the AutoGPT runner (approval gated unless YOLO).
        agent.agent.behavior = input.clone().into();
        let arc_agent: Arc<Mutex<Box<dyn AgentFunctions>>> =
            Arc::new(Mutex::new(Box::new(agent.clone())));
        let autogpt = AutoGPT::default()
            .execute(!yolo)
            .max_tries(2)
            .with(vec![arc_agent])
            .build()
            .expect("Failed to build AutoGPT");
        // Background watcher: resolves when the user presses ESC.
        let mut interrupt_handle = tokio::spawn(async move {
            loop {
                if matches!(event::poll(Duration::from_millis(100)), Ok(true))
                    && let Ok(Event::Key(key)) = event::read()
                    && key.code == KeyCode::Esc
                {
                    return;
                }
                tokio::time::sleep(Duration::from_millis(50)).await;
            }
        });
        // Race the agent run against the ESC watcher.
        tokio::select! {
            res = autogpt.run() => {
                interrupt_handle.abort();
                match res {
                    Ok(msg) => info!("{}", msg.bright_green()),
                    Err(e) => print_error(&format!("Agent error: {e:?}")),
                }
            }
            _ = &mut interrupt_handle => {
                print_warning("Execution interrupted by user (ESC pressed).");
            }
        }
    }
    Ok(())
}