#[cfg(feature = "cli")]
use crate::agents::agent::AgentGPT;
#[allow(unused_imports)]
use crate::traits::agent::Agent;
#[cfg(feature = "cli")]
use colored::Colorize;
#[cfg(feature = "cli")]
use indicatif::{ProgressBar, ProgressStyle};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::borrow::Cow;
use std::env::var;
#[cfg(feature = "cli")]
use std::{io, io::Read, process::Command, process::Stdio};
#[cfg(feature = "cli")]
use webbrowser::{Browser, BrowserOptions, open_browser_with_options};
#[cfg(feature = "cli")]
use {
crates_io_api::AsyncClient,
semver::Version,
std::io::Write,
tracing::{Event, Subscriber, error, info, warn},
tracing_appender::rolling,
tracing_subscriber::Layer,
tracing_subscriber::Registry,
tracing_subscriber::fmt::{FmtContext, FormatEvent, FormatFields},
tracing_subscriber::prelude::__tracing_subscriber_SubscriberExt,
tracing_subscriber::registry::LookupSpan,
tracing_subscriber::{filter, fmt},
};
#[cfg(feature = "gem")]
#[allow(unused_imports)]
use gems::{
Client as GeminiClient,
messages::{Content, Message as GeminiMessage},
models::Model as GeminiModel,
traits::CTrait,
};
#[cfg(feature = "oai")]
use openai_dive::v1::{
api::Client as OpenAIClient,
models::FlagshipModel,
resources::chat::{ChatMessage, ChatMessageContent},
};
#[cfg(feature = "cld")]
use anthropic_ai_sdk::{
client::AnthropicClient,
types::message::{Message as AnthMessage, MessageError},
};
use chrono::prelude::*;
use std::collections::HashMap;
#[cfg(feature = "xai")]
use x_ai::{chat_compl::Message as XaiMessage, client::XaiClient, traits::ClientConfig};
use derivative::Derivative;
#[cfg(feature = "cli")]
use std::time::Duration;
/// Handle to whichever LLM backend this build was compiled with; each
/// variant exists only when its feature flag is enabled.
#[derive(Debug, Clone)]
pub enum ClientType {
    /// OpenAI client (`oai` feature).
    #[cfg(feature = "oai")]
    OpenAI(OpenAIClient),
    /// Google Gemini client (`gem` feature).
    #[cfg(feature = "gem")]
    Gemini(GeminiClient),
    /// Anthropic Claude client (`cld` feature).
    #[cfg(feature = "cld")]
    Anthropic(AnthropicClient),
    /// xAI client (`xai` feature).
    #[cfg(feature = "xai")]
    Xai(XaiClient),
}

/// The default client is whatever the environment selects; see
/// [`ClientType::from_env`].
impl Default for ClientType {
    fn default() -> Self {
        ClientType::from_env()
    }
}
impl ClientType {
    /// Builds a client from the `AI_PROVIDER` environment variable
    /// (`openai`, `anthropic`, `xai`, or `gemini`; defaults to `gemini`
    /// when unset).
    ///
    /// Explicit provider names are checked first. When the `oai` feature
    /// is disabled, Gemini additionally acts as the catch-all fallback for
    /// unrecognized provider values — previously that fallback ran before
    /// the Anthropic/xAI branches and shadowed them, so e.g.
    /// `AI_PROVIDER=anthropic` without `oai` returned a Gemini client.
    ///
    /// # Panics
    /// Panics when a required API key variable is missing, a client fails
    /// to construct, or no matching feature flag was compiled in.
    pub fn from_env() -> Self {
        let provider = var("AI_PROVIDER").unwrap_or_else(|_| "gemini".to_string());
        #[cfg(feature = "oai")]
        if provider == "openai" {
            // Reads its configuration (OPENAI_API_KEY, ...) internally.
            let openai_client = OpenAIClient::new_from_env();
            return ClientType::OpenAI(openai_client);
        }
        // Explicit providers are matched before the Gemini fallback so they
        // are honored even when `oai` is not compiled in.
        #[cfg(feature = "cld")]
        if provider == "anthropic" {
            let api_key = var("ANTHROPIC_API_KEY").expect("Missing ANTHROPIC_API_KEY");
            let client = AnthropicClient::new::<MessageError>(api_key, "2023-06-01")
                .expect("Failed to create Anthropic client");
            return ClientType::Anthropic(client);
        }
        #[cfg(feature = "xai")]
        if provider == "xai" {
            let api_key = var("XAI_API_KEY").expect("Missing XAI_API_KEY");
            let client = XaiClient::builder()
                .build()
                .expect("Failed to build XaiClient");
            client.set_api_key(api_key);
            return ClientType::Xai(client);
        }
        #[cfg(feature = "gem")]
        if provider == "gemini" || cfg!(not(feature = "oai")) {
            let model = var("GEMINI_MODEL").unwrap_or_else(|_| "gemini-2.0-flash".to_string());
            // An empty key is tolerated here; requests fail later instead.
            let api_key = var("GEMINI_API_KEY").unwrap_or_default();
            let gemini_client = GeminiClient::builder().model(&model).build().unwrap();
            gemini_client.set_api_key(api_key);
            return ClientType::Gemini(gemini_client);
        }
        #[allow(unreachable_code)]
        {
            panic!(
                "Invalid AI_PROVIDER `{provider}` or missing required feature flags. \
                Make sure to enable at least one of: `oai`, `gem`, `cld`, `xai`."
            );
        }
    }
}
/// One chat exchange entry: who spoke (`role`) and what was said.
#[derive(Eq, Debug, PartialEq, Default, Clone, Hash, Serialize, Deserialize)]
pub struct Communication {
    /// Speaker role, e.g. "user" or "assistant".
    pub role: Cow<'static, str>,
    /// The message text itself.
    pub content: Cow<'static, str>,
}

/// Coarse lifecycle state of an agent.
#[derive(Debug, PartialEq, Default, Clone)]
pub enum Status {
    /// Not currently working on anything (initial state).
    #[default]
    Idle,
    Active,
    InUnitTesting,
    Completed,
}

/// Description of one HTTP endpoint in a generated API schema.
#[derive(Eq, Debug, Serialize, Deserialize, Clone, PartialEq, Default, Hash)]
pub struct Route {
    // NOTE(review): the exact semantics of `dynamic` (flag string vs.
    // path-parameter marker) are not visible in this file — confirm.
    pub dynamic: Cow<'static, str>,
    /// HTTP verb, e.g. "GET" or "POST".
    pub method: Cow<'static, str>,
    /// Example/expected request body as free-form JSON.
    pub body: Value,
    /// Example/expected response payload as free-form JSON.
    pub response: Value,
    /// URL path of the endpoint.
    pub path: Cow<'static, str>,
}

/// Flags describing what a generated project must cover.
#[derive(Eq, Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Default, Hash)]
pub struct Scope {
    /// Whether CRUD endpoints are required.
    pub crud: bool,
    /// Whether authentication is required.
    pub auth: bool,
    /// Whether external (third-party) integrations are required.
    pub external: bool,
}

/// A unit of work handed to the agents, plus everything they produce
/// while working on it.
#[derive(Eq, Debug, Serialize, Deserialize, Clone, PartialEq, Default, Hash)]
pub struct Task {
    /// Natural-language description of what to build.
    pub description: Cow<'static, str>,
    pub scope: Option<Scope>,
    /// Reference URLs gathered for the task, if any.
    pub urls: Option<Vec<Cow<'static, str>>>,
    pub frontend_code: Option<Cow<'static, str>>,
    pub backend_code: Option<Cow<'static, str>>,
    /// Generated API schema, if a backend was produced.
    pub api_schema: Option<Vec<Route>>,
}
impl Task {
pub fn from_payload(payload: &str) -> Self {
Task {
description: payload.to_string().into(),
scope: None,
urls: None,
frontend_code: None,
backend_code: None,
api_schema: None,
}
}
}
/// Extracts the first JSON object starting at the marker `{\n "crud"`
/// from `text`, including its balanced closing brace.
///
/// Returns `None` when the marker is absent or the braces never balance
/// (e.g. truncated model output). Previously an unbalanced object
/// returned a garbage one-character slice (`Some("{")`).
///
/// NOTE(review): braces inside JSON string values are counted too —
/// acceptable for the fixed scope payloads this targets, but not a
/// general JSON scanner.
pub fn extract_json_string(text: &str) -> Option<String> {
    let start_index = text.find("{\n \"crud\"")?;
    let mut open_braces_count = 1;
    // Scan past the opening brace, tracking nesting depth.
    for (i, c) in text[start_index + 1..].char_indices() {
        match c {
            '{' => open_braces_count += 1,
            '}' => {
                open_braces_count -= 1;
                if open_braces_count == 0 {
                    // `i` is a byte offset relative to `start_index + 1`,
                    // and '}' is one byte, so the exclusive end is +2.
                    let end_index = start_index + i + 2;
                    return Some(text[start_index..end_index].to_string());
                }
            }
            _ => {}
        }
    }
    // Braces never balanced: the object is truncated.
    None
}
/// Extracts a JSON string array from `text`.
///
/// If the whole input is already bracketed it is returned as-is;
/// otherwise the first `["`-delimited array is sliced out, tracking
/// bracket nesting. Returns `None` when no array is found or the
/// brackets never balance (truncated output) — previously the
/// unbalanced case returned a garbage one-character slice.
pub fn extract_array(text: &str) -> Option<String> {
    if text.starts_with('[') && text.ends_with(']') {
        return Some(text.to_string());
    }
    let start_index = text.find("[\"")?;
    let mut open_brackets_count = 1;
    for (i, c) in text[start_index + 1..].char_indices() {
        match c {
            '[' => open_brackets_count += 1,
            ']' => {
                open_brackets_count -= 1;
                if open_brackets_count == 0 {
                    // `i` is relative to `start_index + 1`; ']' is one byte.
                    let end_index = start_index + i + 2;
                    return Some(text[start_index..end_index].to_string());
                }
            }
            _ => {}
        }
    }
    // Brackets never balanced: the array is truncated.
    None
}
/// Computes the Levenshtein edit distance between two strings, measured
/// in Unicode scalar values (`char`s), not bytes.
fn levenshtein_distance(s1: &str, s2: &str) -> usize {
    let len1 = s1.chars().count();
    let len2 = s2.chars().count();
    // matrix[i][j] = distance between the first i chars of s1 and the
    // first j chars of s2.
    let mut matrix = vec![vec![0; len2 + 1]; len1 + 1];
    for (i, row) in matrix.iter_mut().enumerate() {
        row[0] = i;
    }
    for j in 0..=len2 {
        matrix[0][j] = j;
    }
    for (i, char1) in s1.chars().enumerate() {
        for (j, char2) in s2.chars().enumerate() {
            let cost = if char1 == char2 { 0 } else { 1 };
            matrix[i + 1][j + 1] = (matrix[i][j + 1] + 1) // deletion
                .min(matrix[i + 1][j] + 1) // insertion
                .min(matrix[i][j] + cost); // substitution
        }
    }
    matrix[len1][len2]
}

/// Returns a similarity score in `[0.0, 1.0]`: 1.0 means identical,
/// 0.0 means every character differs.
///
/// Two empty strings are identical and score 1.0 — previously that case
/// divided 0/0 and returned NaN.
pub fn similarity(s1: &str, s2: &str) -> f64 {
    let max_length = s1.chars().count().max(s2.chars().count());
    if max_length == 0 {
        return 1.0;
    }
    1.0 - levenshtein_distance(s1, s2) as f64 / max_length as f64
}
/// Returns the contents of the first fenced code block in `text`
/// (the lines between the first and second ``` fences). Input without
/// any fence is returned unchanged; text before the first fence and
/// after the second is discarded.
pub fn strip_code_blocks(text: &str) -> String {
    if !text.contains("```") {
        return text.to_string();
    }
    // 0 = before the first fence, 1 = inside the block.
    let mut state = 0u8;
    let mut collected: Vec<&str> = Vec::new();
    for line in text.lines() {
        let is_fence = line.trim_start().starts_with("```");
        match (state, is_fence) {
            (0, true) => state = 1,        // opening fence (language tag dropped)
            (1, true) => break,            // closing fence: stop collecting
            (1, false) => collected.push(line),
            _ => {}                        // preamble before the block
        }
    }
    collected.join("\n")
}
/// Reports whether `input`, trimmed and lowercased, is an affirmative
/// answer ("yes", "y", "si", "sure", "ok", or "okay").
pub fn is_yes(input: &str) -> bool {
    const AFFIRMATIVE: [&str; 6] = ["yes", "y", "si", "sure", "ok", "okay"];
    let normalized = input.trim().to_lowercase();
    AFFIRMATIVE.contains(&normalized.as_str())
}
/// Builds and launches the generated project in `path` for the given
/// `language`, returning the spawned child (stdout/stderr piped) so the
/// caller can inspect its output. When `browse` is true, first opens
/// http://127.0.0.1:8000/docs in the default browser (best effort).
///
/// Although declared `async`, the body is entirely blocking
/// `std::process` work — there are no awaits.
///
/// NOTE(review): the 10-second run cap relies on the external `timeout`
/// command (coreutils) — not portable everywhere; confirm target
/// platforms.
///
/// # Errors
/// Returns an error when the Rust build fails, a process cannot be
/// spawned, or the language is unsupported.
#[cfg(feature = "cli")]
pub async fn run_code(
    language: &str,
    path: &str,
    browse: bool,
) -> Result<Option<std::process::Child>, Box<dyn std::error::Error + Send + Sync>> {
    if browse {
        // Opening the docs page is best effort; failures are ignored.
        let _ = open_browser_with_options(
            Browser::Default,
            "http://127.0.0.1:8000/docs",
            BrowserOptions::new().with_suppress_output(false),
        );
    }
    match language {
        "rust" => {
            // Build synchronously first so a failure surfaces as Err(...)
            // rather than a dead child process.
            let mut build_command = Command::new("cargo");
            build_command
                .arg("build")
                .arg("--release")
                .arg("--verbose")
                .current_dir(path);
            let build_output = build_command.output()?;
            if build_output.status.success() {
                let run_output = Command::new("timeout")
                    .arg("10s")
                    .arg("cargo")
                    .arg("run")
                    .arg("--release")
                    .arg("--verbose")
                    .current_dir(path)
                    .stdout(Stdio::piped())
                    .stderr(Stdio::piped())
                    .spawn()?;
                Ok(Some(run_output))
            } else {
                Err("Rust build failed.".into())
            }
        }
        "python" => {
            // Serves the FastAPI app using the project-local virtualenv.
            let run_output = Command::new("sh")
                .arg("-c")
                .arg(format!(
                    "timeout {} '.venv/bin/python' -m uvicorn main:app --host 0.0.0.0 --port 8000",
                    10
                ))
                .current_dir(path)
                .stdout(Stdio::piped())
                .stderr(Stdio::piped())
                .spawn()
                .expect("Failed to run the backend application");
            Ok(Some(run_output))
        }
        "javascript" => {
            let run_output = Command::new("timeout")
                .arg("10s")
                .arg("node")
                .arg("src/index.js")
                .current_dir(path)
                .stdout(Stdio::piped())
                .stderr(Stdio::piped())
                .spawn()?;
            Ok(Some(run_output))
        }
        _ => Err(format!("Unsupported language: {language}").into()),
    }
}
/// Event formatter that writes only the message fields — no level,
/// timestamp, or target — for clean console output.
#[cfg(feature = "cli")]
pub struct NoLevelFormatter;

#[cfg(feature = "cli")]
impl<S, N> FormatEvent<S, N> for NoLevelFormatter
where
    S: Subscriber + for<'a> LookupSpan<'a>,
    N: for<'a> FormatFields<'a> + 'static,
{
    /// Writes the event's formatted fields followed by a newline.
    fn format_event(
        &self,
        ctx: &FmtContext<'_, S, N>,
        mut writer: fmt::format::Writer<'_>,
        event: &Event<'_>,
    ) -> std::fmt::Result {
        ctx.format_fields(writer.by_ref(), event)?;
        writeln!(writer)
    }
}

/// Installs the global tracing subscriber: a minimal message-only
/// console layer at INFO, plus a detailed daily-rolling file layer
/// (`logs/autogpt_log`) at DEBUG.
///
/// # Errors
/// Returns an error if a global subscriber has already been set.
#[cfg(feature = "cli")]
pub fn setup_logging() -> anyhow::Result<()> {
    let file_appender = rolling::daily("logs", "autogpt_log");
    // Console: human-friendly — no timestamps, levels, or code locations.
    let console_layer = fmt::Layer::new()
        .compact()
        .without_time()
        .with_file(false)
        .with_line_number(false)
        .with_thread_ids(false)
        .with_target(false)
        .with_writer(std::io::stdout)
        .event_format(NoLevelFormatter)
        .with_filter(filter::LevelFilter::INFO);
    // File: verbose — keeps file/line/thread/target for debugging.
    let file_layer = fmt::Layer::new()
        .compact()
        .with_file(true)
        .with_line_number(true)
        .with_thread_ids(true)
        .with_target(true)
        .with_writer(file_appender)
        .with_filter(filter::LevelFilter::DEBUG);
    let subscriber = Registry::default().with(console_layer).with(file_layer);
    tracing::subscriber::set_global_default(subscriber)?;
    Ok(())
}
/// When the agent has accumulated memory, asks the user (via stdin)
/// whether to run the generated application; on an affirmative answer,
/// builds and launches it through [`run_code`] and reports whether the
/// child's stderr stayed clean.
///
/// NOTE(review): non-empty stderr is treated as "too many bugs", but
/// toolchains (e.g. cargo) often write routine progress to stderr —
/// confirm this heuristic against real build output.
#[cfg(feature = "cli")]
pub async fn ask_to_run_command(
    agent: AgentGPT,
    language: &str,
    workspace: &str,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    if !agent.memory().is_empty() {
        warn!(
            "{}",
            "[*] \"AGI\": Maybe it's time to run the application? (yes/no)"
                .bright_yellow()
                .bold()
        );
        let mut input = String::new();
        io::stdin().read_line(&mut input)?;
        if is_yes(&input) {
            info!(
                "{}",
                "[*] \"AGI\": 🫡 Roger! Running the application..."
                    .green()
                    .bold()
            );
            let result = run_code(language, workspace, true).await;
            match result {
                Ok(Some(mut child)) => {
                    // stdout is captured but intentionally unused.
                    let _build_stdout =
                        child.stdout.take().expect("Failed to capture build stdout");
                    let mut build_stderr =
                        child.stderr.take().expect("Failed to capture build stderr");
                    // Blocks until the child closes stderr (exits or the
                    // 10s timeout inside `run_code` fires).
                    let mut stderr_output = String::new();
                    build_stderr.read_to_string(&mut stderr_output)?;
                    if !stderr_output.trim().is_empty() {
                        error!(
                            "{}",
                            "[*] \"AGI\": Too many bugs found. Consider debugging..."
                                .bright_red()
                                .bold()
                        );
                    } else {
                        info!(
                            "{}",
                            "[*] \"AGI\": Application built successful..."
                                .bright_white()
                                .bold()
                        );
                    }
                }
                Err(e) => {
                    error!("{}", format!("[*] \"AGI\": Error: {e}").bright_red().bold());
                }
                // Ok(None): nothing was spawned, nothing to report.
                _ => {}
            }
        }
    }
    Ok(())
}
/// Identifies which concrete model to request from the active backend.
#[derive(Debug, PartialEq, Clone)]
pub enum Model {
    /// An OpenAI flagship chat model.
    #[cfg(feature = "oai")]
    OpenAI(FlagshipModel),
    /// A Google Gemini model.
    #[cfg(feature = "gem")]
    Gemini(GeminiModel),
    /// An Anthropic Claude model, identified by its API model name.
    #[cfg(feature = "cld")]
    Claude(String),
    /// An xAI model, identified by its API model name.
    #[cfg(feature = "xai")]
    Xai(String),
}

impl Default for Model {
    /// Picks a default model by feature precedence:
    /// `oai` > `cld` > `gem` > `xai`.
    fn default() -> Self {
        // When `oai` is enabled, this block is the function's tail
        // expression; the later cfg'd blocks are compiled out and use
        // explicit `return`s instead.
        #[cfg(feature = "oai")]
        {
            Model::OpenAI(FlagshipModel::Gpt4O)
        }
        #[cfg(all(not(feature = "oai"), feature = "cld"))]
        {
            return Model::Claude("claude-3-7-sonnet-latest".to_string());
        }
        #[cfg(all(not(any(feature = "oai", feature = "cld")), feature = "gem"))]
        {
            return Model::Gemini(GeminiModel::Flash20);
        }
        #[cfg(all(
            not(any(feature = "oai", feature = "cld", feature = "gem")),
            feature = "xai"
        ))]
        {
            return Model::Xai("grok-beta".to_string());
        }
        #[cfg(not(any(feature = "oai", feature = "gem", feature = "cld", feature = "xai")))]
        {
            panic!(
                "At least one of the features `oai`, `gem`, `cld`, or `xai` must be enabled for Model::default()"
            );
        }
    }
}
/// A backend-specific chat message, one variant per enabled provider.
#[derive(Debug, Clone)]
pub enum Message {
    #[cfg(feature = "oai")]
    OpenAI(ChatMessage),
    #[cfg(feature = "gem")]
    Gemini(GeminiMessage),
    #[cfg(feature = "cld")]
    Claude(AnthMessage),
    #[cfg(feature = "xai")]
    Xai(XaiMessage),
}

impl Default for Message {
    /// A "Hello" user message for the highest-precedence enabled backend
    /// (`oai` > `cld` > `gem` > `xai`).
    fn default() -> Self {
        // The `oai` block is the tail expression when compiled in; the
        // other cfg'd blocks use explicit `return`s.
        #[cfg(feature = "oai")]
        {
            Message::OpenAI(ChatMessage::User {
                content: ChatMessageContent::Text("Hello".into()),
                name: None,
            })
        }
        #[cfg(all(not(feature = "oai"), feature = "cld"))]
        {
            // NOTE(review): `Role` is not among this file's visible
            // imports — confirm it resolves under the `cld` feature.
            return Message::Claude(AnthMessage::new_text(Role::User, "Hello"));
        }
        #[cfg(all(not(any(feature = "oai", feature = "cld")), feature = "gem"))]
        {
            return Message::Gemini(GeminiMessage::User {
                content: Content::Text("Hello".into()),
                name: None,
            });
        }
        #[cfg(all(
            not(any(feature = "oai", feature = "cld", feature = "gem")),
            feature = "xai"
        ))]
        {
            return Message::Xai(XaiMessage {
                role: "user".to_string(),
                content: "Hello".to_string(),
            });
        }
        #[cfg(not(any(feature = "oai", feature = "gem", feature = "cld", feature = "xai")))]
        {
            panic!(
                "At least one of the features `oai`, `gem`, `cld`, or `xai` must be enabled for Message::default()"
            );
        }
    }
}
impl Message {
    /// Wraps `_text` as a user message for the highest-precedence enabled
    /// backend (`oai` > `cld` > `gem` > `xai`).
    ///
    /// The parameter is underscore-prefixed because it is unused in the
    /// no-backend configuration, which only panics.
    pub fn from_text(_text: impl Into<String>) -> Self {
        #[cfg(feature = "oai")]
        {
            Message::OpenAI(ChatMessage::User {
                content: ChatMessageContent::Text(_text.into()),
                name: None,
            })
        }
        #[cfg(all(not(feature = "oai"), feature = "cld"))]
        {
            // NOTE(review): `Role` is not among this file's visible
            // imports — confirm it resolves under the `cld` feature.
            return Message::Claude(AnthMessage::new_text(Role::User, _text.into()));
        }
        #[cfg(all(not(any(feature = "oai", feature = "cld")), feature = "gem"))]
        {
            return Message::Gemini(GeminiMessage::User {
                content: Content::Text(_text.into()),
                name: None,
            });
        }
        #[cfg(all(
            not(any(feature = "oai", feature = "cld", feature = "gem")),
            feature = "xai"
        ))]
        {
            return Message::Xai(XaiMessage {
                role: "user".to_string(),
                content: _text.into(),
            });
        }
        #[cfg(not(any(feature = "oai", feature = "gem", feature = "cld", feature = "xai")))]
        {
            panic!(
                "At least one of the features `oai`, `gem`, `cld`, or `xai` must be enabled for Message::from_text()"
            );
        }
    }
}
/// Identifier for a built-in tool an agent can invoke; `Plugin` carries
/// the name of an externally registered tool.
#[derive(Eq, Debug, PartialEq, Default, Clone, Hash)]
pub enum ToolName {
    /// Default tool when none is specified.
    #[default]
    Search,
    Browser,
    News,
    Wiki,
    Calc,
    Math,
    Convert,
    Format,
    Sheet,
    Exec,
    Code,
    Regex,
    Box,
    Read,
    Write,
    Pdf,
    Summarize,
    Email,
    Sms,
    Calendar,
    Notes,
    Translate,
    Sentiment,
    Entities,
    TLDR,
    Classify,
    ImgGen,
    ImgScan,
    Transcribe,
    VidSum,
    VSearch,
    Memory,
    KB,
    Pad,
    Shell,
    Git,
    DB,
    API,
    Plan,
    Spawn,
    Judge,
    Loop,
    Diagram,
    Sim,
    Finance,
    Optimize,
    Frontend,
    Backend,
    /// Dynamically registered tool, keyed by its plugin name.
    Plugin(String),
}
/// A callable capability: a name, a human-readable description, and the
/// function pointer that executes it.
#[derive(Derivative)]
#[derivative(Eq, Debug, PartialEq, Default, Clone, Hash)]
pub struct Tool {
    pub name: ToolName,
    pub description: Cow<'static, str>,
    /// Defaults to [`noop_tool`] so `Tool::default()` is safely callable.
    #[derivative(Default(value = "noop_tool"))]
    pub invoke: fn(&str) -> String,
}

/// Simple key/value fact store.
#[derive(Derivative)]
#[derivative(Eq, Debug, PartialEq, Default, Clone, Hash)]
pub struct Knowledge {
    // `HashMap` itself does not implement `Hash`, so this field must be
    // skipped when hashing.
    #[derivative(Hash = "ignore")]
    pub facts: HashMap<Cow<'static, str>, Cow<'static, str>>,
}

/// Ordered list of goals the agent is currently pursuing.
#[derive(Eq, Debug, PartialEq, Default, Clone, Hash)]
pub struct Planner {
    pub current_plan: Vec<Goal>,
}

/// One step of a plan.
#[derive(Eq, Debug, PartialEq, Default, Clone, Hash)]
pub struct Goal {
    pub description: String,
    /// Relative importance; the scale's interpretation is up to callers.
    pub priority: u8,
    pub completed: bool,
}

/// Optional personality overlay for an agent.
#[derive(Eq, Debug, PartialEq, Default, Clone, Hash)]
pub struct Persona {
    pub name: Cow<'static, str>,
    pub traits: Vec<Cow<'static, str>>,
    /// Free-form behavior script, if any.
    pub behavior_script: Option<Cow<'static, str>>,
}

/// Self-evaluation state: recent log lines plus the function used to
/// produce an assessment (defaults to [`default_eval_fn`]).
#[derive(Derivative)]
#[derivative(Eq, Debug, PartialEq, Default, Clone, Hash)]
pub struct Reflection {
    pub recent_logs: Vec<Cow<'static, str>>,
    #[derivative(Default(value = "default_eval_fn"))]
    pub evaluation_fn: fn(&dyn Agent) -> Cow<'static, str>,
}

/// Tasks queued for future execution.
#[derive(Eq, Debug, PartialEq, Default, Clone, Hash)]
pub struct TaskScheduler {
    pub scheduled_tasks: Vec<ScheduledTask>,
}

/// A task paired with the UTC time at which it should run.
#[derive(Eq, Debug, PartialEq, Default, Clone, Hash)]
pub struct ScheduledTask {
    pub time: DateTime<Utc>,
    pub task: Task,
}
/// Input source an agent can observe.
#[derive(Eq, Debug, PartialEq, Default, Clone, Hash)]
pub enum Sensor {
    /// Watch a filesystem path for changes.
    FileWatcher(Cow<'static, str>),
    /// Listen on an API endpoint.
    ApiListener(Cow<'static, str>),
    #[default]
    AudioInput,
    Camera,
    /// Project-specific sensor, identified by name.
    Custom(Cow<'static, str>),
}

/// High-level ability an agent can advertise to its peers.
#[derive(Eq, Debug, PartialEq, Default, Clone, Hash, Serialize, Deserialize)]
pub enum Capability {
    #[default]
    CodeGen,
    UIDesign,
    WebSearch,
    SQLAccess,
    RobotControl,
    ApiIntegration,
    TextToSpeech,
}

/// Conversation context: the recent exchanges and the topics in focus.
#[derive(Eq, Debug, PartialEq, Default, Clone, Hash)]
pub struct ContextManager {
    pub recent_messages: Vec<Communication>,
    pub focus_topics: Vec<Cow<'static, str>>,
}

/// An agent's overall mission.
#[derive(Eq, Debug, PartialEq, Default, Clone, Hash)]
pub enum Objective {
    #[default]
    Explore,
    Defend,
    Research,
    Assist,
    Custom(Cow<'static, str>),
}

/// An agent's role within a team formation.
#[derive(Eq, Debug, PartialEq, Default, Clone, Hash)]
pub enum Position {
    #[default]
    Frontline,
    Support,
    Recon,
    Strategic,
    Custom(Cow<'static, str>),
}
/// Default self-evaluation hook: summarizes the agent's planner goals
/// (totals plus a numbered, per-goal status list), or reports that no
/// planner is available.
pub fn default_eval_fn(agent: &dyn Agent) -> Cow<'static, str> {
    let Some(planner) = agent.planner() else {
        return Cow::Borrowed("No planner available for self-evaluation.");
    };
    let goals = &planner.current_plan;
    let done = goals.iter().filter(|g| g.completed).count();
    let mut report = format!(
        "\n- Total Goals: {}\n- Completed: {}\n- In Progress: {}\n\nGoals Summary:\n",
        goals.len(),
        done,
        goals.len() - done
    );
    for (idx, goal) in goals.iter().enumerate() {
        let marker = if goal.completed {
            "✅ Completed"
        } else {
            "⏳ In Progress"
        };
        report.push_str(&format!("{}. {} [{}]\n", idx + 1, goal.description, marker));
    }
    Cow::Owned(report)
}
/// Fallback tool implementation (the default for `Tool::invoke`):
/// ignores its input and yields a fixed placeholder string.
pub fn noop_tool(_: &str) -> String {
    String::from("default tool output")
}
/// Which shape of output a generation request expects.
#[derive(Eq, Debug, PartialEq, Default, Clone, Hash)]
pub enum OutputKind {
    /// Free-form text (default).
    #[default]
    Text,
    /// A list of URLs.
    UrlList,
    /// A structured [`Scope`].
    Scope,
}

/// The produced output, one variant per [`OutputKind`].
#[derive(Eq, Debug, PartialEq, Clone, Hash)]
pub enum GenerationOutput {
    Text(String),
    UrlList(Vec<Cow<'static, str>>),
    Scope(Scope),
}
/// Creates a cyan terminal spinner labeled with `label`, already
/// ticking at 120ms intervals.
#[cfg(feature = "cli")]
pub fn spinner(label: &str) -> ProgressBar {
    let style = ProgressStyle::with_template("{prefix:.bold.dim} {spinner:.cyan} {msg}")
        .unwrap()
        .tick_chars("◑◒◐◓");
    let bar = ProgressBar::new_spinner();
    bar.set_style(style);
    bar.set_message(label.to_owned());
    bar.enable_steady_tick(Duration::from_millis(120));
    bar
}
/// Envelope for messages exchanged between agents; each variant
/// serializes under its lowercase `serde(rename)` tag.
#[derive(Eq, Debug, PartialEq, Clone, Hash, Serialize, Deserialize)]
pub enum AgentMessage {
    /// A task assignment.
    #[serde(rename = "task")]
    Task(Task),
    /// A free-form status string.
    #[serde(rename = "status")]
    Status(String),
    /// A snapshot of conversation memory.
    #[serde(rename = "memory")]
    Memory(Vec<Communication>),
    /// An advertisement of what the sending agent can do.
    #[serde(rename = "capability_advert")]
    CapabilityAdvert {
        sender_id: String,
        capabilities: Vec<Capability>,
    },
    /// Application-defined payload.
    #[serde(rename = "custom")]
    Custom(String),
}
/// Queries crates.io for the newest published version of the `autogpt`
/// crate; returns `None` on any client or network failure.
#[cfg(feature = "cli")]
#[allow(unused)]
pub async fn fetch_latest_version() -> Option<String> {
    // crates.io requires a descriptive user agent; the Duration is the
    // client's rate-limit interval between requests.
    let client = AsyncClient::new(
        "autogpt (github.com/kevin-rs/autogpt)",
        Duration::from_millis(1000),
    )
    .ok()?;
    let crate_data = client.get_crate("autogpt").await.ok()?;
    Some(crate_data.crate_data.max_version)
}
/// Returns `true` when `latest` is a strictly newer semantic version
/// than `current`.
///
/// If either string fails to parse, this conservatively reports `false`.
/// Previously the function compared `Option<Version>` values directly,
/// and because `None < Some(_)`, an unparseable `current` spuriously
/// reported the install as outdated.
#[cfg(feature = "cli")]
#[allow(unused)]
pub fn is_outdated(current: &str, latest: &str) -> bool {
    match (Version::parse(current), Version::parse(latest)) {
        (Ok(current), Ok(latest)) => current < latest,
        _ => false,
    }
}
/// Asks the user (via stdin) whether to self-update; on "y", runs
/// `cargo install autogpt --force --all-features` and logs the outcome.
/// Any answer other than "y" (case-insensitive) skips the update.
#[cfg(feature = "cli")]
#[allow(unused)]
pub fn prompt_for_update() {
    info!(
        "{}",
        "🚀 A new version of autogpt is available! Do you want to update? (y/N):"
            .bright_yellow()
            .bold()
    );
    print!("> ");
    // Flush so the prompt appears before blocking on stdin.
    io::stdout().flush().unwrap();
    let mut input = String::new();
    if io::stdin().read_line(&mut input).is_ok() {
        if input.trim().to_lowercase() == "y" {
            info!("{}", "🛠️ Updating autogpt...".bright_cyan().bold());
            let status = Command::new("cargo")
                .args(["install", "autogpt", "--force", "--all-features"])
                .status()
                .expect("❌ Failed to run cargo install");
            if status.success() {
                info!("{}", "✅ Successfully updated autogpt!".green().bold());
            } else {
                error!("{}", "❌ Failed to update autogpt.".red().bold());
            }
        } else {
            info!("{}", "❎ Skipping update.".dimmed());
        }
    }
}