#![allow(unreachable_code)]
use crate::agents::agent::AgentGPT;
#[cfg(feature = "net")]
use crate::collaboration::Collaborator;
#[allow(unused_imports)]
use crate::common::utils::{
Capability, ClientType, Communication, ContextManager, Goal, Knowledge, Persona, Planner,
Reflection, Route, Scope, Status, Task, TaskScheduler, Tool, strip_code_blocks,
};
use crate::prompts::optimizer::{MODULARIZE_PROMPT, SPLIT_PROMPT};
use crate::traits::agent::Agent;
use crate::traits::functions::{AsyncFunctions, Executor, Functions};
use anyhow::{Result, anyhow};
use auto_derive::Auto;
use colored::*;
use std::borrow::Cow;
use std::env::var;
use std::fs::File;
use std::io::Write;
use std::path::Path;
use tokio::fs;
use tracing::{debug, error, info};
#[cfg(feature = "mem")]
use {
crate::common::memory::load_long_term_memory, crate::common::memory::long_term_memory_context,
crate::common::memory::save_long_term_memory,
};
#[cfg(feature = "oai")]
use {openai_dive::v1::models::FlagshipModel, openai_dive::v1::resources::chat::*};
#[cfg(feature = "cld")]
use anthropic_ai_sdk::types::message::{
ContentBlock, CreateMessageParams, Message as AnthMessage, MessageClient,
RequiredMessageParams, Role,
};
#[cfg(feature = "gem")]
use gems::{
chat::ChatBuilder,
imagen::ImageGenBuilder,
messages::{Content, Message},
models::Model,
stream::StreamBuilder,
traits::CTrait,
};
#[cfg(any(feature = "oai", feature = "gem", feature = "cld", feature = "xai"))]
use crate::traits::functions::ReqResponse;
#[cfg(feature = "xai")]
use x_ai::{
chat_compl::{ChatCompletionsRequestBuilder, Message as XaiMessage},
traits::ChatCompletionsFetcher,
};
use async_trait::async_trait;
/// Agent that modularizes a monolithic source file into smaller modules.
///
/// Reads the main file of the configured language from `workspace`,
/// asks the LLM backend for a file split, and writes the generated
/// modules back under `workspace`.
#[derive(Debug, Clone, Default, Auto)]
#[allow(dead_code)]
pub struct OptimizerGPT {
    /// Directory the agent reads from and writes generated modules into
    /// (`$AUTOGPT_WORKSPACE/backend`).
    pub workspace: Cow<'static, str>,
    /// Underlying agent state (id, position, communications, status).
    agent: AgentGPT,
    /// Target language; the code handles "python", "rust", and "javascript".
    pub language: String,
    /// LLM client backend, selected from the environment via `ClientType::from_env()`.
    client: ClientType,
}
impl OptimizerGPT {
    /// Creates a new optimizer agent rooted at `$AUTOGPT_WORKSPACE/backend`.
    ///
    /// The workspace directory is created if it does not already exist;
    /// creation failures are logged but do not abort construction.
    ///
    /// # Arguments
    /// * `objective` - the agent's objective string.
    /// * `position` - the agent's position/role; also used as its id.
    /// * `language` - target language ("python", "rust", or "javascript").
    #[allow(unused)]
    pub async fn new(objective: &'static str, position: &'static str, language: &str) -> Self {
        // Lazily build the fallback so no allocation happens when the env var is set.
        let base_workspace = var("AUTOGPT_WORKSPACE").unwrap_or_else(|_| "workspace/".to_string());
        let workspace = format!("{base_workspace}/backend");

        if !fs::try_exists(&workspace).await.unwrap_or(false) {
            // Best-effort creation: log the outcome either way.
            match fs::create_dir_all(&workspace).await {
                Ok(_) => debug!("Directory '{}' created successfully!", workspace),
                Err(e) => error!("Error creating directory '{}': {}", workspace, e),
            }
        } else {
            debug!("Workspace directory '{}' already exists.", workspace);
        }

        let mut agent = AgentGPT::new_borrowed(objective, position);
        agent.id = agent.position().to_string().into();
        let client = ClientType::from_env();

        info!(
            "{}",
            format!("[*] {:?}: 🔧 Optimizer ready!", agent.position())
                .bright_white()
                .bold()
        );

        Self {
            workspace: workspace.into(),
            agent,
            client,
            language: language.to_string(),
        }
    }

    /// Writes `content` to `filename` inside the workspace, creating any
    /// intermediate directories first.
    ///
    /// # Errors
    /// Returns an error if directory creation or the file write fails.
    pub async fn save_module(&self, filename: &str, content: &str) -> Result<()> {
        let path = Path::new(self.workspace.as_ref()).join(filename);
        // `filename` may contain sub-directories (e.g. "src/utils.rs");
        // ensure they exist. `parent()` is None only for root paths.
        if let Some(parent) = path.parent() {
            fs::create_dir_all(parent).await?;
        }
        // Use tokio's async write: the original used blocking std::fs::File
        // inside an async fn, which can stall the executor thread.
        fs::write(&path, content).await?;
        Ok(())
    }

    /// Sends `request` to the configured LLM backend and returns the response.
    ///
    /// With no provider feature enabled this is a no-op returning an empty
    /// string (hence the buffer initialization below).
    #[allow(unused)]
    pub async fn generate_and_track(&mut self, request: &str) -> Result<String> {
        let mut response_text = String::new();
        #[cfg(any(feature = "oai", feature = "gem", feature = "cld", feature = "xai"))]
        {
            response_text = self.generate(request).await?;
        }
        Ok(response_text)
    }
}
#[async_trait]
impl Executor for OptimizerGPT {
    /// Runs the modularization pipeline:
    ///
    /// 1. Reads the language's main file from the workspace.
    /// 2. Asks the LLM (via `MODULARIZE_PROMPT`) for a list of module filenames.
    /// 3. Generates each module's content (via `SPLIT_PROMPT`) and saves it.
    /// 4. Rewrites the main file to import/declare the generated modules and
    ///    stores the import stub in `tasks.backend_code`.
    ///
    /// # Errors
    /// Returns an error for an unsupported `language`, or if reading,
    /// generating, or writing any file fails.
    async fn execute<'a>(
        &'a mut self,
        tasks: &'a mut Task,
        _execute: bool,
        _browse: bool,
        _max_tries: u64,
    ) -> Result<()> {
        info!(
            "{}",
            format!(
                "[*] {:?}: Executing modularization task",
                self.agent.position()
            )
            .bright_white()
            .bold()
        );

        // Entry-point file per language. Return an error instead of
        // panicking: this is a recoverable, caller-visible condition.
        let file_path = match self.language.as_str() {
            "python" => format!("{}/main.py", self.workspace),
            "rust" => format!("{}/src/main.rs", self.workspace),
            "javascript" => format!("{}/src/index.js", self.workspace),
            other => return Err(anyhow!("Unsupported language: {other}")),
        };

        let original_code = fs::read_to_string(&file_path).await?;

        self.agent.add_communication(Communication {
            role: Cow::Borrowed("user"),
            content: Cow::Owned(format!("Analyzing and modularizing: {file_path}")),
        });

        #[cfg(feature = "mem")]
        self.save_ltm(Communication {
            role: Cow::Borrowed("user"),
            content: Cow::Owned("Original code sent for modularization".to_string()),
        })
        .await?;

        // Ask for the target module layout; keep only lines that look like
        // source filenames for the supported languages.
        let prompt = format!("{MODULARIZE_PROMPT}\n\n{original_code}");
        let file_list_raw = self.generate_and_track(&prompt).await?;
        let filenames: Vec<String> = file_list_raw
            .lines()
            .map(str::trim)
            .filter(|line| {
                line.ends_with(".py") || line.ends_with(".rs") || line.ends_with(".js")
            })
            .map(str::to_string)
            .collect();

        for filename in &filenames {
            // Interpolate the target filename so the model knows which
            // module of the original code to emit.
            let split_prompt =
                format!("{SPLIT_PROMPT}\n\nFilename: {filename}\nContent:\n{original_code}");
            let response = self.generate_and_track(&split_prompt).await?;
            self.save_module(filename, &response).await?;

            self.agent.add_communication(Communication {
                role: Cow::Borrowed("assistant"),
                content: Cow::Owned(format!("Generated module: {filename}")),
            });

            #[cfg(feature = "mem")]
            self.save_ltm(Communication {
                role: Cow::Borrowed("assistant"),
                content: Cow::Owned(format!("Saved file: {filename}")),
            })
            .await?;
        }

        // Rebuild the entry point as a list of imports/module declarations
        // pointing at the generated files.
        let imports: String = filenames
            .iter()
            .map(|f| match self.language.as_str() {
                "python" => format!("import {}", f.replace(".py", "").replace("/", ".")),
                "rust" => format!("mod {};", f.replace(".rs", "").replace("/", "::")),
                "javascript" => format!("import './{f}';"),
                _ => String::new(),
            })
            .collect::<Vec<_>>()
            .join("\n");

        if !imports.is_empty() {
            fs::write(file_path.clone(), &imports).await?;
            tasks.backend_code = Some(imports.clone().into());
        }

        self.agent.update(Status::Completed);
        Ok(())
    }
}