use oris_runtime::{
agent::{create_deep_agent, DeepAgentConfig},
chain::Chain,
prompt_args,
schemas::messages::Message,
};
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    env_logger::init();

    // Scratch workspace under the OS temp dir, seeded with one file the agent can inspect.
    let workspace_dir = std::env::temp_dir().join("oris_deep_agent_fs_example");
    std::fs::create_dir_all(&workspace_dir)?;
    std::fs::write(
        workspace_dir.join("hello.txt"),
        "Hello, world!\nSecond line.\n",
    )?;

    // Filesystem tools on, planning off; tool paths resolve relative to the workspace root.
    let agent_config = DeepAgentConfig::new()
        .with_planning(false)
        .with_filesystem(true)
        .with_workspace_root(workspace_dir.clone());

    let system_prompt =
        "You have access to file system tools in a workspace: ls, read_file, write_file, edit_file. \
         Paths are relative to the workspace. Use them to answer the user.";
    let deep_agent = create_deep_agent("gpt-4o-mini", &[], Some(system_prompt), agent_config)?;

    println!("=== Deep Agent file system tools ===\n");
    println!("Workspace: {}\n", workspace_dir.display());

    // Turn 1: Chain-style invocation, passing the conversation through prompt_args!.
    let reply = deep_agent
        .invoke(prompt_args! {
            "messages" => vec![
                Message::new_human_message("List files in the workspace and read the contents of hello.txt.")
            ]
        })
        .await?;
    println!("Response: {}\n", reply);

    // Turn 2: direct message invocation — asks the agent to create a new file.
    let reply = deep_agent
        .invoke_messages(vec![Message::new_human_message(
            "Create a file named summary.txt with the single line: Summary of workspace.",
        )])
        .await?;
    println!("Response 2: {}\n", reply);

    // Turn 3: asks the agent to edit the seed file in place.
    let reply = deep_agent
        .invoke_messages(vec![Message::new_human_message(
            "In hello.txt, replace 'world' with 'Deep Agent'.",
        )])
        .await?;
    println!("Response 3: {}\n", reply);

    Ok(())
}