Rust AI Agents - Fastly Compute
Run AI agents on Fastly Compute@Edge.
This crate provides a Fastly-native implementation using:
- `cortexai-llm-client` for request/response logic
- Fastly SDK for HTTP requests
Usage
use cortexai_fastly::{FastlyAgent, FastlyAgentConfig};
use fastly::{Request, Response};
#[fastly::main]
fn main(req: Request) -> Result<Response, fastly::Error> {
let config = FastlyAgentConfig::new(
"openai",
std::env::var("OPENAI_API_KEY").unwrap(),
"gpt-4o-mini",
);
let mut agent = FastlyAgent::new(config, "llm-backend");
let response = agent.chat("Hello!")?;
Ok(Response::from_body(response.content))
}