// cortexai_cloudflare/lib.rs
//! # Cloudflare Workers Support for Rust AI Agents
//!
//! This crate provides Cloudflare Workers-native AI agent functionality with:
//! - Multiple LLM provider support (OpenAI, Anthropic, OpenRouter)
//! - KV store integration for conversation persistence
//! - Streaming responses using Response streams
//! - Request/response handling for Worker routes
//!
//! ## Quick Start
//!
//! ```rust,ignore
//! use cortexai_cloudflare::{CloudflareAgent, CloudflareConfig, Provider};
//! use worker::*;
//!
//! #[event(fetch)]
//! async fn main(req: Request, env: Env, _ctx: Context) -> Result<Response> {
//!     let config = CloudflareConfig::builder()
//!         .provider(Provider::Anthropic)
//!         .api_key(env.secret("ANTHROPIC_API_KEY")?.to_string())
//!         .model("claude-3-5-sonnet-20241022")
//!         .build();
//!
//!     let agent = CloudflareAgent::new(config);
//!
//!     let response = agent.chat("Hello!").await?;
//!     Response::ok(response.content)
//! }
//! ```
//!
//! ## With KV Persistence
//!
//! ```rust,ignore
//! use cortexai_cloudflare::{CloudflareAgent, KvStore};
//!
//! let kv = env.kv("CONVERSATIONS")?;
//! let agent = CloudflareAgent::new(config)
//!     .with_kv(KvStore::new(kv));
//!
//! // Conversation history is automatically persisted
//! let response = agent.chat_with_session("session-123", "Hello!").await?;
//! ```

43mod agent;
44mod config;
45mod error;
46mod http;
47mod kv;
48mod streaming;
49
50pub use agent::CloudflareAgent;
51pub use config::{CloudflareConfig, CloudflareConfigBuilder};
52pub use error::CloudflareError;
53pub use http::CloudflareHttpClient;
54pub use kv::KvStore;
55pub use cortexai_llm_client::{Message, Provider, Role};
56pub use streaming::{StreamChunk, StreamingResponse};
57
58/// Re-export worker types for convenience
59pub use worker::{Env, Request, Response, Result};