// mecha10_ai_llm/lib.rs

//! Mecha10 AI - Large Language Models
//!
//! This crate provides LLM integration as `BehaviorNode` implementations.
//!
//! # Supported Providers
//!
//! - **Claude** (Anthropic) - Claude 3.5 Sonnet, Claude 3 Opus, etc.
//! - **OpenAI** - GPT-4, GPT-4 Turbo, GPT-3.5
//! - **Local** - Ollama, Llama.cpp (HTTP API)
//!
//! # Example
//!
//! ```rust,no_run
//! use mecha10_ai_llm::prelude::*;
//!
//! # async fn example(ctx: &Context) -> anyhow::Result<()> {
//! // Create Claude LLM node
//! let mut llm = LlmNode::claude()
//!     .with_model("claude-3-5-sonnet-20241022")
//!     .with_system_prompt("You are a helpful robot assistant.")
//!     .with_api_key(std::env::var("ANTHROPIC_API_KEY")?)
//!     .build()?;
//!
//! // Ask a question
//! llm.add_message("user", "What should I do next?");
//! let _status = llm.tick(ctx).await?;
//!
//! if let Some(response) = llm.last_response() {
//!     println!("LLM: {}", response);
//! }
//! # Ok(())
//! # }
//! ```

// Internal module layout: the node itself, the provider backends,
// and the shared request/response data types.
mod llm_node;
mod providers;
mod types;

// Re-export the main types at the crate root so callers can write
// `use mecha10_ai_llm::LlmNode;` without knowing the module layout.
pub use llm_node::{LlmNode, LlmNodeBuilder};
pub use providers::{ClaudeProvider, LlmProvider, OpenAIProvider};
pub use types::{LlmConfig, LlmMessage, LlmResponse, LlmUsage};
43
44/// Prelude module for convenient imports
45pub mod prelude {
46    pub use crate::llm_node::{LlmNode, LlmNodeBuilder};
47    pub use crate::providers::{ClaudeProvider, LlmProvider, OpenAIProvider};
48    pub use crate::types::{LlmConfig, LlmMessage, LlmResponse, LlmUsage};
49    pub use mecha10_behavior_runtime::prelude::*;
50}