// apprentice_lib/lib.rs

1//! Apprentice-lib is a library for building agent applications.
2//! It lets you create a chat with an LLM and use tools/functions.
3//! 
4//! ### Features
5//! 
6//!  - several providers
7//!  - light-weight
8//!  - configurable
9//!  - extensible
10//! 
11//! ### Providers
12//! 
13//! - Anthropic (Claude models)
14//! - OpenAI (GPT models)
15//! - Google Cloud Platform (Gemini)
16//! 
17//! ### Examples
18//! 
19//! ```rust no_run
20//! use apprentice_lib::llm::{get_llm_chat, Message, Role};
21//! use apprentice_lib::tools::ToolChoice;
22//! use apprentice_lib::request::get_reqwest_client;
23//! use apprentice_lib::ModelProvider;
24//! use apprentice_lib::Config;
25//!
26//! let config = Config::new(ModelProvider::OpenAI, "gpt-4".into(), "<api-key>".into(), "https://api.openai.com/v1/chat/completions".into());
27//! 
28//! let reqwest_client = get_reqwest_client().expect("transport created");
29//! 
30//! let mut chat = get_llm_chat(config, reqwest_client, vec![]).expect("chat created");
31//! 
32//! chat.set_system_prompt("You are a helpful assistant.".into());
33//! 
34//! let user_message = Message::text(Role::User, "Hi assistant!".into());
35//! 
36//! let response = chat.get_inference(&[user_message], ToolChoice::None).expect("LLM response");
37//!
38//! for message in response.iter() {
39//!     match message {
40//!         Message::Text(text) => { /* process text message */ }
41//!         Message::ToolCall(tool_call) => { /* process tool use request */ }
42//!         Message::ToolResult(_) => { panic!("LLM must not respond with tool result!") }
43//!     };
44//! }
45//! ```
46
47#![deny(missing_docs)]
48#![deny(clippy::suspicious)]
49#![allow(clippy::comparison_chain)]
50#![allow(clippy::collapsible_else_if)]
51#![allow(clippy::collapsible_if)]
52
53mod error;
54mod config;
55pub mod llm;
56pub mod tools;
57pub mod request;
58
59pub use error::Error;
60pub use config::Config;
61pub use config::ModelProvider;