// NOTE(review): lines 1-61 of this extraction contained only stray line
// numbers (an extraction artifact, not valid Rust); the original content of
// this span, if any, was lost and could not be reconstructed.
//! Apprentice-lib is a library for building agent applications.
//! It lets you create a chat with an LLM and use tools/functions.
//!
//! ### Features
//!
//! - several providers
//! - light-weight
//! - configurable
//! - extensible
//!
//! ### Providers
//!
//! - Anthropic (Claude models)
//! - OpenAI (GPT models)
//! - Google Cloud Platform (Gemini)
//!
//! ### Examples
//!
//! ```rust no_run
//! use apprentice_lib::llm::{get_llm_chat, Message, Role};
//! use apprentice_lib::tools::ToolChoice;
//! use apprentice_lib::request::get_reqwest_client;
//! use apprentice_lib::ModelProvider;
//! use apprentice_lib::Config;
//!
//! let config = Config::new(ModelProvider::OpenAI, "gpt-4".into(), "<api-key>".into(), "https://api.openai.com/v1/chat/completions".into());
//!
//! let reqwest_client = get_reqwest_client().expect("transport created");
//!
//! let mut chat = get_llm_chat(config, reqwest_client, vec![]).expect("chat created");
//!
//! chat.set_system_prompt("You are a helpful assistant.".into());
//!
//! let user_message = Message::text(Role::User, "Hi assistant!".into());
//!
//! let response = chat.get_inference(&[user_message], ToolChoice::None).expect("LLM response");
//!
//! for message in response.iter() {
//! match message {
//! Message::Text(text) => { /* process text message */ }
//! Message::ToolCall(tool_call) => { /* process tool use request */ }
//! Message::ToolResult(_) => { panic!("LLM must not respond with tool result!") }
//! };
//! }
//! ```
// Crate-level re-exports so callers can write `apprentice_lib::Config`,
// `apprentice_lib::ModelProvider`, and `apprentice_lib::Error` directly,
// as shown in the crate-level example above.
//
// NOTE(review): a bare-identifier `pub use Error;` does not resolve in
// Rust 2018+ (and is a self-import in 2015) — these three lines almost
// certainly need module paths, e.g. `pub use error::Error;`,
// `pub use config::Config;`, `pub use config::ModelProvider;`.
// TODO: confirm against the crate's actual module layout.
pub use Error;
pub use Config;
pub use ModelProvider;