// anchor_chain/src/lib.rs

1//! # Anchor Chain
2//!
3//! Anchor Chain is a Rust framework designed to simplify the orchestration of workflows involving
4//! Large Language Models (LLMs). Inspired by LangChain, Anchor Chain provides a set of easy-to-use
5//! and extensible building blocks that enable developers to create robust and efficient LLM-based
6//! applications quickly. The framework prioritizes type safety, processing efficiency, and
7//! flexibility through its carefully designed APIs and abstractions.
8//!
9//! ## Features
10//!
11//! - Statically Typed Chains: Anchor Chain leverages Rust's type system to provide statically
12//!   typed chains, catching potential type mismatches at compile time.
13//!
14//! - Async Runtime for Parallel Execution: Built with Rust's async runtime, Anchor Chain allows
15//!   for efficient parallel processing of nodes in complex chains.
16//!
17//! - Extensibility through the Node Trait: The Node trait allows developers to create custom
18//!   nodes tailored to their specific use cases, enabling seamless integration into the chain.
19//!
20//! - Support for Popular LLMs: Anchor Chain provides built-in support for popular LLMs, such as
21//!   OpenAI's GPT models and Anthropic Claude, abstracting away API details to provide a common
22//!   interface.
23//!
24//! - Parallel Node Execution: The ParallelNode struct enables parallel execution of multiple
25//!   nodes, leveraging concurrency to improve overall chain performance.
26//!
27//! ## Getting Started
28//!
29//! To get started with Anchor Chain, add the following dependency to your Cargo.toml file:
30//!
31//! ```toml
32//! [dependencies]
33//! anchor-chain = "0.1.0"
34//! ```
35//!
//! Then, you can create chains using the `ChainBuilder` and invoke them with the `.process()`
//! function. Nodes are added to the chain with the `link()` function and are executed in
//! the order in which they were added.
39//!
40//! ```rust,no_run
41//! #[tokio::main]
42//! async fn main() {
43//!     use anchor_chain::{
44//!         chain::ChainBuilder,
45//!         models::openai::OpenAIModel,
46//!     };
47//!
48//!     let chain = ChainBuilder::new()
49//!         .link(OpenAIModel::new_gpt3_5_turbo("You are a helpful assistant".to_string()).await)
50//!         .build();
51//!
52//!     let result = chain
53//!         .process("Write a hello world program in Rust")
54//!         .await
55//!         .expect("Error processing chain");
56//!
57//!     println!("Result: {}", result);
58//! }
59//! ```
60//!
61//! Prompts can be constructed using the `Prompt` struct. `Prompt` uses
62//! [Tera](https://keats.github.io/tera/docs/#templates) templating to allow
63//! for dynamic input substitution. Tera's syntax is based on Jinja2 and Django
64//! templates. Context variables are passed to the prompt using a HashMap.
65//!
66//! ```rust,no_run
67//! use std::collections::HashMap;
68//!
69//! #[tokio::main]
70//! async fn main() {
71//!     use anchor_chain::{
72//!         chain::ChainBuilder,
73//!         models::openai::OpenAIModel,
74//!         nodes::prompt::Prompt,
75//!     };
76//!
77//!     let chain = ChainBuilder::new()
78//!         .link(Prompt::new("{{ input }}"))
79//!         .link(OpenAIModel::new_gpt3_5_turbo("You are a helpful assistant".to_string()).await)
80//!         .build();
81//!
82//!     let result = chain
83//!         .process(HashMap::from([("input".to_string(), "Write a hello world program in Rust".to_string())]))
84//!         .await
85//!         .expect("Error processing chain");
86//!
87//!     println!("Result: {}", result);
88//! }
89//! ```
90//!
91//! For more examples please refer to the [examples
92//! directory](https://github.com/emersonmde/anchor-chain/tree/main/examples).
93
// Compile the code examples in README.md as doctests so they stay in sync with
// the crate. The struct is never used at runtime; it exists only as a carrier
// for the `#[doc]` attribute and is compiled only under the `doctest` cfg
// (i.e. during `cargo test`'s documentation-test pass).
#[cfg(doctest)]
#[doc = include_str!("../README.md")]
struct _README;
97
// Module declarations. Modules declared `pub` form the crate's public API
// surface; `error` and `link` are private, with their key types re-exported
// at the crate root below (`AnchorChainError`, `Link`).
pub mod chain;
mod error;
mod link;
pub mod message;
pub mod models;
pub mod node;
pub mod nodes;
pub mod parallel_node;
pub mod vector;
107
108pub use error::AnchorChainError;
109pub use link::Link;
110
111pub use crate::models::claude_3::Claude3Bedrock;
112pub use crate::models::openai::OpenAIModel;
113pub use crate::node::NoOpNode;
114pub use crate::node::Node;
115pub use crate::nodes::logger::Logger;
116pub use crate::nodes::prompt::Prompt;
117pub use crate::parallel_node::to_boxed_future;
118pub use crate::parallel_node::ParallelNode;
119pub use crate::vector::document::Document;
120pub use chain::ChainBuilder;
121
122pub use crate::models::openai::OpenAIChatModel;
123pub use crate::models::openai::OpenAIEmbeddingModel;
124pub use crate::models::openai::OpenAIInstructModel;
125pub use crate::vector::opensearch_client_builder::OpenSearchClientBuilder;
126pub use crate::vector::opensearch_indexer::OpenSearchIndexer;
127pub use crate::vector::opensearch_retriever::OpenSearchRetriever;