// phago_llm — crate root (lib.rs)
//! # Phago LLM
//!
//! LLM integration for Phago semantic intelligence.
//!
//! This crate provides optional LLM backends for enhanced concept extraction,
//! relationship labeling, and query expansion.
//!
//! ## Features
//!
//! - `api`: Cloud API backends (Claude, OpenAI)
//! - `local`: Local backends (Ollama)
//! - `full`: All backends
//!
//! ## Usage
//!
//! ```rust,ignore
//! use phago_llm::{LlmBackend, OllamaBackend};
//!
//! let backend = OllamaBackend::new("http://localhost:11434");
//! let concepts = backend.extract_concepts("Cell membrane transport").await?;
//! ```

23mod backend;
24mod types;
25mod prompt;
26
27pub use backend::{LlmBackend, LlmError, LlmResult};
28pub use types::{Concept, Relationship, ConceptType, RelationType};
29pub use prompt::{PromptTemplate, ConceptPrompt, RelationshipPrompt};
30
// Optional backends, compiled only when the matching Cargo feature is enabled.

// Local inference via an Ollama server (`local` feature).
#[cfg(feature = "local")]
mod ollama;
#[cfg(feature = "local")]
pub use ollama::OllamaBackend;

// Cloud API backends (`api` feature): Anthropic Claude and OpenAI.
#[cfg(feature = "api")]
mod claude;
#[cfg(feature = "api")]
pub use claude::ClaudeBackend;

#[cfg(feature = "api")]
mod openai;
#[cfg(feature = "api")]
pub use openai::OpenAiBackend;
45
46/// Prelude for convenient imports.
47pub mod prelude {
48    pub use crate::{LlmBackend, LlmError, LlmResult};
49    pub use crate::{Concept, Relationship, ConceptType, RelationType};
50    pub use crate::{PromptTemplate, ConceptPrompt};
51
52    #[cfg(feature = "local")]
53    pub use crate::OllamaBackend;
54
55    #[cfg(feature = "api")]
56    pub use crate::{ClaudeBackend, OpenAiBackend};
57}