// cortex_llm/lib.rs
1//! LLM adapters used by Cortex.
2//!
3//! This crate **must not write memory rows** — it only proposes candidates
4//! that the reflection / memory layers ingest (BUILD_SPEC §8). The shared
5//! shape lives in [`adapter`]; the deterministic CI / fixture adapter lives
6//! in [`replay`]. Hosted backends (Claude, Ollama) plug into the same
7//! [`adapter::LlmAdapter`] trait.
8
9#![deny(unsafe_code, missing_debug_implementations)]
10#![warn(missing_docs)]
11
12pub mod adapter;
13pub mod claude_http;
14pub mod claude_summary;
15pub mod ollama;
16pub mod ollama_http;
17pub mod ollama_summary;
18pub mod openai_compat;
19pub mod replay;
20pub mod sensitivity;
21pub mod summary;
22
23pub use adapter::{
24    blake3_hex, LlmAdapter, LlmError, LlmMessage, LlmRequest, LlmResponse, LlmRole, TokenUsage,
25};
26pub use claude_http::{
27    ClaudeHttpAdapter, CLAUDE_ADAPTER_API_KEY_MISSING_INVARIANT,
28    CLAUDE_ADAPTER_ENDPOINT_REJECTED_INVARIANT, CLAUDE_ADAPTER_MODEL_NOT_ALLOWED_INVARIANT,
29};
30pub use claude_summary::ClaudeSummaryBackend;
31pub use ollama_summary::{canonical_prompt_template_blake3, OllamaSummaryBackend};
32pub use ollama::{
33    validate_config as validate_ollama_config,
34    validate_endpoint_url as validate_ollama_endpoint_url,
35    validate_model_ref as validate_ollama_model_ref, OllamaConfig,
36};
37pub use ollama_http::OllamaHttpAdapter;
38pub use openai_compat::OpenAiCompatAdapter;
39pub use replay::{
40    FixtureFile, FixtureIndex, FixtureMatch, FixtureResponse, IndexEntry, ReplayAdapter,
41};
42pub use sensitivity::{check_remote_prompt_sensitivity, MaxSensitivity, SensitivityGateResult};
43pub use summary::{
44    NoopSummaryBackend, ReplaySummaryBackend, ReplaySummaryFixture, ReplaySummaryFixtureEntry,
45    SourceClaim, SummaryBackend, SummaryError, SummaryRequest, SummaryResponse,
46};