// lsp_llm/lib.rs
1//! `lsp-llm` — opt-in LLM advisor.
2//!
3//! The crate has two faces:
4//!
5//! - **Always-on surface** ([`AskRequest`], [`AskResponse`],
6//!   [`Citation`], [`is_compiled_in`], [`is_runtime_enabled`],
7//!   [`is_active`]). These types ship in every build so the
8//!   `axon-lsp` Backend can deserialise an `axon/askAdvisor` request
9//!   and reply with a structured `MethodNotFound` even when the
10//!   advisor itself is absent.
11//!
12//! - **Feature-gated surface** (`#[cfg(feature = "llm")]`). With
13//!   `--features llm` the [`Advisor`] struct is exposed, plus a
14//!   `try_advisor_from_env` helper. The advisor depends on
15//!   `reqwest` and the embedded `lsp-docs` corpus; both stay out
16//!   of the dep tree on default builds.
17//!
18//! Activation requires **both** axes to be true:
19//! - Compile-time: `--features llm` (lights up [`Advisor`]).
20//! - Runtime: `AXON_LSP_LLM_ENABLED=1` (the Backend opts in).
21//!
22//! If either is missing, `axon/askAdvisor` returns `MethodNotFound`
23//! with an explanatory message. Diagnostics, hover, completion,
24//! and definition keep working at full fidelity — the deterministic
25//! stack never depends on this crate at runtime.
26
27#![allow(clippy::module_name_repetitions)]
28
29use lsp_types::{Range, Url};
30use serde::{Deserialize, Serialize};
31
/// Name of the custom JSON-RPC method. The server and any LSP
/// client invoking the advisor must use this exact string.
pub const METHOD: &str = "axon/askAdvisor";

/// Env var holding the runtime opt-in flag (set to `1` to enable).
pub const ENV_ENABLED: &str = "AXON_LSP_LLM_ENABLED";

/// Env var selecting the LLM backend. Only `claude` is implemented
/// today; the indirection exists so additional backends can be
/// added later without changing the wire schema.
pub const ENV_BACKEND: &str = "AXON_LLM_BACKEND";

/// Env var overriding the default Anthropic model name.
pub const ENV_MODEL: &str = "AXON_LLM_MODEL";

/// Env var carrying the Anthropic API key, read when
/// `AXON_LLM_BACKEND=claude`.
pub const ENV_ANTHROPIC_KEY: &str = "ANTHROPIC_API_KEY";
50
51/// Optional code context the client can attach to a request.
52#[derive(Debug, Clone, Serialize, Deserialize)]
53pub struct CodeContext {
54    pub uri: Url,
55    pub range: Range,
56    /// The actual text of the selected range. The client extracts
57    /// it from the document so the server doesn't have to re-derive
58    /// it from rope coordinates.
59    #[serde(default)]
60    pub text: String,
61}
62
63/// JSON-RPC request payload for `axon/askAdvisor`.
64#[derive(Debug, Clone, Serialize, Deserialize)]
65pub struct AskRequest {
66    pub question: String,
67    #[serde(default)]
68    pub context: Option<CodeContext>,
69}
70
71/// One citation accompanying an answer. Today the URI is a
72/// synthetic `axon-lsp://docs/{kind}/{name}.md` for documentation
73/// entries, or the source URI when the advisor cited the user's
74/// own code. Editors may not be able to navigate the synthetic
75/// scheme, but the data is structured for future routing.
76#[derive(Debug, Clone, Serialize, Deserialize)]
77pub struct Citation {
78    pub uri: Url,
79    pub range: Range,
80}
81
82/// JSON-RPC response payload for `axon/askAdvisor`.
83#[derive(Debug, Clone, Serialize, Deserialize)]
84pub struct AskResponse {
85    pub answer: String,
86    #[serde(default)]
87    pub citations: Vec<Citation>,
88}
89
/// Reports whether this binary was built with `--features llm`.
/// Evaluated entirely at compile time via `cfg!`.
#[must_use]
pub const fn is_compiled_in() -> bool {
    cfg!(feature = "llm")
}
95
96/// True when the runtime opt-in env var is set to `1`. Reads the
97/// process environment on every call — the advisor is rare enough
98/// (one call per user question) that caching adds no value.
99#[must_use]
100pub fn is_runtime_enabled() -> bool {
101    std::env::var(ENV_ENABLED).as_deref() == Ok("1")
102}
103
104/// Whether the advisor is fully active — both compile-time and
105/// runtime gates must say yes. The Backend uses this to decide
106/// between routing through the advisor and replying with
107/// `MethodNotFound`.
108#[must_use]
109pub fn is_active() -> bool {
110    is_compiled_in() && is_runtime_enabled()
111}
112
113#[cfg(feature = "llm")]
114mod advisor;
115
116#[cfg(feature = "llm")]
117pub use advisor::{Advisor, AdvisorError, try_advisor_from_env};
118
#[cfg(test)]
mod tests {
    // Why there are no env-var tests here: Rust 2024 makes
    // `std::env::set_var` / `remove_var` unsafe, and the workspace
    // forbids unsafe code. Runtime-flag round-tripping is instead
    // exercised end-to-end in `crates/axon-lsp/tests/lsp_smoke.rs`,
    // where the child process receives a controlled environment via
    // `Command::env`. The tests below therefore stay in thread-safe
    // territory: flag arithmetic and serialisation round-trips.

    #[cfg(not(feature = "llm"))]
    use super::is_active;
    use super::{AskRequest, is_compiled_in};
    use serde_json::json;

    #[test]
    fn flags_match_cfg_default() {
        assert_eq!(is_compiled_in(), cfg!(feature = "llm"));
    }

    #[cfg(not(feature = "llm"))]
    #[test]
    fn default_build_is_inactive() {
        // Without the compile-time feature, the runtime flag alone
        // can never activate the advisor.
        assert!(!is_active());
    }

    #[test]
    fn ask_request_round_trips_json() {
        let payload = json!({
            "question": "What is a Trusted<T>?",
            "context": null,
        });
        let request: AskRequest =
            serde_json::from_value(payload).expect("deserialise");
        assert_eq!(request.question, "What is a Trusted<T>?");
        assert!(request.context.is_none());
        let round_tripped =
            serde_json::to_value(&request).expect("serialise");
        assert_eq!(round_tripped["question"], "What is a Trusted<T>?");
    }
}
158}