// mubit-sdk 0.7.0
//
// Umbrella Rust SDK for Mubit core/control planes
// Documentation
//! MuBit Learn — Closed-loop agentic memory.
//!
//! Unlike Python/JS which can monkey-patch LLM clients, the Rust module
//! provides explicit `enrich_messages()` and `record()` methods that the
//! user calls around their LLM invocation.
//!
//! # Example
//!
//! ```rust,no_run
//! use mubit_sdk::learn::{LearnSession, LearnConfig};
//! use serde_json::json;
//!
//! # async fn example() -> Result<(), Box<dyn std::error::Error>> {
//! let session = LearnSession::new(
//!     LearnConfig::from_env()
//!         .agent_id("my-agent")
//!         .inject_lessons(true)
//!         .auto_reflect(true)
//! ).await;
//!
//! let messages = vec![
//!     json!({"role": "user", "content": "Fix the auth bug"}),
//! ];
//!
//! // Enrich messages with lessons before LLM call
//! let enriched = session.enrich_messages(&messages).await;
//!
//! // ... make your LLM call with `enriched` ...
//!
//! // Record the interaction after LLM call
//! session.record("The fix involves refreshing the token", "gpt-4o", 1500.0).await;
//!
//! // End run (triggers reflection)
//! session.end().await;
//! # Ok(())
//! # }
//! ```

mod cache;
pub mod injection;
mod run_manager;

use std::sync::Arc;
use std::time::Duration;

use serde_json::Value;

use cache::LessonCache;
use injection::{extract_query, inject_context_openai};
use run_manager::RunManagerInner;

/// Configuration for a learn session.
///
/// Build via [`LearnConfig::from_env`] (or [`Default::default`]) and then
/// chain the builder-style setters, e.g.
/// `LearnConfig::from_env().agent_id("my-agent").inject_lessons(true)`.
#[derive(Clone, Debug)]
pub struct LearnConfig {
    /// MuBit API key; `from_env` reads it from `MUBIT_API_KEY`.
    pub api_key: String,
    /// Base URL of the MuBit server (trailing `/` is stripped by the setters).
    pub endpoint: String,
    /// Agent identifier; defaults to `"auto"`.
    pub agent_id: String,
    /// User identifier; empty by default.
    pub user_id: String,
    /// Explicit session ID; when `None`, one is generated by the run manager
    /// (see `LearnSession::session_id`).
    pub session_id: Option<String>,
    /// When `true`, `enrich_messages` injects retrieved context; when
    /// `false`, it returns the input messages unchanged.
    pub inject_lessons: bool,
    /// Where to place injected context in the message array (default
    /// `"system"`); forwarded to `injection::inject_context_openai`.
    pub injection_position: String,
    /// Token budget for retrieved context (default 2048).
    /// NOTE(review): enforcement happens outside this file — presumably in
    /// the context request; confirm in `run_manager`.
    pub max_token_budget: u32,
    /// Entry types to retrieve; empty means server default
    /// (assumed — usage lives in `run_manager`, confirm there).
    pub entry_types: Vec<String>,
    /// Context sections to request; empty means server default
    /// (assumed — usage lives in `run_manager`, confirm there).
    pub context_sections: Vec<String>,
    /// Memory lane to read/write; empty means the default lane
    /// (assumed — usage lives in `run_manager`, confirm there).
    pub lane: String,
    /// When `true`, `LearnSession::end` triggers reflection.
    pub auto_reflect: bool,
    /// Optionally trigger reflection every N recorded calls; `None` disables
    /// periodic reflection.
    pub reflect_after_n_calls: Option<u64>,
    /// TTL for cached context blocks, in seconds (default 30).
    pub cache_ttl_seconds: f64,
    /// Maximum number of entries kept in the lesson cache (default 100).
    pub cache_max_entries: usize,
    /// When `true`, context-retrieval failures are swallowed and the
    /// original messages are returned unchanged (see `enrich_messages`).
    pub fail_open: bool,
    /// Include mental_model entries in context retrieval (highest priority).
    pub include_mental_models: bool,
}

impl Default for LearnConfig {
    fn default() -> Self {
        Self {
            api_key: String::new(),
            endpoint: "http://127.0.0.1:3000".to_string(),
            agent_id: "auto".to_string(),
            user_id: String::new(),
            session_id: None,
            inject_lessons: true,
            injection_position: "system".to_string(),
            max_token_budget: 2048,
            entry_types: Vec::new(),
            context_sections: Vec::new(),
            lane: String::new(),
            auto_reflect: true,
            reflect_after_n_calls: None,
            cache_ttl_seconds: 30.0,
            cache_max_entries: 100,
            fail_open: true,
            include_mental_models: true,
        }
    }
}

impl LearnConfig {
    /// Create a config from environment variables.
    ///
    /// Reads `MUBIT_API_KEY` (empty string when unset) and `MUBIT_ENDPOINT`
    /// (falls back to `http://127.0.0.1:3000`). A trailing `/` on the
    /// endpoint is stripped so later path joins don't produce `//`.
    pub fn from_env() -> Self {
        let api_key = std::env::var("MUBIT_API_KEY").unwrap_or_default();
        let endpoint = std::env::var("MUBIT_ENDPOINT")
            .unwrap_or_else(|_| "http://127.0.0.1:3000".to_string());
        Self {
            api_key,
            endpoint: endpoint.trim_end_matches('/').to_string(),
            ..Default::default()
        }
    }

    /// Set the MuBit API key.
    pub fn api_key(mut self, key: impl Into<String>) -> Self {
        self.api_key = key.into();
        self
    }

    /// Set the server endpoint; a trailing `/` is stripped.
    pub fn endpoint(mut self, ep: impl Into<String>) -> Self {
        self.endpoint = ep.into().trim_end_matches('/').to_string();
        self
    }

    /// Set the agent identifier.
    pub fn agent_id(mut self, id: impl Into<String>) -> Self {
        self.agent_id = id.into();
        self
    }

    /// Set the user identifier.
    pub fn user_id(mut self, id: impl Into<String>) -> Self {
        self.user_id = id.into();
        self
    }

    /// Set an explicit session ID instead of an auto-generated one.
    pub fn session_id(mut self, id: impl Into<String>) -> Self {
        self.session_id = Some(id.into());
        self
    }

    /// Enable or disable lesson injection in `enrich_messages`.
    pub fn inject_lessons(mut self, val: bool) -> Self {
        self.inject_lessons = val;
        self
    }

    /// Set where injected context is placed in the message array.
    pub fn injection_position(mut self, pos: impl Into<String>) -> Self {
        self.injection_position = pos.into();
        self
    }

    /// Set the token budget for retrieved context.
    pub fn max_token_budget(mut self, budget: u32) -> Self {
        self.max_token_budget = budget;
        self
    }

    /// Restrict which entry types are retrieved.
    pub fn entry_types(mut self, types: Vec<String>) -> Self {
        self.entry_types = types;
        self
    }

    /// Restrict which context sections are requested.
    pub fn context_sections(mut self, sections: Vec<String>) -> Self {
        self.context_sections = sections;
        self
    }

    /// Set the memory lane to read/write.
    pub fn lane(mut self, lane: impl Into<String>) -> Self {
        self.lane = lane.into();
        self
    }

    /// Enable or disable reflection when the session ends.
    pub fn auto_reflect(mut self, val: bool) -> Self {
        self.auto_reflect = val;
        self
    }

    /// Trigger reflection every `n` recorded calls.
    pub fn reflect_after_n_calls(mut self, n: u64) -> Self {
        self.reflect_after_n_calls = Some(n);
        self
    }

    /// Set the TTL (seconds) for cached context blocks.
    pub fn cache_ttl_seconds(mut self, ttl: f64) -> Self {
        self.cache_ttl_seconds = ttl;
        self
    }

    /// Set the maximum number of entries kept in the lesson cache.
    /// (Added for parity: every other config field has a builder setter.)
    pub fn cache_max_entries(mut self, max: usize) -> Self {
        self.cache_max_entries = max;
        self
    }

    /// Enable or disable fail-open behavior on context-retrieval errors.
    pub fn fail_open(mut self, val: bool) -> Self {
        self.fail_open = val;
        self
    }

    /// Include or exclude mental_model entries in context retrieval.
    /// (Added for parity: every other config field has a builder setter.)
    pub fn include_mental_models(mut self, val: bool) -> Self {
        self.include_mental_models = val;
        self
    }
}

/// A learn session that provides closed-loop memory for LLM interactions.
///
/// Call [`LearnSession::enrich_messages`] before your LLM call and
/// [`LearnSession::record`] after. Call [`LearnSession::end`] when your run
/// is complete to trigger reflection.
pub struct LearnSession {
    // Shared run state: config, session ID, call counter, and the HTTP
    // context-retrieval / end-of-run logic (see `run_manager`).
    inner: Arc<RunManagerInner>,
    // Bounded, TTL-based cache of retrieved context blocks, keyed by
    // (session_id, query) — avoids re-fetching on repeated enrichment.
    cache: Arc<LessonCache>,
}

impl LearnSession {
    /// Create a new learn session.
    ///
    /// A non-finite or negative `cache_ttl_seconds` is treated as zero
    /// (i.e. caching effectively disabled) rather than panicking:
    /// `Duration::from_secs_f64` panics on negative/NaN/infinite input,
    /// and the TTL is user-supplied via the config builder.
    pub async fn new(config: LearnConfig) -> Self {
        let ttl_secs = config.cache_ttl_seconds;
        let ttl = if ttl_secs.is_finite() && ttl_secs >= 0.0 {
            Duration::from_secs_f64(ttl_secs)
        } else {
            Duration::ZERO
        };
        let max_entries = config.cache_max_entries;

        Self {
            inner: Arc::new(RunManagerInner::new(config)),
            cache: Arc::new(LessonCache::new(ttl, max_entries)),
        }
    }

    /// The auto-generated or configured session ID for this run.
    pub fn session_id(&self) -> &str {
        &self.inner.session_id
    }

    /// Number of LLM calls recorded so far.
    pub fn call_count(&self) -> u64 {
        self.inner.call_count()
    }

    /// Enrich a message array with relevant lessons from MuBit.
    ///
    /// Fetches context via `get_context()` and injects it into the messages.
    /// Returns the enriched messages array (original is not modified).
    ///
    /// If injection is disabled, no query can be extracted, or context
    /// retrieval fails (with `fail_open`), returns the original messages
    /// unchanged.
    pub async fn enrich_messages(&self, messages: &[Value]) -> Vec<Value> {
        if !self.inner.config.inject_lessons {
            return messages.to_vec();
        }

        // Derive a retrieval query from the messages (capped at 200 chars);
        // nothing to look up means nothing to inject.
        let query = extract_query(messages, 200);
        if query.is_empty() {
            return messages.to_vec();
        }

        // Cache hit: reuse the previously fetched context block. An empty
        // cached block means "server had nothing" — skip injection.
        if let Some(cached) = self.cache.get(&self.inner.session_id, &query).await {
            if !cached.is_empty() {
                return inject_context_openai(
                    messages,
                    &cached,
                    &self.inner.config.injection_position,
                );
            }
            return messages.to_vec();
        }

        // Cache miss: fetch from the server. Empty results are cached too,
        // so repeated misses don't hammer the server within the TTL.
        match self.inner.get_context_http(&query).await {
            Ok(context_block) => {
                self.cache
                    .set(&self.inner.session_id, &query, context_block.clone())
                    .await;
                if !context_block.is_empty() {
                    inject_context_openai(
                        messages,
                        &context_block,
                        &self.inner.config.injection_position,
                    )
                } else {
                    messages.to_vec()
                }
            }
            // fail_open: degrade silently — the caller's LLM flow proceeds
            // without enrichment.
            Err(_) if self.inner.config.fail_open => messages.to_vec(),
            // fail_open disabled: we still can't propagate (the signature is
            // infallible), so log the failure and return the input as-is.
            Err(e) => {
                eprintln!("mubit.learn: context retrieval failed: {e}");
                messages.to_vec()
            }
        }
    }

    /// Record an LLM interaction. Increments the call counter and may
    /// trigger periodic reflection.
    ///
    /// The response text, model name, and latency are currently unused here;
    /// they are accepted for API parity with the other SDKs.
    pub async fn record(&self, _response_text: &str, _model: &str, _latency_ms: f64) {
        self.inner.increment();
    }

    /// End the session. Triggers reflection if `auto_reflect` is enabled.
    pub async fn end(&self) {
        self.inner.end().await;
    }
}