//! lean-ctx 3.1.5
//!
//! Context Runtime for AI Agents with CCP. 42 MCP tools, 10 read modes, 90+ compression patterns, cross-session memory (CCP), persistent AI knowledge with temporal facts + contradiction detection, multi-agent context sharing + diaries, LITM-aware positioning, AAAK compact format, adaptive compression with Thompson Sampling bandits. Supports 24 AI tools. Reduces LLM token consumption by up to 99%.
//!
//! See the project documentation for details.
use axum::extract::State;
use axum::http::StatusCode;
use axum::Json;
use serde::Serialize;

use super::auth::AppState;
use super::helpers::internal_error;

/// Aggregate, platform-wide statistics returned by [`get_global_stats`].
///
/// All counters are `i64` to match Postgres `BIGINT` aggregates.
#[derive(Debug, Serialize)]
pub struct GlobalStatsResponse {
    /// Sum of `total_tokens_saved` across all `user_profiles` rows.
    pub total_tokens_saved: i64,
    /// Total number of rows in `users`.
    pub total_users: i64,
    /// Total number of rows in `contribute_entries`.
    pub total_contributions: i64,
    /// Total number of rows in `teams`.
    pub total_teams: i64,
}

/// `GET` handler that returns aggregate platform statistics as JSON.
///
/// Fetches all four aggregates in a single statement (four scalar
/// subqueries decoded from one row) instead of four sequential
/// `query_one` round trips — same results, one network hop.
///
/// # Errors
///
/// Returns `(StatusCode::INTERNAL_SERVER_ERROR, message)` (via
/// `internal_error`) if the pool checkout or the query fails.
pub async fn get_global_stats(
    State(state): State<AppState>,
) -> Result<Json<GlobalStatsResponse>, (StatusCode, String)> {
    let client = state.pool.get().await.map_err(internal_error)?;

    // Each scalar subquery yields a BIGINT column, so the single row
    // decodes directly into four i64 values in declaration order.
    let row = client
        .query_one(
            "SELECT \
               (SELECT COALESCE(SUM(total_tokens_saved), 0)::BIGINT FROM user_profiles), \
               (SELECT COUNT(*) FROM users), \
               (SELECT COUNT(*) FROM contribute_entries), \
               (SELECT COUNT(*) FROM teams)",
            &[],
        )
        .await
        .map_err(internal_error)?;

    Ok(Json(GlobalStatsResponse {
        total_tokens_saved: row.get(0),
        total_users: row.get(1),
        total_contributions: row.get(2),
        total_teams: row.get(3),
    }))
}