ggen_ai/
lib.rs

1//! # ggen-ai - LLM integration layer for ggen
2//!
3//! Thin wrapper around genai for ggen with environment support and caching.
4//!
5//! This crate provides a simplified LLM integration layer for ggen, focusing on:
6//! - Environment-based configuration
7//! - Response caching
8//! - Template generation
9//! - SPARQL query generation
10//! - Ontology generation
11//! - Code refactoring assistance
12//!
13//! ## Features
14//!
15//! - **Multi-provider LLM support**: OpenAI, Anthropic, Ollama, Gemini, DeepSeek, xAI/Grok, Groq, Cohere (via genai)
16//! - **Environment-based configuration**: Automatic API key detection and model selection
17//! - **Response caching**: Reduce API costs and latency with intelligent caching
18//! - **Template generation**: Natural language to ggen templates
19//! - **SPARQL query generation**: Intent-based query construction
20//! - **Ontology generation**: Domain descriptions to RDF/OWL
21//! - **Code refactoring**: AI-assisted code improvement suggestions
22//! - **RDF-based CLI generation**: Generate CLI projects from RDF ontologies
23//!
24//! ## Quick Start
25//!
26//! ```rust,no_run
27//! use ggen_ai::{GenAiClient, LlmClient, LlmConfig};
28//!
29//! # async fn example() -> Result<(), Box<dyn std::error::Error>> {
30//! // Initialize client with default configuration
31//! let config = LlmConfig::default();
32//! let client = GenAiClient::new(config)?;
33//!
34//! // Generate response
35//! let response = client.complete("Explain Rust ownership").await?;
36//! println!("{}", response.content);
37//! # Ok(())
38//! # }
39//! ```
40//!
41//! ## Module Organization
42//!
43//! - `cache` - LLM response caching
44//! - `client` - LLM client abstraction
45//! - `config` - Configuration management
46//! - `generators` - Specialized generators (templates, SPARQL, ontologies)
47//! - `providers` - LLM provider implementations
48//! - `prompts` - Prompt templates and builders
49//! - `rdf` - RDF-based CLI generation
50//! - `security` - API key masking and security
51//! - `streaming` - Streaming response support
52//! - `types` - Type definitions
53
54#![deny(warnings)] // Poka-Yoke: Prevent warnings at compile time - compiler enforces correctness
55
// Module declarations. Most are summarized in the crate-level docs above;
// `constants`, `error_utils`, and `parsing_utils` are internal support
// modules not listed there.
pub mod cache; // LLM response caching (CacheConfig, CacheStats, LlmCache)
pub mod client; // LLM client abstraction (GenAiClient, LlmClient, LlmConfig)
pub mod config; // Configuration management (global + per-provider config)
pub mod constants; // shared constants — internal support module
pub mod error; // GgenAiError and the crate-wide Result alias
pub mod error_utils; // error-handling helpers — internal support module
pub mod generators; // Specialized generators (templates, SPARQL, ontologies)
pub mod parsing_utils; // parsing helpers — internal support module
pub mod prompts; // Prompt templates and builders
pub mod providers; // LLM provider implementations (incl. adapter configs)
pub mod rdf; // RDF-based CLI generation
pub mod security; // API key masking (MaskApiKey, SecretString)
pub mod streaming; // Streaming response support (StreamConfig)
pub mod types; // Type definitions (id newtypes: RequestId, RuleId, ...)
70
// Test helpers shared by unit tests and the provider integration-test
// features. Compiled only under `cfg(test)` or when one of the listed
// cargo features is enabled, so it adds nothing to normal release builds.
#[cfg(any(
    test,
    feature = "ollama-integration",
    feature = "openai-integration",
    feature = "anthropic-integration",
    feature = "live-llm-tests"
))]
pub mod test_helpers;
80
// Re-export the most commonly used types at the crate root so callers can
// write e.g. `ggen_ai::LlmClient` instead of `ggen_ai::client::LlmClient`.
// Grouped by source module, mirroring the declarations above.
pub use cache::{CacheConfig, CacheStats, LlmCache};
pub use client::{GenAiClient, LlmChunk, LlmClient, LlmConfig, LlmResponse, UsageStats};
pub use config::{get_global_config, init_global_config, AiConfig, GlobalLlmConfig, LlmProvider};
pub use error::{GgenAiError, Result};
pub use generators::{
    NaturalSearchGenerator, OntologyGenerator, QualityMetrics, RefactorAssistant, SparqlGenerator,
    TemplateGenerator, TemplateValidator, ValidationIssue,
};
// Ready-made Ollama model configs plus a MockClient for offline testing.
pub use providers::adapter::{
    ollama_default_config, ollama_ministral_3b_config, ollama_qwen3_coder_config, MockClient,
};
pub use rdf::{
    Argument, ArgumentType, CliGenerator, CliProject, Dependency, Noun, QueryExecutor, RdfParser,
    TemplateRenderer, Validation, Verb,
};
pub use security::{MaskApiKey, SecretString};
pub use streaming::StreamConfig;
pub use types::{DecisionId, PolicyId, RequestId, RuleId};
100
/// Crate version string, captured from `Cargo.toml` at compile time via the
/// `CARGO_PKG_VERSION` environment variable that cargo sets for every build.
pub const VERSION: &str = env!("CARGO_PKG_VERSION");
103
104/// Initialize tracing for the ggen-ai crate
105pub fn init_logging() {
106    use tracing_subscriber::{fmt, EnvFilter};
107
108    if std::env::var("RUST_LOG").is_err() {
109        std::env::set_var("RUST_LOG", "ggen_ai=info");
110    }
111
112    let _ = fmt()
113        .with_env_filter(EnvFilter::from_default_env())
114        .with_target(false)
115        .try_init();
116}