//! LLM-enhanced schema refinement
//!
//! This module provides LLM-based schema refinement capabilities for enhancing
//! automatically inferred schemas with descriptions, formats, and constraints.
//!
//! # Features
//!
//! - **Online Mode**: Connect to Ollama API for LLM inference (requires `llm-online` feature)
//! - **Offline Mode**: Use embedded llama.cpp for local inference (requires `llm-offline` feature)
//! - **Documentation Context**: Load documentation to provide context for refinement
//! - **Validation**: Ensure refined schemas maintain compatibility with originals
//!
//! # Example
//!
//! ```ignore
//! use data_modelling_core::llm::{
//!     RefinementConfig, OllamaClient, SchemaRefiner,
//! };
//!
//! // Configure online refinement with Ollama
//! let config = RefinementConfig::with_ollama("llama3.2")
//!     .with_documentation_text("Customer database schema")
//!     .with_timeout(60);
//!
//! // Create the client and refiner
//! let client = OllamaClient::new("http://localhost:11434", "llama3.2");
//! let refiner = SchemaRefiner::new(client, config);
//!
//! // Refine a schema
//! let original_schema = serde_json::json!({
//!     "type": "object",
//!     "properties": {
//!         "customer_id": {"type": "string"},
//!         "email": {"type": "string"}
//!     }
//! });
//!
//! let result = refiner.refine(&original_schema, None).await?;
//! println!("Refined schema: {}", result.schema);
//! ```
//!
//! # Feature Flags
//!
//! - `llm-online`: Enable Ollama client for online inference
//! - `llm-offline`: Enable llama.cpp client for offline inference
//!
//! Without either feature, the LLM module provides configuration types and
//! validation, but actual inference will return feature-not-available errors.
// Re-export main types
pub use ;
pub use ;
pub use ;
pub use ;
pub use LlamaCppClient;
pub use OllamaClient;
pub use ;
pub use ;
pub use ;
pub use MockLlmClient;