//! tirami_core/config.rs — node configuration types and request limits.

use serde::{Deserialize, Serialize};
use std::path::PathBuf;

/// Node-wide configuration for a tirami seed.
///
/// `#[serde(default)]` on the container means any field absent from a
/// deserialized config file falls back to the value from the `Default`
/// impl, so on-disk configs may be partial.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
pub struct Config {
    /// Path to the local GGUF model file.
    pub model_path: Option<PathBuf>,

    /// Optional path to a persisted ledger snapshot.
    pub ledger_path: Option<PathBuf>,

    /// Optional path to the persisted forge-bank (L2) state.
    pub bank_state_path: Option<PathBuf>,

    /// Optional path to the persisted forge-agora (L4) marketplace state.
    pub marketplace_state_path: Option<PathBuf>,

    /// Optional path to the persisted forge-mind (L3) agent snapshot.
    pub mind_state_path: Option<PathBuf>,

    /// Whether to share compute with the network.
    pub share_compute: bool,

    /// Maximum memory (GB) to dedicate to inference.
    pub max_memory_gb: f32,

    /// Port for the local HTTP API.
    pub api_port: u16,

    /// Bind address for the local HTTP API (IP literal or hostname).
    pub api_bind_addr: String,

    /// Optional bearer token protecting administrative API endpoints.
    /// `None` leaves those endpoints unauthenticated.
    pub api_bearer_token: Option<String>,

    /// Maximum accepted HTTP request body size for the local API.
    pub api_max_request_body_bytes: usize,

    /// Bootstrap relay addresses for WAN discovery.
    pub bootstrap_relays: Vec<String>,

    /// Region hint for peer discovery.
    pub region: String,

    /// Maximum accepted prompt length (in characters, not bytes) for API
    /// and remote inference requests.
    pub max_prompt_chars: usize,

    /// Maximum number of tokens a single request may ask the runtime to generate.
    pub max_generate_tokens: u32,

    /// Maximum number of concurrent remote inference requests the seed will execute.
    pub max_concurrent_remote_inference_requests: usize,

    /// Settlement window duration in hours (Issue #19). 0 = manual only.
    pub settlement_window_hours: u64,
}

59impl Config {
60    pub fn api_socket_addr(&self) -> String {
61        format!("{}:{}", self.api_bind_addr, self.api_port)
62    }
63
64    pub fn validate_inference_request(
65        &self,
66        prompt: &str,
67        max_tokens: u32,
68        temperature: f32,
69        top_p: Option<f32>,
70    ) -> Result<(), crate::TiramiError> {
71        let prompt_chars = prompt.chars().count();
72        if prompt_chars == 0 {
73            return Err(crate::TiramiError::InvalidRequest(
74                "prompt must not be empty".to_string(),
75            ));
76        }
77        if prompt_chars > self.max_prompt_chars {
78            return Err(crate::TiramiError::InvalidRequest(format!(
79                "prompt too large: {prompt_chars} chars > limit {}",
80                self.max_prompt_chars
81            )));
82        }
83        if max_tokens == 0 {
84            return Err(crate::TiramiError::InvalidRequest(
85                "max_tokens must be greater than zero".to_string(),
86            ));
87        }
88        if max_tokens > self.max_generate_tokens {
89            return Err(crate::TiramiError::InvalidRequest(format!(
90                "max_tokens too large: {max_tokens} > limit {}",
91                self.max_generate_tokens
92            )));
93        }
94        if !temperature.is_finite() || !(0.0..=2.0).contains(&temperature) {
95            return Err(crate::TiramiError::InvalidRequest(
96                "temperature must be finite and within 0.0..=2.0".to_string(),
97            ));
98        }
99        if let Some(top_p) = top_p {
100            if !top_p.is_finite() || !(0.0..=1.0).contains(&top_p) || top_p == 0.0 {
101                return Err(crate::TiramiError::InvalidRequest(
102                    "top_p must be finite and within (0.0, 1.0]".to_string(),
103                ));
104            }
105        }
106
107        Ok(())
108    }
109}
110
111impl Default for Config {
112    fn default() -> Self {
113        Self {
114            model_path: None,
115            ledger_path: None,
116            bank_state_path: None,
117            marketplace_state_path: None,
118            mind_state_path: None,
119            share_compute: false,
120            max_memory_gb: 4.0,
121            api_port: 3000,
122            api_bind_addr: "127.0.0.1".to_string(),
123            api_bearer_token: None,
124            api_max_request_body_bytes: 64 * 1024,
125            bootstrap_relays: vec![],
126            region: "unknown".to_string(),
127            max_prompt_chars: 8_192,
128            max_generate_tokens: 1_024,
129            max_concurrent_remote_inference_requests: 4,
130            settlement_window_hours: 24,
131        }
132    }
133}