//! mc_minder configuration module (`config/mod.rs`): typed configuration
//! structures deserialized from TOML, plus loading and validation helpers.
use anyhow::{Context, Result};
use log::{debug, warn};
use serde::Deserialize;
use std::path::{Path, PathBuf};

/// Top-level configuration, deserialized from the TOML config file.
///
/// `rcon` is the only required section; every other section either has
/// whole-struct defaults (`#[serde(default)]`) or is optional (`Option`).
#[derive(Debug, Deserialize, Clone)]
pub struct Config {
    /// RCON connection settings (required section).
    pub rcon: RconConfig,
    /// OpenAI-compatible AI settings; `None` disables AI features.
    pub ai: Option<AiConfig>,
    /// Ollama backend settings; when `enabled`, it is used instead of the
    /// OpenAI-compatible endpoint (see `Config::load_from_str`).
    pub ollama: Option<OllamaConfig>,
    /// Server process settings; defaults applied if the section is absent.
    #[serde(default)]
    pub server: ServerConfig,
    /// World backup settings; defaults applied if the section is absent.
    #[serde(default)]
    pub backup: BackupConfig,
    /// Notification settings; defaults applied if the section is absent.
    #[serde(default)]
    pub notification: NotificationConfig,
    /// JVM tuning options; dead_code allowed here — presumably consumed
    /// outside this module. TODO(review): confirm usage elsewhere.
    #[serde(default)]
    #[allow(dead_code)]
    pub jvm: JvmConfig,
}

/// RCON connection settings (`[rcon]` section).
///
/// `password` has no serde default and is therefore required; host and
/// port fall back to the local defaults below.
#[derive(Debug, Deserialize, Clone)]
pub struct RconConfig {
    /// Address of the RCON listener (default: "127.0.0.1").
    #[serde(default = "default_rcon_host")]
    pub host: String,
    /// RCON TCP port (default: 25575).
    #[serde(default = "default_rcon_port")]
    pub port: u16,
    /// RCON password (required; no default).
    pub password: String,
}

/// Default RCON address: local loopback only.
fn default_rcon_host() -> String {
    String::from("127.0.0.1")
}

/// Default RCON TCP port.
fn default_rcon_port() -> u16 {
    25575
}

/// Minecraft server process settings (`[server]` section; all fields optional).
#[derive(Debug, Deserialize, Clone)]
pub struct ServerConfig {
    /// Server jar file to launch (default: "fabric-server.jar").
    #[serde(default = "default_jar")]
    pub jar: String,
    /// JVM initial heap size string (default: "512M").
    #[serde(default = "default_min_mem")]
    pub min_mem: String,
    /// JVM maximum heap size string (default: "1G").
    #[serde(default = "default_max_mem")]
    pub max_mem: String,
    /// Session name for the managed server process (default: "mc_server").
    #[serde(default = "default_session_name")]
    pub session_name: String,
    /// Path of the live server log file (default: "logs/latest.log").
    #[serde(default = "default_log_file")]
    pub log_file: String,
}

/// Default server jar filename.
fn default_jar() -> String {
    String::from("fabric-server.jar")
}

/// Default JVM initial heap size.
fn default_min_mem() -> String {
    String::from("512M")
}

/// Default JVM maximum heap size.
fn default_max_mem() -> String {
    String::from("1G")
}

/// Default session name for the managed server process.
fn default_session_name() -> String {
    String::from("mc_server")
}

/// Default live-log path, relative to the server directory.
fn default_log_file() -> String {
    String::from("logs/latest.log")
}

54impl Default for ServerConfig {
55    fn default() -> Self {
56        Self {
57            jar: default_jar(),
58            min_mem: default_min_mem(),
59            max_mem: default_max_mem(),
60            session_name: default_session_name(),
61            log_file: default_log_file(),
62        }
63    }
64}
65
/// AI chat settings (`[ai]` section) for an OpenAI-compatible endpoint.
///
/// In Ollama mode the endpoint/key may stay empty; in OpenAI mode
/// `api_url` and `api_key` must be non-empty or the section is discarded
/// at load time (see `Config::load_from_str`).
#[derive(Debug, Deserialize, Clone)]
pub struct AiConfig {
    /// Chat endpoint URL (empty by default).
    #[serde(default)]
    pub api_url: String,
    /// API key for the endpoint (empty by default).
    #[serde(default)]
    pub api_key: String,
    /// Model identifier (default: "gpt-3.5-turbo").
    #[serde(default = "default_model")]
    pub model: String,
    /// Trigger prefix for AI requests (default: "!").
    #[serde(default = "default_trigger")]
    pub trigger: String,
    /// Maximum tokens per completion (default: 150).
    #[serde(default = "default_max_tokens")]
    pub max_tokens: u32,
    /// Sampling temperature (default: 0.7).
    #[serde(default = "default_temperature")]
    pub temperature: f32,
}

82impl Default for AiConfig {
83    fn default() -> Self {
84        Self {
85            api_url: String::new(),
86            api_key: String::new(),
87            model: default_model(),
88            trigger: default_trigger(),
89            max_tokens: default_max_tokens(),
90            temperature: default_temperature(),
91        }
92    }
93}
94
/// Default chat model name for the OpenAI-compatible mode.
fn default_model() -> String {
    String::from("gpt-3.5-turbo")
}

/// Default trigger prefix for AI requests.
fn default_trigger() -> String {
    String::from("!")
}

/// Default per-completion token cap.
fn default_max_tokens() -> u32 {
    150
}

/// Default sampling temperature.
fn default_temperature() -> f32 {
    0.7
}

/// Local Ollama backend settings (`[ollama]` section).
#[derive(Debug, Deserialize, Clone)]
pub struct OllamaConfig {
    /// Whether to use Ollama instead of the OpenAI-compatible endpoint
    /// (default: false; see `Config::load_from_str`).
    #[serde(default = "default_ollama_enabled")]
    pub enabled: bool,
    /// Generate endpoint URL (default: "http://localhost:11434/api/generate").
    #[serde(default = "default_ollama_url")]
    pub url: String,
    /// Model name to request (default: "qwen:0.5b").
    #[serde(default = "default_ollama_model")]
    pub model: String,
}

/// Ollama integration is opt-in.
fn default_ollama_enabled() -> bool {
    false
}

/// Default Ollama generate endpoint on the local daemon.
fn default_ollama_url() -> String {
    String::from("http://localhost:11434/api/generate")
}

/// Default (small) Ollama model.
fn default_ollama_model() -> String {
    String::from("qwen:0.5b")
}

/// World backup settings (`[backup]` section; all fields optional).
#[derive(Debug, Deserialize, Clone)]
pub struct BackupConfig {
    /// World directory to back up (default: "world").
    #[serde(default = "default_world_dir")]
    pub world_dir: String,
    /// Destination directory for backups (default: "../backups").
    #[serde(default = "default_backup_dest")]
    pub backup_dest: String,
    /// Number of days to retain old backups (default: 7).
    #[serde(default = "default_retain_days")]
    pub retain_days: u32,
}

/// Default world directory, relative to the server root.
fn default_world_dir() -> String {
    String::from("world")
}

/// Default backup destination (a sibling of the server directory).
fn default_backup_dest() -> String {
    String::from("../backups")
}

/// Default backup retention in days.
fn default_retain_days() -> u32 {
    7
}

128impl Default for BackupConfig {
129    fn default() -> Self {
130        Self {
131            world_dir: default_world_dir(),
132            backup_dest: default_backup_dest(),
133            retain_days: default_retain_days(),
134        }
135    }
136}
137
/// Notification settings (`[notification]` section).
#[derive(Debug, Deserialize, Clone)]
#[allow(dead_code)] // NOTE(review): presumably read outside this module — confirm
pub struct NotificationConfig {
    /// Telegram bot token (empty by default; empty presumably disables
    /// Telegram — TODO confirm at the caller).
    #[serde(default)]
    pub telegram_bot_token: String,
    /// Telegram chat id to notify (empty by default).
    #[serde(default)]
    pub telegram_chat_id: String,
    /// Whether to emit Termux notifications (default: true).
    #[serde(default = "default_termux_notify")]
    pub termux_notify: bool,
}

/// Termux notifications are on by default.
fn default_termux_notify() -> bool {
    true
}

151impl Default for NotificationConfig {
152    fn default() -> Self {
153        Self {
154            telegram_bot_token: String::new(),
155            telegram_chat_id: String::new(),
156            termux_notify: default_termux_notify(),
157        }
158    }
159}
160
/// JVM tuning options (`[jvm]` section).
#[derive(Debug, Deserialize, Clone)]
#[allow(dead_code)]
pub struct JvmConfig {
    /// Garbage collector name (default: "G1GC").
    #[serde(default = "default_gc")]
    pub gc: String,
    /// Extra JVM flags (empty by default).
    #[serde(default)]
    pub extra_flags: String,
    /// Optional -Xmx override; per the generated template, overrides
    /// `server.max_mem` when set.
    #[serde(default)]
    pub xmx: Option<String>,
    /// Optional -Xms override; per the generated template, overrides
    /// `server.min_mem` when set.
    #[serde(default)]
    pub xms: Option<String>,
    /// Optional custom JDK path.
    #[serde(default)]
    pub jdk_path: Option<String>,
}

/// Default garbage collector selection.
fn default_gc() -> String {
    String::from("G1GC")
}

178impl Default for JvmConfig {
179    fn default() -> Self {
180        Self {
181            gc: default_gc(),
182            extra_flags: String::new(),
183            xmx: None,
184            xms: None,
185            jdk_path: None,
186        }
187    }
188}
189
190impl Config {
191    pub fn load(path: &PathBuf) -> Result<Self> {
192        let content = std::fs::read_to_string(path)
193            .with_context(|| format!("Failed to read config file: {:?}", path))?;
194        
195        Self::load_from_str(&content)
196    }
197
198    pub fn load_from_str(content: &str) -> Result<Self> {
199        let mut config: Config = toml::from_str(content)
200            .with_context(|| "Failed to parse config file")?;
201        
202        // 检查 AI 配置
203        let using_ollama = config.ollama.as_ref().map(|o| o.enabled).unwrap_or(false);
204        
205        if let Some(ref mut ai) = config.ai {
206            if using_ollama {
207                // Ollama 模式:确保 ollama URL 有效
208                if let Some(ref mut ollama) = config.ollama {
209                    if ollama.url.is_empty() {
210                        ollama.url = "http://localhost:11434".to_string();
211                    }
212                }
213                debug!("AI mode: Ollama (model: {})", 
214                    config.ollama.as_ref().map(|o| o.model.clone()).unwrap_or_default());
215            } else {
216                // OpenAI 模式:检查必需字段
217                if ai.api_key.is_empty() || ai.api_url.is_empty() {
218                    warn!("AI configuration incomplete for OpenAI mode (api_key or api_url is empty). AI features will be disabled.");
219                    config.ai = None;
220                } else {
221                    debug!("AI mode: OpenAI-compatible (model: {})", ai.model);
222                }
223            }
224        } else if using_ollama {
225            // 用户启用了 Ollama 但没有 [ai] 部分,创建默认的 AiConfig
226            debug!("Ollama enabled but no [ai] section found, creating default AI config");
227            config.ai = Some(AiConfig::default());
228        }
229        
230        Ok(config)
231    }
232
233    pub fn generate_template() -> String {
234        // Minimal, safe template with jdk_path option (non-raw string for stability)
235        let s = "# MC-Minder Configuration File\n[jvm]\ngc = \"G1GC\"\nextra_flags = \"\"\njdk_path = \"\"  # Optional: Custom JDK path\n# xmx = \"2G\"  # Uncomment to override server.max_mem\n# xms = \"512M\"  # Uncomment to override server.min_mem\n";
236        s.to_string()
237    }
238}