use anyhow::{Context, Result};
use log::warn;
use serde::Deserialize;
use std::path::{Path, PathBuf};
5
/// Top-level application configuration, deserialized from a TOML file.
///
/// `[rcon]` is required. `[ai]` and `[ollama]` are optional sections;
/// `[server]`, `[backup]`, `[notification]`, and `[jvm]` fall back to
/// their `Default` implementations when the section is absent.
#[derive(Debug, Deserialize, Clone)]
pub struct Config {
    // RCON connection settings (required section).
    pub rcon: RconConfig,
    // Remote AI API settings; `None` disables AI features
    // (may also be cleared during validation in `load_from_str`).
    pub ai: Option<AiConfig>,
    // Local Ollama settings; `None` when the `[ollama]` section is omitted.
    pub ollama: Option<OllamaConfig>,
    #[serde(default)]
    pub server: ServerConfig,
    #[serde(default)]
    pub backup: BackupConfig,
    #[serde(default)]
    pub notification: NotificationConfig,
    // Parsed but not consumed anywhere visible in this file; kept for forward use.
    #[serde(default)]
    #[allow(dead_code)]
    pub jvm: JvmConfig,
}
21
/// RCON connection settings used to talk to the Minecraft server.
#[derive(Debug, Deserialize, Clone)]
pub struct RconConfig {
    // Host to connect to; defaults to "127.0.0.1".
    #[serde(default = "default_rcon_host")]
    pub host: String,
    // RCON port; defaults to 25575.
    #[serde(default = "default_rcon_port")]
    pub port: u16,
    // RCON password; required — no serde default.
    pub password: String,
}
30
/// Serde default: RCON host (loopback).
fn default_rcon_host() -> String {
    String::from("127.0.0.1")
}

/// Serde default: RCON port (25575, the conventional Minecraft RCON port).
fn default_rcon_port() -> u16 {
    25575
}
33
/// Server launch settings; every field has a serde default so the whole
/// `[server]` section may be omitted.
#[derive(Debug, Deserialize, Clone)]
pub struct ServerConfig {
    // Server jar filename; defaults to "fabric-server.jar".
    #[serde(default = "default_jar")]
    pub jar: String,
    // Minimum JVM heap (e.g. "512M"); defaults to "512M".
    #[serde(default = "default_min_mem")]
    pub min_mem: String,
    // Maximum JVM heap (e.g. "1G"); defaults to "1G".
    #[serde(default = "default_max_mem")]
    pub max_mem: String,
    // Session name; defaults to "mc_server".
    // NOTE(review): presumably a tmux/screen session name — confirm against caller.
    #[serde(default = "default_session_name")]
    pub session_name: String,
    // Server log file path; defaults to "logs/latest.log".
    #[serde(default = "default_log_file")]
    pub log_file: String,
}
47
/// Serde default: server jar filename.
fn default_jar() -> String {
    String::from("fabric-server.jar")
}

/// Serde default: minimum JVM heap.
fn default_min_mem() -> String {
    String::from("512M")
}

/// Serde default: maximum JVM heap.
fn default_max_mem() -> String {
    String::from("1G")
}

/// Serde default: session name.
fn default_session_name() -> String {
    String::from("mc_server")
}

/// Serde default: server log file path.
fn default_log_file() -> String {
    String::from("logs/latest.log")
}
53
54impl Default for ServerConfig {
55 fn default() -> Self {
56 Self {
57 jar: default_jar(),
58 min_mem: default_min_mem(),
59 max_mem: default_max_mem(),
60 session_name: default_session_name(),
61 log_file: default_log_file(),
62 }
63 }
64}
65
/// Remote AI API settings. `api_url` and `api_key` are required by serde;
/// `load_from_str` drops the whole section with a warning if either is
/// empty and Ollama is not enabled.
#[derive(Debug, Deserialize, Clone)]
pub struct AiConfig {
    // Endpoint URL for the AI API (required field).
    pub api_url: String,
    // API key (required field).
    pub api_key: String,
    // Model name; defaults to "gpt-3.5-turbo".
    #[serde(default = "default_model")]
    pub model: String,
    // Chat trigger prefix; defaults to "!".
    #[serde(default = "default_trigger")]
    pub trigger: String,
    // Maximum tokens per response; defaults to 150.
    #[serde(default = "default_max_tokens")]
    pub max_tokens: u32,
    // Sampling temperature; defaults to 0.7.
    #[serde(default = "default_temperature")]
    pub temperature: f32,
}
79
/// Serde default: AI model name.
fn default_model() -> String {
    String::from("gpt-3.5-turbo")
}

/// Serde default: chat trigger prefix.
fn default_trigger() -> String {
    String::from("!")
}

/// Serde default: maximum tokens per AI response.
fn default_max_tokens() -> u32 {
    150
}

/// Serde default: AI sampling temperature.
fn default_temperature() -> f32 {
    0.7
}
84
/// Local Ollama AI settings. When `enabled` is true, `load_from_str`
/// accepts an `[ai]` section even with empty `api_key`/`api_url`.
#[derive(Debug, Deserialize, Clone)]
pub struct OllamaConfig {
    // Whether to use local Ollama; defaults to false.
    #[serde(default = "default_ollama_enabled")]
    pub enabled: bool,
    // Ollama generate endpoint; defaults to "http://localhost:11434/api/generate".
    #[serde(default = "default_ollama_url")]
    pub url: String,
    // Ollama model name; defaults to "qwen:0.5b".
    #[serde(default = "default_ollama_model")]
    pub model: String,
}
94
/// Serde default: Ollama disabled.
fn default_ollama_enabled() -> bool {
    false
}

/// Serde default: local Ollama generate endpoint.
fn default_ollama_url() -> String {
    String::from("http://localhost:11434/api/generate")
}

/// Serde default: Ollama model name.
fn default_ollama_model() -> String {
    String::from("qwen:0.5b")
}
98
/// World backup settings; every field has a serde default so the whole
/// `[backup]` section may be omitted.
#[derive(Debug, Deserialize, Clone)]
pub struct BackupConfig {
    // World directory to back up; defaults to "world".
    #[serde(default = "default_world_dir")]
    pub world_dir: String,
    // Destination directory for backups; defaults to "../backups".
    #[serde(default = "default_backup_dest")]
    pub backup_dest: String,
    // How many days of backups to retain; defaults to 7.
    #[serde(default = "default_retain_days")]
    pub retain_days: u32,
}
108
/// Serde default: world directory.
fn default_world_dir() -> String {
    String::from("world")
}

/// Serde default: backup destination directory.
fn default_backup_dest() -> String {
    String::from("../backups")
}

/// Serde default: backup retention in days.
fn default_retain_days() -> u32 {
    7
}
112
113impl Default for BackupConfig {
114 fn default() -> Self {
115 Self {
116 world_dir: default_world_dir(),
117 backup_dest: default_backup_dest(),
118 retain_days: default_retain_days(),
119 }
120 }
121}
122
/// Notification settings; the whole `[notification]` section may be omitted.
/// Per the generated template, empty Telegram fields disable notifications.
#[derive(Debug, Deserialize, Clone)]
#[allow(dead_code)]
pub struct NotificationConfig {
    // Telegram bot token; defaults to "".
    #[serde(default)]
    pub telegram_bot_token: String,
    // Telegram chat id; defaults to "".
    #[serde(default)]
    pub telegram_chat_id: String,
    // Whether to send Termux notifications; defaults to true.
    #[serde(default = "default_termux_notify")]
    pub termux_notify: bool,
}
133
134fn default_termux_notify() -> bool { true }
135
136impl Default for NotificationConfig {
137 fn default() -> Self {
138 Self {
139 telegram_bot_token: String::new(),
140 telegram_chat_id: String::new(),
141 termux_notify: default_termux_notify(),
142 }
143 }
144}
145
/// Advanced JVM tuning options; the whole `[jvm]` section may be omitted.
/// Not yet consumed anywhere visible in this file (hence `dead_code`).
#[derive(Debug, Deserialize, Clone)]
#[allow(dead_code)]
pub struct JvmConfig {
    // Garbage collector selection; defaults to "G1GC".
    #[serde(default = "default_gc")]
    pub gc: String,
    // Extra JVM flags; defaults to "".
    #[serde(default)]
    pub extra_flags: String,
    // Per the generated template: overrides `server.max_mem` when set.
    #[serde(default)]
    pub xmx: Option<String>,
    // Per the generated template: overrides `server.min_mem` when set.
    #[serde(default)]
    pub xms: Option<String>,
}
158
159fn default_gc() -> String { "G1GC".to_string() }
160
161impl Default for JvmConfig {
162 fn default() -> Self {
163 Self {
164 gc: default_gc(),
165 extra_flags: String::new(),
166 xmx: None,
167 xms: None,
168 }
169 }
170}
171
172impl Config {
173 pub fn load(path: &PathBuf) -> Result<Self> {
174 let content = std::fs::read_to_string(path)
175 .with_context(|| format!("Failed to read config file: {:?}", path))?;
176
177 Self::load_from_str(&content)
178 }
179
180 pub fn load_from_str(content: &str) -> Result<Self> {
181 let mut config: Config = toml::from_str(content)
182 .with_context(|| "Failed to parse config file")?;
183
184 if let Some(ref ai) = config.ai {
186 let using_ollama = config.ollama.as_ref().map(|o| o.enabled).unwrap_or(false);
187
188 if !using_ollama && (ai.api_key.is_empty() || ai.api_url.is_empty()) {
190 warn!("AI configuration incomplete (api_key or api_url is empty) and Ollama is not enabled. AI features will be disabled.");
191 config.ai = None;
192 }
193 }
194
195 Ok(config)
196 }
197
198 pub fn generate_template() -> String {
199 r#"# MC-Minder Configuration File
200# MC-Minder 配置文件
201
202# Server Configuration
203# 服务器配置
204[server]
205jar = "fabric-server.jar"
206min_mem = "512M"
207max_mem = "1G"
208session_name = "mc_server"
209log_file = "logs/latest.log"
210
211# RCON Configuration - Required for MC-Minder to communicate with Minecraft server
212# RCON 配置 - MC-Minder 与 Minecraft 服务器通信必需
213[rcon]
214host = "127.0.0.1"
215port = 25575
216password = "your_rcon_password"
217
218# AI Configuration - Leave empty or remove this section to disable AI features
219# AI 配置 - 留空或删除此部分可禁用 AI 功能
220[ai]
221api_url = ""
222api_key = ""
223model = "gpt-3.5-turbo"
224trigger = "!"
225max_tokens = 150
226temperature = 0.7
227
228# Ollama Configuration - Set enabled = true to use local AI
229# Ollama 配置 - 设置 enabled = true 使用本地 AI
230[ollama]
231enabled = false
232url = "http://localhost:11434/api/generate"
233model = "qwen:0.5b"
234
235# Backup Configuration
236# 备份配置
237[backup]
238world_dir = "world"
239backup_dest = "../backups"
240retain_days = 7
241
242# Notification Configuration - Leave empty to disable notifications
243# 通知配置 - 留空禁用通知功能
244[notification]
245telegram_bot_token = ""
246telegram_chat_id = ""
247termux_notify = true
248
249# JVM Configuration - Advanced JVM tuning options
250# JVM 配置 - 高级 JVM 调优选项
251[jvm]
252gc = "G1GC"
253extra_flags = ""
254# xmx = "2G" # Uncomment to override server.max_mem
255# xms = "512M" # Uncomment to override server.min_mem
256"#.to_string()
257 }
258}