// astrid_tools/spark.rs
//! Spark identity configuration — the agent's living self-description.
//!
//! The spark is a compact identity file (`spark.toml`) that defines the agent's
//! name, role, personality, communication style, and core philosophy. It can be
//! seeded by the user via `[spark]` in `config.toml` and evolved by the agent
//! itself via the `spark` built-in tool.
//!
//! Read priority: `spark.toml` > `[spark]` in config > empty (default behavior).
//! Write target: `spark.toml` only (agent never touches `config.toml`).

use std::io::Write;
use std::path::Path;

use serde::{Deserialize, Serialize};

use crate::ToolError;
17
/// Maximum byte length for short fields (callsign, class, aura, signal).
/// Enforced in bytes (`str::len`), truncated at a char boundary.
const MAX_SHORT_FIELD_LEN: usize = 100;

/// Maximum byte length for the core field.
const MAX_CORE_LEN: usize = 2000;

/// Maximum spark file size (64 KB); larger files are rejected on load.
const MAX_SPARK_FILE_SIZE: u64 = 64 * 1024;
26
/// Agent identity configuration.
///
/// All fields default to empty strings; `#[serde(default)]` lets any field be
/// omitted from the TOML. When all fields are empty, the agent uses the
/// default "Astrid" identity (zero behavior change).
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[serde(default)]
pub struct SparkConfig {
    /// Agent's name (e.g. "Stellar", "Nova", "Orion").
    /// Short field: single line, `MAX_SHORT_FIELD_LEN` bytes after sanitization.
    pub callsign: String,
    /// Role archetype (e.g. "navigator", "engineer", "sentinel"). Short field.
    pub class: String,
    /// Personality energy (e.g. "calm", "sharp", "warm", "analytical"). Short field.
    pub aura: String,
    /// Communication style (e.g. "formal", "concise", "casual", "poetic"). Short field.
    pub signal: String,
    /// Soul/philosophy — free-form values, learned patterns, personality depth.
    /// May contain newlines; limited to `MAX_CORE_LEN` bytes after sanitization.
    pub core: String,
}
45
46impl SparkConfig {
47    /// Returns `true` when all fields are empty (no identity configured).
48    #[must_use]
49    pub fn is_empty(&self) -> bool {
50        self.callsign.is_empty()
51            && self.class.is_empty()
52            && self.aura.is_empty()
53            && self.signal.is_empty()
54            && self.core.is_empty()
55    }
56
57    /// Sanitize all fields: trim whitespace, enforce length limits, and
58    /// remove newlines from short fields. Whitespace-only fields become empty.
59    pub fn sanitize(&mut self) {
60        sanitize_short_field(&mut self.callsign);
61        sanitize_short_field(&mut self.class);
62        sanitize_short_field(&mut self.aura);
63        sanitize_short_field(&mut self.signal);
64        // Core allows newlines but is length-limited
65        let trimmed = self.core.trim().to_string();
66        if trimmed.len() > MAX_CORE_LEN {
67            // Truncate at a char boundary
68            self.core = truncate_at_char_boundary(&trimmed, MAX_CORE_LEN);
69        } else {
70            self.core = trimmed;
71        }
72    }
73
74    /// Load a spark config from a TOML file.
75    ///
76    /// Returns `None` if the file is missing, too large, or cannot be parsed.
77    #[must_use]
78    pub fn load_from_file(path: &Path) -> Option<Self> {
79        let metadata = std::fs::metadata(path).ok()?;
80        if metadata.len() > MAX_SPARK_FILE_SIZE {
81            return None;
82        }
83        let contents = std::fs::read_to_string(path).ok()?;
84        let mut config: Self = toml::from_str(&contents).ok()?;
85        config.sanitize();
86        Some(config)
87    }
88
89    /// Save the spark config to a TOML file.
90    ///
91    /// Creates parent directories if they don't exist. Sanitizes before writing.
92    ///
93    /// # Errors
94    ///
95    /// Returns a [`ToolError`] if directory creation or file writing fails.
96    pub fn save_to_file(&self, path: &Path) -> Result<(), ToolError> {
97        let mut sanitized = self.clone();
98        sanitized.sanitize();
99        if let Some(parent) = path.parent() {
100            std::fs::create_dir_all(parent)?;
101        }
102        let toml_str =
103            toml::to_string_pretty(&sanitized).map_err(|e| ToolError::Other(e.to_string()))?;
104        // Atomic write: write to a temp file in the same directory, then rename.
105        // This prevents partial writes from corrupting the file on process kill.
106        let dir = path.parent().unwrap_or(Path::new("."));
107        let mut tmp = tempfile::NamedTempFile::new_in(dir)?;
108        tmp.write_all(toml_str.as_bytes())?;
109        tmp.persist(path)
110            .map_err(|e| ToolError::Other(format!("failed to persist spark file: {e}")))?;
111        Ok(())
112    }
113
114    /// Build the identity preamble for system prompt injection.
115    ///
116    /// When all fields are empty, returns `None` (caller should use the default
117    /// "Astrid" opening). When at least one field is set, builds a
118    /// structured identity block.
119    #[must_use]
120    pub fn build_preamble(&self) -> Option<String> {
121        if self.is_empty() {
122            return None;
123        }
124
125        let mut sections = Vec::new();
126
127        // Opening line: "You are {callsign}" or "You are an AI agent"
128        let opening = if self.callsign.is_empty() {
129            if self.class.is_empty() {
130                "You are an AI agent.".to_string()
131            } else {
132                format!("You are an AI agent, a {}.", self.class)
133            }
134        } else if self.class.is_empty() {
135            format!("You are {}.", self.callsign)
136        } else {
137            format!("You are {}, a {}.", self.callsign, self.class)
138        };
139        sections.push(opening);
140
141        if !self.aura.is_empty() {
142            sections.push(format!("\n# Personality\n{}", self.aura));
143        }
144
145        if !self.signal.is_empty() {
146            sections.push(format!("\n# Communication Style\n{}", self.signal));
147        }
148
149        if !self.core.is_empty() {
150            sections.push(format!("\n# Core Directives\n{}", self.core));
151        }
152
153        Some(sections.join("\n"))
154    }
155
156    /// Merge another spark into this one, only updating non-empty fields.
157    pub fn merge(&mut self, other: &SparkConfig) {
158        if !other.callsign.is_empty() {
159            self.callsign.clone_from(&other.callsign);
160        }
161        if !other.class.is_empty() {
162            self.class.clone_from(&other.class);
163        }
164        if !other.aura.is_empty() {
165            self.aura.clone_from(&other.aura);
166        }
167        if !other.signal.is_empty() {
168            self.signal.clone_from(&other.signal);
169        }
170        if !other.core.is_empty() {
171            self.core.clone_from(&other.core);
172        }
173    }
174
175    /// Merge optional field updates. `None` = don't touch, `Some(value)` = set
176    /// (including `Some("")` to clear a field).
177    pub fn merge_optional(
178        &mut self,
179        callsign: Option<&str>,
180        class: Option<&str>,
181        aura: Option<&str>,
182        signal: Option<&str>,
183        core: Option<&str>,
184    ) {
185        if let Some(v) = callsign {
186            self.callsign = v.to_string();
187        }
188        if let Some(v) = class {
189            self.class = v.to_string();
190        }
191        if let Some(v) = aura {
192            self.aura = v.to_string();
193        }
194        if let Some(v) = signal {
195            self.signal = v.to_string();
196        }
197        if let Some(v) = core {
198            self.core = v.to_string();
199        }
200    }
201}
202
203/// Sanitize a short spark field: trim, collapse to empty if whitespace-only,
204/// remove newlines, and enforce length limit.
205fn sanitize_short_field(field: &mut String) {
206    let trimmed = field.trim().replace(['\n', '\r'], " ");
207    if trimmed.len() > MAX_SHORT_FIELD_LEN {
208        *field = truncate_at_char_boundary(&trimmed, MAX_SHORT_FIELD_LEN);
209    } else {
210        *field = trimmed;
211    }
212}
213
/// RAII guard that holds a file lock and cleans up the lock file on drop.
pub(crate) struct SparkLockGuard {
    // Open handle that owns the advisory lock for the guard's lifetime.
    file: std::fs::File,
    // Path of the lock file, removed (best-effort) on drop.
    path: std::path::PathBuf,
}

impl Drop for SparkLockGuard {
    fn drop(&mut self) {
        // Explicitly release the advisory lock before removing the file.
        // fs2::FileExt::unlock requires the trait in scope.
        let _ = <std::fs::File as fs2::FileExt>::unlock(&self.file);
        // NOTE(review): unlinking the lock file after unlock is inherently
        // racy — another process may have opened the same path and be taking
        // the lock on the now-unlinked inode while a third creates a fresh
        // file here. Fine as best-effort cleanup; do not treat the file's
        // existence as the mutual-exclusion signal. Errors are deliberately
        // ignored in a destructor.
        let _ = std::fs::remove_file(&self.path);
    }
}
228
/// Acquire an exclusive advisory lock on a lock file derived from the spark
/// file path via `with_extension("lock")` (e.g. `spark.toml` → `spark.lock`).
///
/// Blocks until the lock is available (fs2 `lock_exclusive` is the blocking
/// variant). Returns a [`SparkLockGuard`] that holds the lock; when dropped,
/// the lock is released and the lock file is cleaned up.
///
/// # Errors
///
/// Returns a [`ToolError`] if the lock file cannot be opened or locked.
pub(crate) fn acquire_spark_lock(spark_path: &Path) -> Result<SparkLockGuard, ToolError> {
    use fs2::FileExt;

    // Lock a sibling file rather than the spark file itself, so writers can
    // replace the spark file atomically (temp-file rename) without touching
    // the locked handle.
    let lock_path = spark_path.with_extension("lock");
    let file = std::fs::OpenOptions::new()
        .create(true)
        .truncate(false) // keep existing contents; the file exists only to be locked
        .write(true)
        .open(&lock_path)?;
    file.lock_exclusive()
        .map_err(|e| ToolError::Other(format!("failed to acquire spark lock: {e}")))?;
    Ok(SparkLockGuard {
        file,
        path: lock_path,
    })
}
249
// NOTE(review): this `use` sits at the bottom of the file, after the items
// that call `truncate_at_char_boundary` — legal in Rust, but consider moving
// it up with the other imports for readability.
use super::truncate::truncate_at_char_boundary;

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_default_is_empty() {
        let spark = SparkConfig::default();
        assert!(spark.is_empty());
    }

    #[test]
    fn test_not_empty_with_callsign() {
        let spark = SparkConfig {
            callsign: "Stellar".to_string(),
            ..Default::default()
        };
        assert!(!spark.is_empty());
    }

    #[test]
    fn test_load_from_missing_file() {
        assert!(SparkConfig::load_from_file(Path::new("/nonexistent/spark.toml")).is_none());
    }

    #[test]
    fn test_save_and_load_roundtrip() {
        let dir = tempfile::tempdir().unwrap();
        let path = dir.path().join("spark.toml");

        let spark = SparkConfig {
            callsign: "Nova".to_string(),
            class: "engineer".to_string(),
            aura: "sharp".to_string(),
            signal: "concise".to_string(),
            core: "I value precision.".to_string(),
        };

        spark.save_to_file(&path).unwrap();
        let loaded = SparkConfig::load_from_file(&path).unwrap();

        assert_eq!(loaded.callsign, "Nova");
        assert_eq!(loaded.class, "engineer");
        assert_eq!(loaded.aura, "sharp");
        assert_eq!(loaded.signal, "concise");
        assert_eq!(loaded.core, "I value precision.");
    }

    #[test]
    fn test_build_preamble_empty_returns_none() {
        let spark = SparkConfig::default();
        assert!(spark.build_preamble().is_none());
    }

    #[test]
    fn test_build_preamble_full() {
        let spark = SparkConfig {
            callsign: "Stellar".to_string(),
            class: "navigator".to_string(),
            aura: "calm".to_string(),
            signal: "formal".to_string(),
            core: "I value clarity.".to_string(),
        };

        let preamble = spark.build_preamble().unwrap();
        assert!(preamble.contains("You are Stellar, a navigator."));
        assert!(preamble.contains("# Personality\ncalm"));
        assert!(preamble.contains("# Communication Style\nformal"));
        assert!(preamble.contains("# Core Directives\nI value clarity."));
    }

    #[test]
    fn test_build_preamble_callsign_only() {
        let spark = SparkConfig {
            callsign: "Orion".to_string(),
            ..Default::default()
        };

        let preamble = spark.build_preamble().unwrap();
        assert!(preamble.contains("You are Orion."));
        assert!(!preamble.contains("# Personality"));
        assert!(!preamble.contains("# Communication Style"));
        assert!(!preamble.contains("# Core Directives"));
    }

    #[test]
    fn test_build_preamble_class_only() {
        let spark = SparkConfig {
            class: "sentinel".to_string(),
            ..Default::default()
        };

        let preamble = spark.build_preamble().unwrap();
        assert!(preamble.contains("You are an AI agent, a sentinel."));
    }

    #[test]
    fn test_merge_updates_non_empty_fields() {
        let mut base = SparkConfig {
            callsign: "Nova".to_string(),
            class: "engineer".to_string(),
            aura: "sharp".to_string(),
            signal: String::new(),
            core: "Original core.".to_string(),
        };

        let update = SparkConfig {
            callsign: String::new(), // should NOT overwrite
            class: "navigator".to_string(),
            aura: String::new(), // should NOT overwrite
            signal: "concise".to_string(),
            core: "Evolved core.".to_string(),
        };

        base.merge(&update);

        assert_eq!(base.callsign, "Nova"); // preserved
        assert_eq!(base.class, "navigator"); // updated
        assert_eq!(base.aura, "sharp"); // preserved
        assert_eq!(base.signal, "concise"); // updated
        assert_eq!(base.core, "Evolved core."); // updated
    }

    #[test]
    fn test_load_partial_toml() {
        // Omitted keys must deserialize as empty thanks to #[serde(default)].
        let dir = tempfile::tempdir().unwrap();
        let path = dir.path().join("spark.toml");
        std::fs::write(&path, "callsign = \"Astrid\"\n").unwrap();

        let spark = SparkConfig::load_from_file(&path).unwrap();
        assert_eq!(spark.callsign, "Astrid");
        assert!(spark.class.is_empty());
        assert!(spark.aura.is_empty());
    }

    #[test]
    fn test_merge_optional_clears_fields() {
        let mut spark = SparkConfig {
            callsign: "Nova".to_string(),
            class: "engineer".to_string(),
            aura: "sharp".to_string(),
            signal: "formal".to_string(),
            core: "I value clarity.".to_string(),
        };

        // None = don't touch, Some("") = clear, Some(value) = update
        spark.merge_optional(None, Some(""), None, Some("concise"), None);

        assert_eq!(spark.callsign, "Nova"); // untouched
        assert!(spark.class.is_empty()); // cleared
        assert_eq!(spark.aura, "sharp"); // untouched
        assert_eq!(spark.signal, "concise"); // updated
        assert_eq!(spark.core, "I value clarity."); // untouched
    }

    #[test]
    fn test_sanitize_trims_whitespace() {
        let mut spark = SparkConfig {
            callsign: "  Stellar  ".to_string(),
            class: "   ".to_string(), // whitespace-only becomes empty
            ..Default::default()
        };
        spark.sanitize();
        assert_eq!(spark.callsign, "Stellar");
        assert!(spark.class.is_empty());
    }

    #[test]
    fn test_sanitize_removes_newlines_from_short_fields() {
        let mut spark = SparkConfig {
            callsign: "Stellar\nEvil".to_string(),
            class: "nav\rigator".to_string(),
            ..Default::default()
        };
        spark.sanitize();
        assert_eq!(spark.callsign, "Stellar Evil");
        assert_eq!(spark.class, "nav igator");
    }

    #[test]
    fn test_sanitize_truncates_long_fields() {
        let mut spark = SparkConfig {
            callsign: "x".repeat(200),
            core: "y".repeat(3000),
            ..Default::default()
        };
        spark.sanitize();
        assert!(spark.callsign.len() <= MAX_SHORT_FIELD_LEN);
        assert!(spark.core.len() <= MAX_CORE_LEN);
    }

    #[test]
    fn test_sanitize_handles_multibyte_truncation() {
        // 100 emoji = 400 bytes, should truncate at a char boundary
        let mut spark = SparkConfig {
            callsign: "🔥".repeat(100),
            ..Default::default()
        };
        spark.sanitize();
        assert!(spark.callsign.len() <= MAX_SHORT_FIELD_LEN);
        // Ensure we didn't split a multi-byte char
        assert!(spark.callsign.is_char_boundary(spark.callsign.len()));
    }

    #[test]
    fn test_load_rejects_oversized_file() {
        let dir = tempfile::tempdir().unwrap();
        let path = dir.path().join("spark.toml");
        // Write a file larger than MAX_SPARK_FILE_SIZE
        let content = "x".repeat(65 * 1024 + 1);
        std::fs::write(&path, content).unwrap();
        assert!(SparkConfig::load_from_file(&path).is_none());
    }
}