// tj_core/artifacts.rs
1//! Artifact extraction — regex-based scrape of structured references
2//! out of free-form event text. Captures the bits that turn a journal
3//! entry into a real ledger of what shipped: commit hashes, PR URLs,
4//! ticket IDs, branch names, file paths.
5//!
6//! Intentionally regex-only and side-effect free: the classifier may
7//! still emit a richer JSON payload in the future, but those will be
8//! merged into the same shape via `Artifacts::merge`. Keeping the
9//! extractor pure means `reclassify` can run it offline over historic
10//! events without spawning the model.
11
12use regex::Regex;
13use serde::{Deserialize, Serialize};
14
/// Structured artifacts collected from one or many events.
///
/// All vectors preserve first-seen insertion order and hold exact,
/// case-sensitive deduplicated strings — both `extract` and `merge`
/// dedup before handing the struct back; only the intermediate regex
/// passes inside `extract` ever see raw duplicate matches.
///
/// Every field is `#[serde(default)]` and skipped when empty, so an
/// empty `Artifacts` serializes as `{}` and absent fields deserialize
/// to empty vectors.
#[derive(Debug, Default, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct Artifacts {
    /// Full or abbreviated (7-40 lowercase hex chars) git commit hashes.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub commit_hashes: Vec<String>,
    /// GitHub `…/pull/N` and GitLab `…/merge_requests/N` URLs.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub pr_urls: Vec<String>,
    /// Ticket IDs of the `ABC-123` shape (2+ uppercase letters, dash, digits).
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub linked_issues: Vec<String>,
    /// Path-like tokens containing a slash and a dotted file extension.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub files: Vec<String>,
    /// Branch names scraped from `checkout -b` / `switch -c` / `branch` text.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub branch_names: Vec<String>,
}
31
32impl Artifacts {
33    pub fn is_empty(&self) -> bool {
34        self.commit_hashes.is_empty()
35            && self.pr_urls.is_empty()
36            && self.linked_issues.is_empty()
37            && self.files.is_empty()
38            && self.branch_names.is_empty()
39    }
40
41    /// Merge another `Artifacts` into self, preserving insertion order
42    /// and deduplicating exact-match strings.
43    pub fn merge(&mut self, other: Artifacts) {
44        for (dst, src) in [
45            (&mut self.commit_hashes, other.commit_hashes),
46            (&mut self.pr_urls, other.pr_urls),
47            (&mut self.linked_issues, other.linked_issues),
48            (&mut self.files, other.files),
49            (&mut self.branch_names, other.branch_names),
50        ] {
51            for s in src {
52                if !dst.iter().any(|x| x == &s) {
53                    dst.push(s);
54                }
55            }
56        }
57    }
58}
59
60/// Extract artifacts from a single piece of text (event body, prompt,
61/// tool output — anything stringly-typed). Idempotent and free of I/O.
62pub fn extract(text: &str) -> Artifacts {
63    let mut a = Artifacts::default();
64
65    // Commit hashes — 7 to 40 hex chars surrounded by word boundaries.
66    // Word boundary on \b avoids matching inside longer non-hex tokens
67    // (e.g. ULIDs are base32, but adjacent digits + letters could
68    // technically pass — the boundary keeps matches clean).
69    static_re(
70        r"\b[0-9a-f]{7,40}\b",
71        |m| {
72            // Reject if all-digits (could be a year, an ID, a port).
73            // A real abbreviated commit always has at least one letter.
74            if m.chars().all(|c| c.is_ascii_digit()) {
75                return;
76            }
77            a.commit_hashes.push(m.to_string());
78        },
79        text,
80    );
81
82    // GitHub / GitLab PR URLs.
83    static_re(
84        r"https?://[A-Za-z0-9.\-]+/[A-Za-z0-9_./\-]+/(?:pull|merge_requests)/\d+",
85        |m| a.pr_urls.push(m.to_string()),
86        text,
87    );
88
89    // Ticket IDs: ABC-123. At least 2 letters to avoid matching version
90    // strings like v1-2 and minimum 1 digit.
91    static_re(
92        r"\b[A-Z]{2,}-\d+\b",
93        |m| a.linked_issues.push(m.to_string()),
94        text,
95    );
96
97    // File paths — heuristic: path-like tokens with at least one slash
98    // (and an extension) OR a leading ./ . Tight enough to skip prose,
99    // loose enough to catch the common cases (src/foo.rs, ./bar.ts,
100    // crates/tj-core/src/db.rs).
101    static_re(
102        r"(?:\./|[A-Za-z0-9_\-]+/)+[A-Za-z0-9_.\-]+\.[A-Za-z0-9]{1,8}\b",
103        |m| a.files.push(m.to_string()),
104        text,
105    );
106
107    // Branch names from `git checkout -b <name>` / `git switch -c
108    // <name>` / `branch <name>` blurbs.
109    static_re(
110        r"(?:checkout -b|switch -c|branch)\s+([A-Za-z0-9._/\-]+)",
111        |_full| {},
112        text,
113    );
114    // The closure form above fires on the whole match; capture the
115    // group separately because Regex::captures is what we actually want
116    // here. Done as a second pass to keep the extractor flat.
117    if let Ok(re) = Regex::new(r"(?:checkout -b|switch -c|branch)\s+([A-Za-z0-9._/\-]+)") {
118        for cap in re.captures_iter(text) {
119            if let Some(m) = cap.get(1) {
120                a.branch_names.push(m.as_str().to_string());
121            }
122        }
123    }
124
125    // Dedup in place — emit-time order matters for stable test output.
126    dedup(&mut a.commit_hashes);
127    dedup(&mut a.pr_urls);
128    dedup(&mut a.linked_issues);
129    dedup(&mut a.files);
130    dedup(&mut a.branch_names);
131    a
132}
133
/// Remove duplicate strings in place, keeping the first occurrence of
/// each value and preserving the original relative order.
fn dedup(v: &mut Vec<String>) {
    let mut seen = std::collections::HashSet::new();
    let mut unique = Vec::with_capacity(v.len());
    // Take ownership of the contents so survivors move (no re-clone)
    // into the rebuilt vector.
    for s in std::mem::take(v) {
        if seen.insert(s.clone()) {
            unique.push(s);
        }
    }
    *v = unique;
}
138
139fn static_re(pat: &str, mut f: impl FnMut(&str), text: &str) {
140    if let Ok(re) = Regex::new(pat) {
141        for m in re.find_iter(text) {
142            f(m.as_str());
143        }
144    }
145}
146
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn extracts_commit_hash() {
        let got = extract("fixed in commit abc1234 and 9012abcdef");
        assert_eq!(got.commit_hashes, ["abc1234", "9012abcdef"]);
    }

    #[test]
    fn rejects_all_digit_commit_lookalikes() {
        // Year-like sequences and port numbers must not be mistaken
        // for abbreviated commit hashes.
        let got = extract("ran tests on port 12345 in 2026");
        assert!(got.commit_hashes.is_empty());
    }

    #[test]
    fn extracts_github_pr_url() {
        let url = "https://github.com/Digital-Threads/Task-Journal/pull/42";
        let got = extract(&format!("see {url}"));
        assert_eq!(got.pr_urls, [url]);
    }

    #[test]
    fn extracts_linked_issues() {
        let got = extract("FIN-868 references JIRA-12345 and INC-7");
        assert_eq!(got.linked_issues, ["FIN-868", "JIRA-12345", "INC-7"]);
    }

    #[test]
    fn extracts_file_paths() {
        let got = extract("edited crates/tj-core/src/db.rs and ./README.md");
        for expected in ["crates/tj-core/src/db.rs", "./README.md"] {
            assert!(got.files.iter().any(|p| p == expected));
        }
    }

    #[test]
    fn extracts_branch_names() {
        let got =
            extract("git checkout -b FIN-868-fix-paygate-fee then switch -c hotfix/abc");
        assert_eq!(got.branch_names, ["FIN-868-fix-paygate-fee", "hotfix/abc"]);
    }

    #[test]
    fn merge_dedupes() {
        let mut base = Artifacts {
            commit_hashes: vec!["abc1234".into()],
            ..Default::default()
        };
        let incoming = Artifacts {
            commit_hashes: vec!["abc1234".into(), "def5678".into()],
            ..Default::default()
        };
        base.merge(incoming);
        // Existing entry kept once, new entry appended in order.
        assert_eq!(base.commit_hashes, ["abc1234", "def5678"]);
    }

    #[test]
    fn empty_text_yields_empty_artifacts() {
        assert!(extract("").is_empty());
    }

    #[test]
    fn json_round_trip() {
        let original = Artifacts {
            commit_hashes: vec!["abc1234".into()],
            linked_issues: vec!["FIN-868".into()],
            ..Default::default()
        };
        let encoded = serde_json::to_string(&original).unwrap();
        let decoded: Artifacts = serde_json::from_str(&encoded).unwrap();
        assert_eq!(original, decoded);
    }
}