use std::collections::HashMap;
/// Configuration for command-prefix rewriting, deserialized from TOML.
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize, Default)]
pub struct PrefixConfig {
    /// Confirmed mappings from a command key (e.g. "gh" or "cargo test") to
    /// the prefix tokens that get prepended to matching commands.
    #[serde(default)]
    pub mappings: HashMap<String, Vec<String>>,
    /// Prefixes to try speculatively when no confirmed mapping matches.
    /// NOTE(review): only the first entry is consulted by `lookup_prefix`.
    #[serde(default)]
    pub candidate_prefixes: Vec<Vec<String>>,
    /// When true, candidate rewrites are recorded as probes so the mapping
    /// can be learned after the rewritten command succeeds.
    #[serde(default)]
    pub learn_on_successful_fallback: bool,
}
/// Read/write access to the prefix configuration.
pub trait PrefixStore {
    /// Load the current config; implementations degrade to a default config
    /// rather than failing.
    fn load(&self) -> PrefixConfig;
    /// Persist `prefix` as the confirmed mapping for `key`.
    fn confirm_mapping(&self, key: &str, prefix: &[String]) -> Result<(), std::io::Error>;
    /// Remove the mapping for `key`; returns `true` if a mapping existed.
    fn remove_mapping(&self, key: &str) -> Result<bool, std::io::Error>;
}
/// [`PrefixStore`] backed by a TOML file on disk.
pub struct FilePrefixStore {
    /// Location of the prefixes TOML file (see `default_path`).
    pub path: std::path::PathBuf,
}
impl FilePrefixStore {
    /// Resolve the on-disk location of the prefix config.
    ///
    /// Honors `CRS_RX_PREFIXES` first; otherwise uses
    /// `$XDG_CONFIG_HOME/rx/prefixes.toml`, deriving the config base from
    /// `$HOME/.config` when `XDG_CONFIG_HOME` is unset.
    pub fn default_path() -> std::path::PathBuf {
        std::env::var_os("CRS_RX_PREFIXES")
            .map(std::path::PathBuf::from)
            .unwrap_or_else(|| {
                let base = std::env::var_os("XDG_CONFIG_HOME")
                    .map(std::path::PathBuf::from)
                    .unwrap_or_else(|| {
                        // If HOME is also unset this yields the relative path
                        // ".config"; acceptable last-resort fallback.
                        std::path::PathBuf::from(std::env::var_os("HOME").unwrap_or_default())
                            .join(".config")
                    });
                base.join("rx").join("prefixes.toml")
            })
    }

    /// Read and parse the config file. A missing/unreadable file (normal on
    /// first run) yields the default config silently; a parse failure is
    /// warned about on stderr so a corrupt file is not ignored invisibly.
    fn load_config(&self) -> PrefixConfig {
        let content = match std::fs::read_to_string(&self.path) {
            Ok(c) => c,
            Err(_) => return PrefixConfig::default(),
        };
        match toml::from_str(&content) {
            Ok(cfg) => cfg,
            Err(e) => {
                eprintln!(
                    "prefixes: warn: could not parse {}: {e}; using empty config",
                    self.path.display()
                );
                PrefixConfig::default()
            }
        }
    }

    /// Serialize `config` to TOML and write it to `self.path`.
    ///
    /// Creates the parent directory first so a fresh install (e.g. missing
    /// `~/.config/rx/`) does not fail — mirrors `FileProbeStore::write`.
    fn write_config(&self, config: &PrefixConfig) -> Result<(), std::io::Error> {
        if let Some(parent) = self.path.parent() {
            std::fs::create_dir_all(parent)?;
        }
        let serialized = toml::to_string_pretty(config)
            .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string()))?;
        std::fs::write(&self.path, serialized)
    }
}
impl PrefixStore for FilePrefixStore {
    fn load(&self) -> PrefixConfig {
        self.load_config()
    }

    /// Insert (or overwrite) the mapping for `key` and persist immediately.
    fn confirm_mapping(&self, key: &str, prefix: &[String]) -> Result<(), std::io::Error> {
        let mut cfg = self.load_config();
        cfg.mappings.insert(key.to_owned(), prefix.to_vec());
        self.write_config(&cfg)
    }

    /// Drop the mapping for `key`; the file is rewritten only when the key
    /// was actually present. Returns whether anything was removed.
    fn remove_mapping(&self, key: &str) -> Result<bool, std::io::Error> {
        let mut cfg = self.load_config();
        match cfg.mappings.remove(key) {
            None => Ok(false),
            Some(_) => {
                self.write_config(&cfg)?;
                Ok(true)
            }
        }
    }
}
/// One piece of a shell command line as produced by `split_segments`: the
/// text before a separator plus the separator that followed it (`None` for
/// the final segment).
#[derive(Debug, Clone, PartialEq)]
pub struct Segment {
    /// Raw segment text, surrounding whitespace preserved.
    pub text: String,
    /// The separator ("&&", "||", ";" or "|") that followed this segment.
    pub sep: Option<String>,
}
/// Split a command line at the shell separators `&&`, `||`, `;` and `|`,
/// keeping each separator attached to the segment it terminates so that
/// [`rejoin`] can reconstruct the input exactly.
///
/// Splitting is purely textual: a separator appearing inside quotes is
/// still treated as a separator (callers guard against risky rewrites in
/// `lookup_prefix` instead).
pub fn split_segments(cmd: &str) -> Vec<Segment> {
    const SEPARATORS: [&str; 4] = ["&&", "||", ";", "|"];
    let mut segments = Vec::new();
    let mut rest = cmd;
    loop {
        // Earliest occurrence wins; at the same position the longer
        // separator wins, so "||" is never misread as "|" twice.
        let hit = SEPARATORS
            .iter()
            .filter_map(|&sep| rest.find(sep).map(|pos| (pos, sep)))
            .min_by_key(|&(pos, sep)| (pos, std::cmp::Reverse(sep.len())));
        let Some((pos, sep)) = hit else {
            segments.push(Segment {
                text: rest.to_string(),
                sep: None,
            });
            return segments;
        };
        segments.push(Segment {
            text: rest[..pos].to_string(),
            sep: Some(sep.to_string()),
        });
        rest = &rest[pos + sep.len()..];
    }
}
/// Reassemble segments produced by [`split_segments`] into a single command
/// line; for unmodified segments this round-trips the original input.
pub fn rejoin(segs: &[Segment]) -> String {
    segs.iter().fold(String::new(), |mut joined, seg| {
        joined.push_str(&seg.text);
        if let Some(sep) = seg.sep.as_deref() {
            joined.push_str(sep);
        }
        joined
    })
}
/// Result of looking a command segment up in the prefix config.
#[derive(Debug, Clone, PartialEq)]
pub enum PrefixMatch {
    /// The key was found in the confirmed `mappings` table.
    Confirmed { key: String, prefix: Vec<String> },
    /// No confirmed mapping matched; `prefix` is the first configured
    /// candidate prefix, applied speculatively.
    Candidate { key: String, prefix: Vec<String> },
}
/// Look up the prefix to apply to a single command segment.
///
/// Checks the most specific key first (the two-word "cmd subcmd" form), then
/// the bare command name, and finally falls back to the first configured
/// candidate prefix. Returns `None` for segments that should not be rewritten.
pub fn lookup_prefix(segment: &str, config: &PrefixConfig) -> Option<PrefixMatch> {
    let text = segment.trim();
    // Refuse to rewrite segments containing command substitution; injecting
    // a prefix there could alter what the substitution runs.
    if text.contains("$(") || text.contains('`') {
        return None;
    }
    // Shell-style tokenization; unparseable input (e.g. unbalanced quotes)
    // means no match.
    let tokens = shell_words::split(text).ok()?;
    let head = tokens.first()?.as_str();
    if let Some(next) = tokens.get(1) {
        let pair = format!("{head} {next}");
        if let Some(found) = config.mappings.get(&pair) {
            return Some(PrefixMatch::Confirmed {
                key: pair,
                prefix: found.clone(),
            });
        }
    }
    if let Some(found) = config.mappings.get(head) {
        return Some(PrefixMatch::Confirmed {
            key: head.to_string(),
            prefix: found.clone(),
        });
    }
    // Only the first candidate prefix is ever consulted.
    config
        .candidate_prefixes
        .first()
        .map(|cand| PrefixMatch::Candidate {
            key: head.to_string(),
            prefix: cand.clone(),
        })
}
/// A candidate rewrite that was applied speculatively; recorded so the
/// mapping can be confirmed later if the rewritten command succeeded.
#[derive(Debug, Clone, PartialEq)]
pub struct ProbeEntry {
    /// The command key (first token of the segment) the candidate applied to.
    pub key: String,
    /// The candidate prefix tokens that were prepended.
    pub prefix: Vec<String>,
    /// The full original command line (pre-rewrite); used for matching in
    /// `ProbeStore::remove_matching`.
    pub original_command: String,
}
/// Output of [`rewrite_command`]: the rewritten command line plus any
/// candidate probes that should be persisted for learning.
#[derive(Debug, Clone)]
pub struct RewriteResult {
    pub rewritten: String,
    pub probes: Vec<ProbeEntry>,
}
/// Rewrite each segment of a (possibly compound) command line by prepending
/// its configured prefix, preserving separators and surrounding whitespace.
///
/// Candidate (unconfirmed) rewrites additionally produce a probe entry, but
/// only when `learn_on_successful_fallback` is enabled.
pub fn rewrite_command(cmd: &str, config: &PrefixConfig) -> RewriteResult {
    let mut segments = split_segments(cmd);
    let mut probes = Vec::new();
    for segment in segments.iter_mut() {
        let core = segment.text.trim();
        if core.is_empty() {
            continue;
        }
        let found = match lookup_prefix(core, config) {
            Some(m) => m,
            None => continue,
        };
        let (key, prefix, is_candidate) = match found {
            PrefixMatch::Confirmed { key, prefix } => (key, prefix, false),
            PrefixMatch::Candidate { key, prefix } => (key, prefix, true),
        };
        // Re-attach the whitespace that surrounded the trimmed command so
        // rejoin() reproduces the original spacing around separators.
        let lead = segment.text.len() - segment.text.trim_start().len();
        let head_ws = &segment.text[..lead];
        let tail_ws = &segment.text[lead + core.len()..];
        segment.text = format!("{head_ws}{} {core}{tail_ws}", prefix.join(" "));
        if is_candidate && config.learn_on_successful_fallback {
            probes.push(ProbeEntry {
                key,
                prefix,
                original_command: cmd.to_string(),
            });
        }
    }
    RewriteResult {
        rewritten: rejoin(&segments),
        probes,
    }
}
/// TOML wrapper for the probe file: a `[[probes]]` array of tables.
#[derive(Debug, serde::Serialize, serde::Deserialize, Default)]
struct ProbeFile {
    #[serde(default)]
    probes: Vec<ProbeEntryToml>,
}
/// Serde-facing mirror of [`ProbeEntry`], kept separate so the public type
/// does not need to derive serde traits.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
struct ProbeEntryToml {
    key: String,
    prefix: Vec<String>,
    original_command: String,
}
impl From<&ProbeEntry> for ProbeEntryToml {
fn from(e: &ProbeEntry) -> Self {
Self {
key: e.key.clone(),
prefix: e.prefix.clone(),
original_command: e.original_command.clone(),
}
}
}
impl From<ProbeEntryToml> for ProbeEntry {
fn from(t: ProbeEntryToml) -> Self {
Self {
key: t.key,
prefix: t.prefix,
original_command: t.original_command,
}
}
}
/// Persistence for speculative rewrite probes.
pub trait ProbeStore {
    /// Load all recorded probes; missing or unreadable storage yields empty.
    fn load(&self) -> Vec<ProbeEntry>;
    /// Replace the stored probe list with `entries`.
    fn write(&self, entries: &[ProbeEntry]) -> Result<(), std::io::Error>;
    /// Remove every probe whose `original_command` equals `cmd`.
    fn remove_matching(&self, cmd: &str) -> Result<(), std::io::Error>;
}
/// [`ProbeStore`] backed by a TOML file on disk.
pub struct FileProbeStore {
    /// Location of the candidates TOML file (see `default_path`).
    pub path: std::path::PathBuf,
}
impl FileProbeStore {
    /// Resolve the on-disk location of the probe file: `$CRS_CTX_DIR` when
    /// set, otherwise the relative directory `.ctx`.
    pub fn default_path() -> std::path::PathBuf {
        let base = match std::env::var_os("CRS_CTX_DIR") {
            Some(dir) => std::path::PathBuf::from(dir),
            None => std::path::PathBuf::from(".ctx"),
        };
        base.join("candidates.toml")
    }
}
impl ProbeStore for FileProbeStore {
fn load(&self) -> Vec<ProbeEntry> {
let Ok(content) = std::fs::read_to_string(&self.path) else {
return Vec::new();
};
toml::from_str::<ProbeFile>(&content)
.unwrap_or_default()
.probes
.into_iter()
.map(ProbeEntry::from)
.collect()
}
fn write(&self, entries: &[ProbeEntry]) -> Result<(), std::io::Error> {
if let Some(parent) = self.path.parent() {
std::fs::create_dir_all(parent)?;
}
let file = ProbeFile {
probes: entries.iter().map(ProbeEntryToml::from).collect(),
};
let serialized = toml::to_string_pretty(&file)
.map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string()))?;
std::fs::write(&self.path, serialized)
}
fn remove_matching(&self, cmd: &str) -> Result<(), std::io::Error> {
let mut entries = self.load();
let before = entries.len();
entries.retain(|e| e.original_command != cmd);
if entries.len() < before {
self.write(&entries)?;
}
Ok(())
}
}
/// Snapshot of persisted state for display/audit: confirmed mappings sorted
/// by key, plus the pending probes.
#[derive(Debug, Clone, Default)]
pub struct AuditState {
    pub mappings: Vec<(String, Vec<String>)>,
    pub probes: Vec<ProbeEntry>,
}
pub fn audit_state(prefix_store: &dyn PrefixStore, probe_store: &dyn ProbeStore) -> AuditState {
let config = prefix_store.load();
let mut mappings: Vec<(String, Vec<String>)> = config.mappings.into_iter().collect();
mappings.sort_by(|a, b| a.0.cmp(&b.0));
let probes = probe_store.load();
AuditState { mappings, probes }
}
#[cfg(any(test, feature = "testing"))]
pub mod testing {
    //! In-memory fakes for the store traits, for use in unit tests.
    use super::*;

    /// Fake [`PrefixStore`] that serves a fixed config and records the last
    /// confirm/remove call instead of touching the filesystem.
    pub struct FakePrefixStore {
        pub config: PrefixConfig,
        /// Last `(key, prefix)` passed to `confirm_mapping`, if any.
        pub confirmed: std::cell::RefCell<Option<(String, Vec<String>)>>,
        /// Last key passed to `remove_mapping`, if any.
        pub removed: std::cell::RefCell<Option<String>>,
    }
    impl FakePrefixStore {
        pub fn new(config: PrefixConfig) -> Self {
            Self {
                config,
                confirmed: std::cell::RefCell::new(None),
                removed: std::cell::RefCell::new(None),
            }
        }
    }
    impl PrefixStore for FakePrefixStore {
        fn load(&self) -> PrefixConfig {
            self.config.clone()
        }
        fn confirm_mapping(&self, key: &str, prefix: &[String]) -> Result<(), std::io::Error> {
            *self.confirmed.borrow_mut() = Some((key.to_string(), prefix.to_vec()));
            Ok(())
        }
        // NOTE: records the call but does not mutate `config`; the return
        // value reflects whether the key existed at call time.
        fn remove_mapping(&self, key: &str) -> Result<bool, std::io::Error> {
            let existed = self.config.mappings.contains_key(key);
            *self.removed.borrow_mut() = Some(key.to_string());
            Ok(existed)
        }
    }

    /// Fake [`ProbeStore`] backed by an in-memory `Vec`.
    pub struct FakeProbeStore {
        pub entries: std::cell::RefCell<Vec<ProbeEntry>>,
    }
    impl FakeProbeStore {
        pub fn new(entries: Vec<ProbeEntry>) -> Self {
            Self {
                entries: std::cell::RefCell::new(entries),
            }
        }
        pub fn empty() -> Self {
            Self::new(vec![])
        }
    }
    impl ProbeStore for FakeProbeStore {
        fn load(&self) -> Vec<ProbeEntry> {
            self.entries.borrow().clone()
        }
        fn write(&self, entries: &[ProbeEntry]) -> Result<(), std::io::Error> {
            *self.entries.borrow_mut() = entries.to_vec();
            Ok(())
        }
        fn remove_matching(&self, cmd: &str) -> Result<(), std::io::Error> {
            self.entries
                .borrow_mut()
                .retain(|e| e.original_command != cmd);
            Ok(())
        }
    }
}
#[cfg(test)]
mod tests {
    //! Unit tests covering splitting/rejoining, lookup, rewriting, and both
    //! the file-backed and fake stores.
    use super::*;
    use testing::{FakePrefixStore, FakeProbeStore};

    // Convenience constructor: builds a FakePrefixStore from string slices,
    // with learning disabled.
    fn make_store(mappings: &[(&str, &[&str])], candidates: &[&[&str]]) -> FakePrefixStore {
        FakePrefixStore::new(PrefixConfig {
            mappings: mappings
                .iter()
                .map(|(k, v)| (k.to_string(), v.iter().map(|s| s.to_string()).collect()))
                .collect(),
            candidate_prefixes: candidates
                .iter()
                .map(|c| c.iter().map(|s| s.to_string()).collect())
                .collect(),
            learn_on_successful_fallback: false,
        })
    }

    // --- split_segments / rejoin -----------------------------------------

    #[test]
    fn split_simple_pipeline() {
        let segs = split_segments("cargo build | tail -5");
        assert_eq!(
            segs,
            vec![
                Segment {
                    text: "cargo build ".to_string(),
                    sep: Some("|".to_string())
                },
                Segment {
                    text: " tail -5".to_string(),
                    sep: None
                },
            ]
        );
    }

    #[test]
    fn split_and_and() {
        let segs = split_segments("git add -A && git commit -m 'msg'");
        assert_eq!(
            segs,
            vec![
                Segment {
                    text: "git add -A ".to_string(),
                    sep: Some("&&".to_string())
                },
                Segment {
                    text: " git commit -m 'msg'".to_string(),
                    sep: None
                },
            ]
        );
    }

    // Tie-break: at the same position the longer separator must win.
    #[test]
    fn split_or_or_beats_single_pipe_at_same_position() {
        let segs = split_segments("a || b");
        assert_eq!(
            segs,
            vec![
                Segment {
                    text: "a ".to_string(),
                    sep: Some("||".to_string())
                },
                Segment {
                    text: " b".to_string(),
                    sep: None
                },
            ]
        );
    }

    #[test]
    fn rejoin_preserves_separators() {
        let segs = vec![
            Segment {
                text: "cargo build ".to_string(),
                sep: Some("|".to_string()),
            },
            Segment {
                text: " tail -5".to_string(),
                sep: None,
            },
        ];
        assert_eq!(rejoin(&segs), "cargo build | tail -5");
    }

    // --- lookup_prefix ----------------------------------------------------

    #[test]
    fn lookup_single_word_key_matches() {
        let store = make_store(&[("gh", &["op", "plugin", "run", "--"])], &[]);
        let result = lookup_prefix("gh issue list", &store.load());
        assert_eq!(
            result,
            Some(PrefixMatch::Confirmed {
                key: "gh".to_string(),
                prefix: vec![
                    "op".to_string(),
                    "plugin".to_string(),
                    "run".to_string(),
                    "--".to_string()
                ],
            })
        );
    }

    #[test]
    fn lookup_two_word_key_wins_over_single() {
        let store = make_store(
            &[
                ("cargo", &["op", "plugin", "run", "--"]),
                ("cargo test", &["dotenvx", "run", "--"]),
            ],
            &[],
        );
        let result = lookup_prefix("cargo test --workspace", &store.load());
        assert_eq!(
            result,
            Some(PrefixMatch::Confirmed {
                key: "cargo test".to_string(),
                prefix: vec!["dotenvx".to_string(), "run".to_string(), "--".to_string()],
            })
        );
    }

    #[test]
    fn lookup_no_match_returns_none() {
        let store = make_store(&[], &[]);
        assert_eq!(lookup_prefix("echo hello", &store.load()), None);
    }

    #[test]
    fn lookup_candidate_fallback() {
        let store = make_store(&[], &[&["op", "plugin", "run", "--"]]);
        let result = lookup_prefix("gh issue list", &store.load());
        assert_eq!(
            result,
            Some(PrefixMatch::Candidate {
                key: "gh".to_string(),
                prefix: vec![
                    "op".to_string(),
                    "plugin".to_string(),
                    "run".to_string(),
                    "--".to_string()
                ],
            })
        );
    }

    // Command substitution must never be rewritten.
    #[test]
    fn lookup_skips_subshell() {
        let store = make_store(&[("gh", &["op", "plugin", "run", "--"])], &[]);
        assert_eq!(lookup_prefix("$(gh issue list)", &store.load()), None);
        assert_eq!(lookup_prefix("`gh issue list`", &store.load()), None);
    }

    // --- rewrite_command --------------------------------------------------

    #[test]
    fn rewrite_simple_confirmed() {
        let store = make_store(&[("gh", &["op", "plugin", "run", "--"])], &[]);
        let r = rewrite_command("gh issue list", &store.load());
        assert_eq!(r.rewritten, "op plugin run -- gh issue list");
        assert!(r.probes.is_empty());
    }

    #[test]
    fn rewrite_compound_each_segment() {
        let store = make_store(&[("gh", &["op", "plugin", "run", "--"])], &[]);
        let r = rewrite_command("gh issue list && gh pr list", &store.load());
        assert_eq!(
            r.rewritten,
            "op plugin run -- gh issue list && op plugin run -- gh pr list"
        );
    }

    #[test]
    fn rewrite_candidate_no_probe_when_learn_disabled() {
        let store = make_store(&[], &[&["op", "plugin", "run", "--"]]);
        let r = rewrite_command("gh issue list", &store.load());
        assert_eq!(r.rewritten, "op plugin run -- gh issue list");
        assert!(
            r.probes.is_empty(),
            "probes should be empty when learn=false"
        );
    }

    #[test]
    fn rewrite_candidate_records_probe_when_learn_enabled() {
        let mut config = make_store(&[], &[&["op", "plugin", "run", "--"]]).config;
        config.learn_on_successful_fallback = true;
        let r = rewrite_command("gh issue list", &config);
        assert_eq!(r.rewritten, "op plugin run -- gh issue list");
        assert_eq!(r.probes.len(), 1);
        assert_eq!(r.probes[0].key, "gh");
    }

    #[test]
    fn rewrite_no_match_unchanged() {
        let store = make_store(&[], &[]);
        let r = rewrite_command("echo hello", &store.load());
        assert_eq!(r.rewritten, "echo hello");
        assert!(r.probes.is_empty());
    }

    // --- file-backed stores (use tempdirs; no shared state) ---------------

    #[test]
    fn probe_store_round_trips() {
        let dir = tempfile::TempDir::new().unwrap();
        let store = FileProbeStore {
            path: dir.path().join("candidates.toml"),
        };
        let entries = vec![ProbeEntry {
            key: "gh".to_string(),
            prefix: vec![
                "op".to_string(),
                "plugin".to_string(),
                "run".to_string(),
                "--".to_string(),
            ],
            original_command: "gh issue list".to_string(),
        }];
        store.write(&entries).unwrap();
        let loaded = store.load();
        assert_eq!(loaded.len(), 1);
        assert_eq!(loaded[0].key, "gh");
    }

    #[test]
    fn probe_store_remove_matching() {
        let dir = tempfile::TempDir::new().unwrap();
        let store = FileProbeStore {
            path: dir.path().join("candidates.toml"),
        };
        store
            .write(&[
                ProbeEntry {
                    key: "gh".to_string(),
                    prefix: vec![],
                    original_command: "gh issue list".to_string(),
                },
                ProbeEntry {
                    key: "cargo".to_string(),
                    prefix: vec![],
                    original_command: "cargo build".to_string(),
                },
            ])
            .unwrap();
        store.remove_matching("gh issue list").unwrap();
        let remaining = store.load();
        assert_eq!(remaining.len(), 1);
        assert_eq!(remaining[0].key, "cargo");
    }

    #[test]
    fn prefix_store_confirm_and_remove() {
        let dir = tempfile::TempDir::new().unwrap();
        let store = FilePrefixStore {
            path: dir.path().join("prefixes.toml"),
        };
        store
            .confirm_mapping(
                "gh",
                &["op".to_string(), "run".to_string(), "--".to_string()],
            )
            .unwrap();
        let config = store.load();
        assert!(config.mappings.contains_key("gh"));
        let removed = store.remove_mapping("gh").unwrap();
        assert!(removed);
        let config = store.load();
        assert!(!config.mappings.contains_key("gh"));
        let removed_again = store.remove_mapping("gh").unwrap();
        assert!(!removed_again);
    }

    // --- fakes ------------------------------------------------------------

    #[test]
    fn fake_prefix_store_confirm_and_remove() {
        let store = FakePrefixStore::new(PrefixConfig {
            mappings: [("gh".to_string(), vec!["op".to_string()])]
                .into_iter()
                .collect(),
            ..Default::default()
        });
        store
            .confirm_mapping("cargo", &["dotenvx".to_string()])
            .unwrap();
        assert_eq!(store.confirmed.borrow().as_ref().unwrap().0, "cargo");
        let existed = store.remove_mapping("gh").unwrap();
        assert!(existed);
        let missing = store.remove_mapping("missing").unwrap();
        assert!(!missing);
    }

    #[test]
    fn fake_probe_store_round_trip() {
        let store = FakeProbeStore::empty();
        let entries = vec![ProbeEntry {
            key: "gh".to_string(),
            prefix: vec![],
            original_command: "gh issue list".to_string(),
        }];
        store.write(&entries).unwrap();
        assert_eq!(store.load().len(), 1);
        store.remove_matching("gh issue list").unwrap();
        assert!(store.load().is_empty());
    }
}