use crate::models::ToolCategory;
#[cfg(feature = "terraphim")]
use crate::models::ToolChain;
use anyhow::{Context, Result};
use indexmap::IndexMap;
use jiff::Timestamp;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::PathBuf;
/// Tracks tools observed in session logs and promotes frequently-seen ones
/// into [`LearnedPattern`]s once they cross an observation threshold.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PatternLearner {
    /// Candidates keyed by tool name; `IndexMap` preserves insertion order,
    /// which fixes the order of promoted patterns.
    candidate_patterns: IndexMap<String, CandidatePattern>,
    /// Minimum number of observations before a candidate is promoted.
    promotion_threshold: u32,
}
/// A tool that has been observed but not yet promoted to a learned pattern.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CandidatePattern {
    /// Name of the observed tool.
    pub tool_name: String,
    /// Total number of times the tool has been observed.
    pub observations: u32,
    /// Sample of distinct commands the tool was invoked with (capped at 10).
    pub contexts: Vec<String>,
    /// Vote count per serialized category name (see `category_to_string`).
    pub category_votes: HashMap<String, u32>,
    /// Timestamp of the first observation.
    pub first_seen: Timestamp,
    /// Timestamp of the most recent observation.
    pub last_seen: Timestamp,
}
/// A promoted pattern: a tool whose category has been inferred from
/// repeated observations.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LearnedPattern {
    /// Name of the tool this pattern applies to.
    pub tool_name: String,
    /// Category the tool was voted into.
    pub category: ToolCategory,
    /// Fraction of observations agreeing with `category`, in `[0.0, 1.0]`.
    pub confidence: f32,
    /// Total observations at promotion time.
    pub observations: u32,
    /// When the pattern was promoted.
    pub learned_at: Timestamp,
}
impl Default for PatternLearner {
    /// Equivalent to [`PatternLearner::new`] (promotion threshold of 3).
    fn default() -> Self {
        Self::new()
    }
}
#[allow(dead_code)] impl PatternLearner {
#[must_use]
pub fn new() -> Self {
Self {
candidate_patterns: IndexMap::new(),
promotion_threshold: 3,
}
}
#[must_use]
pub fn with_threshold(threshold: u32) -> Self {
Self {
candidate_patterns: IndexMap::new(),
promotion_threshold: threshold,
}
}
pub fn observe(&mut self, tool_name: String, command: String, category: ToolCategory) {
let category_str = category_to_string(&category);
let now = Timestamp::now();
self.candidate_patterns
.entry(tool_name.clone())
.and_modify(|candidate| {
candidate.observations += 1;
candidate.last_seen = now;
if !candidate.contexts.contains(&command) && candidate.contexts.len() < 10 {
candidate.contexts.push(command.clone());
}
*candidate
.category_votes
.entry(category_str.clone())
.or_insert(0) += 1;
})
.or_insert_with(|| CandidatePattern {
tool_name: tool_name.clone(),
observations: 1,
contexts: vec![command],
category_votes: {
let mut votes = HashMap::new();
votes.insert(category_str, 1);
votes
},
first_seen: now,
last_seen: now,
});
}
pub fn promote_candidates(&mut self) -> Vec<LearnedPattern> {
let mut promoted = Vec::new();
let now = Timestamp::now();
let candidates_to_promote: Vec<String> = self
.candidate_patterns
.iter()
.filter(|(_, candidate)| candidate.observations >= self.promotion_threshold)
.map(|(name, _)| name.clone())
.collect();
for tool_name in candidates_to_promote {
if let Some(candidate) = self.candidate_patterns.shift_remove(&tool_name) {
let category = determine_category(&candidate.category_votes, &candidate.contexts);
let confidence =
calculate_confidence(&candidate.category_votes, candidate.observations);
promoted.push(LearnedPattern {
tool_name: candidate.tool_name,
category,
confidence,
observations: candidate.observations,
learned_at: now,
});
}
}
promoted
}
#[must_use]
pub fn candidate_count(&self) -> usize {
self.candidate_patterns.len()
}
pub fn save_to_cache(&self, learned_patterns: &[LearnedPattern]) -> Result<()> {
let cache_path = get_cache_path()?;
if let Some(parent) = cache_path.parent() {
std::fs::create_dir_all(parent).with_context(|| {
format!("Failed to create cache directory: {}", parent.display())
})?;
}
let json = serde_json::to_string_pretty(learned_patterns)
.context("Failed to serialize learned patterns")?;
std::fs::write(&cache_path, json).with_context(|| {
format!(
"Failed to write learned patterns to {}",
cache_path.display()
)
})?;
Ok(())
}
pub fn load_from_cache() -> Result<Vec<LearnedPattern>> {
let cache_path = get_cache_path()?;
if !cache_path.exists() {
return Ok(Vec::new());
}
let content = std::fs::read_to_string(&cache_path)
.with_context(|| format!("Failed to read cache file: {}", cache_path.display()))?;
let patterns: Vec<LearnedPattern> = serde_json::from_str(&content)
.context("Failed to parse learned patterns from cache")?;
Ok(patterns)
}
#[must_use]
pub fn get_candidates(&self) -> Vec<&CandidatePattern> {
self.candidate_patterns.values().collect()
}
}
/// Picks the winning category by majority vote; with no votes at all, falls
/// back to keyword heuristics over the observed commands.
#[allow(dead_code)]
fn determine_category(category_votes: &HashMap<String, u32>, contexts: &[String]) -> ToolCategory {
    category_votes
        .iter()
        .max_by_key(|(_, count)| *count)
        .map_or_else(
            || infer_category_from_contexts(contexts),
            |(category, _)| string_to_category(category),
        )
}
/// Confidence is the share of observations captured by the leading category,
/// clamped to `[0.0, 1.0]`. Zero observations yield zero confidence.
#[allow(dead_code)]
fn calculate_confidence(category_votes: &HashMap<String, u32>, total_observations: u32) -> f32 {
    // Guard against division by zero before doing any arithmetic.
    if total_observations == 0 {
        return 0.0;
    }
    let top_votes = category_votes.values().fold(0_u32, |best, &v| best.max(v));
    #[allow(clippy::cast_precision_loss)]
    let ratio = (top_votes as f32) / (total_observations as f32);
    ratio.clamp(0.0, 1.0)
}
/// Infers a tool category from observed command strings using keyword
/// heuristics, checked in priority order; the first group with a hit wins.
/// Falls back to `Other("unknown")` when nothing matches.
#[allow(dead_code)]
pub fn infer_category_from_contexts(contexts: &[String]) -> ToolCategory {
    let haystack = contexts.join(" ").to_lowercase();
    // Some keywords carry a trailing space (e.g. "git ", "npm ") so that the
    // bare word inside another token does not match.
    let mentions_any = |needles: &[&str]| needles.iter().any(|n| haystack.contains(n));

    if mentions_any(&["test", "spec", "jest", "pytest", "mocha"]) {
        ToolCategory::Testing
    } else if mentions_any(&["build", "webpack", "vite", "rollup", "esbuild"]) {
        ToolCategory::BuildTool
    } else if mentions_any(&["lint", "eslint", "clippy", "pylint"]) {
        ToolCategory::Linting
    } else if mentions_any(&["git ", "commit", "push", "pull"]) {
        ToolCategory::Git
    } else if mentions_any(&["install", "npm ", "yarn ", "pnpm ", "cargo ", "pip "]) {
        ToolCategory::PackageManager
    } else if mentions_any(&["deploy", "publish", "wrangler", "vercel", "netlify"]) {
        ToolCategory::CloudDeploy
    } else if mentions_any(&["database", "migrate", "psql", "mysql"]) {
        ToolCategory::Database
    } else {
        ToolCategory::Other("unknown".to_string())
    }
}
/// Serializes a category to the stable string form used as a vote key;
/// `string_to_category` is the inverse.
#[allow(dead_code)]
fn category_to_string(category: &ToolCategory) -> String {
    let name = match category {
        ToolCategory::PackageManager => "PackageManager",
        ToolCategory::BuildTool => "BuildTool",
        ToolCategory::Testing => "Testing",
        ToolCategory::Linting => "Linting",
        ToolCategory::Git => "Git",
        ToolCategory::CloudDeploy => "CloudDeploy",
        ToolCategory::Database => "Database",
        // The open variant embeds its payload: "Other(<payload>)".
        ToolCategory::Other(inner) => return format!("Other({inner})"),
    };
    name.to_string()
}
/// Parses the string form produced by `category_to_string` back into a
/// category. Unrecognized strings become `Other(<string>)`.
#[allow(dead_code)]
fn string_to_category(s: &str) -> ToolCategory {
    match s {
        "PackageManager" => ToolCategory::PackageManager,
        "BuildTool" => ToolCategory::BuildTool,
        "Testing" => ToolCategory::Testing,
        "Linting" => ToolCategory::Linting,
        "Git" => ToolCategory::Git,
        "CloudDeploy" => ToolCategory::CloudDeploy,
        "Database" => ToolCategory::Database,
        s if s.starts_with("Other(") => {
            // Strip exactly one "Other(" prefix and at most one trailing ')'.
            // The previous trim_start_matches/trim_end_matches stripped
            // *repeated* occurrences, so a payload ending in ')' (serialized
            // as e.g. "Other(x))" for inner "x)") failed to round-trip.
            let inner = s.strip_prefix("Other(").unwrap_or(s);
            let inner = inner.strip_suffix(')').unwrap_or(inner);
            ToolCategory::Other(inner.to_string())
        }
        _ => ToolCategory::Other(s.to_string()),
    }
}
/// Resolves the cache file for learned patterns:
/// `~/.config/claude-log-analyzer/learned_patterns.json`.
///
/// # Errors
/// Fails when the home directory cannot be determined.
#[allow(dead_code)]
fn get_cache_path() -> Result<PathBuf> {
    let base = home::home_dir().context("Could not find home directory")?;
    let cache_file = base
        .join(".config")
        .join("claude-log-analyzer")
        .join("learned_patterns.json");
    Ok(cache_file)
}
/// A directed edge between two tools in the knowledge graph.
#[cfg(feature = "terraphim")]
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[allow(dead_code)]
pub struct ToolRelationship {
    /// Source tool of the edge.
    pub from_tool: String,
    /// Target tool of the edge.
    pub to_tool: String,
    /// Semantic kind of the edge.
    pub relationship_type: RelationType,
    /// Strength of the inference, in `[0.0, 1.0]`.
    pub confidence: f32,
}
/// Kinds of edges the knowledge graph records between tools.
#[cfg(feature = "terraphim")]
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[allow(dead_code)]
pub enum RelationType {
    /// `from_tool` depends on `to_tool` (inferred from sequential usage
    /// in `ToolRelationship::infer_from_chain`).
    DependsOn,
    /// The tools are interchangeable alternatives.
    Replaces,
    /// The tools are frequently used together.
    Complements,
    /// The tools conflict with each other.
    Conflicts,
}
#[cfg(feature = "terraphim")]
#[allow(dead_code)]
impl ToolRelationship {
    /// Derives `DependsOn` edges from a chain of sequentially-used tools:
    /// each tool is recorded as depending on the tool that preceded it.
    #[must_use]
    pub fn infer_from_chain(chain: &ToolChain) -> Vec<Self> {
        chain
            .tools
            .windows(2)
            .map(|pair| {
                let (earlier, later) = (&pair[0], &pair[1]);
                // Chains seen more often (capped at 10 occurrences) and with
                // higher success rates yield more confident edges.
                #[allow(clippy::cast_precision_loss)]
                let frequency_factor = (chain.frequency.min(10) as f32) / 10.0;
                let mut confidence = chain.success_rate * frequency_factor;
                // Curated dependency pairs receive a confidence boost.
                if is_known_dependency(earlier, later) {
                    confidence = (confidence + 0.2).min(1.0);
                }
                ToolRelationship {
                    // Edge direction: the later tool depends on the earlier.
                    from_tool: later.clone(),
                    to_tool: earlier.clone(),
                    relationship_type: RelationType::DependsOn,
                    confidence,
                }
            })
            .collect()
    }

    /// Builds a relationship with the confidence clamped into `[0.0, 1.0]`.
    #[must_use]
    pub fn new(
        from_tool: String,
        to_tool: String,
        relationship_type: RelationType,
        confidence: f32,
    ) -> Self {
        Self {
            from_tool,
            to_tool,
            relationship_type,
            confidence: confidence.clamp(0.0, 1.0),
        }
    }
}
#[cfg(feature = "terraphim")]
/// True when `dependent` is curated as requiring `dependency`.
/// The check is directional: `("npm", "wrangler")` holds, the reverse does not.
#[allow(dead_code)]
fn is_known_dependency(dependency: &str, dependent: &str) -> bool {
    const KNOWN_PAIRS: [(&str, &str); 8] = [
        ("npm", "wrangler"),
        ("npm", "vercel"),
        ("npm", "netlify"),
        ("cargo", "clippy"),
        ("git", "npm"),
        ("git", "cargo"),
        ("npm", "npx"),
        ("yarn", "npx"),
    ];
    KNOWN_PAIRS
        .iter()
        .any(|&(dep, child)| dep == dependency && child == dependent)
}
/// Collection of inferred relationships between tools.
#[cfg(feature = "terraphim")]
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
#[allow(dead_code)]
pub struct KnowledgeGraph {
    /// All edges; duplicates are merged by `add_relationship`.
    pub relationships: Vec<ToolRelationship>,
}
#[cfg(feature = "terraphim")]
#[allow(dead_code)]
impl KnowledgeGraph {
    /// Creates an empty graph.
    #[must_use]
    pub fn new() -> Self {
        Self {
            relationships: Vec::new(),
        }
    }

    /// Builds a graph from observed tool chains: dependency edges from
    /// sequential use first, then replacement and complement inference.
    #[must_use]
    pub fn build_from_chains(chains: &[ToolChain]) -> Self {
        let mut graph = Self::new();
        for rel in chains.iter().flat_map(ToolRelationship::infer_from_chain) {
            graph.add_relationship(rel);
        }
        graph.infer_replacement_relationships(chains);
        graph.infer_complement_relationships(chains);
        graph
    }

    /// Inserts an edge, merging a duplicate (same endpoints and type) by
    /// averaging confidences instead of storing a second edge.
    pub fn add_relationship(&mut self, new_rel: ToolRelationship) {
        let duplicate = self.relationships.iter_mut().find(|r| {
            r.from_tool == new_rel.from_tool
                && r.to_tool == new_rel.to_tool
                && r.relationship_type == new_rel.relationship_type
        });
        match duplicate {
            Some(existing) => {
                existing.confidence = (existing.confidence + new_rel.confidence) / 2.0;
            }
            None => self.relationships.push(new_rel),
        }
    }

    /// Detects `Replaces` edges: known-alternative tools appearing at the
    /// same position across chains are treated as substitutes.
    fn infer_replacement_relationships(&mut self, chains: &[ToolChain]) {
        // position -> (tool -> frequency-weighted usage at that position)
        let mut usage_by_position: HashMap<usize, HashMap<String, u32>> = HashMap::new();
        for chain in chains {
            for (pos, tool) in chain.tools.iter().enumerate() {
                let slot = usage_by_position.entry(pos).or_default();
                *slot.entry(tool.clone()).or_insert(0) += chain.frequency;
            }
        }
        for usage in usage_by_position.values() {
            let entries: Vec<(&String, &u32)> = usage.iter().collect();
            for (idx, &(tool_a, freq_a)) in entries.iter().enumerate() {
                for &(tool_b, freq_b) in &entries[idx + 1..] {
                    if !are_known_alternatives(tool_a, tool_b) {
                        continue;
                    }
                    // Confidence scales with how evenly usage is split
                    // between the two alternatives, damped by 0.8.
                    #[allow(clippy::cast_precision_loss)]
                    let total = (freq_a + freq_b) as f32;
                    #[allow(clippy::cast_precision_loss)]
                    let confidence = (*freq_a.min(freq_b) as f32 / total) * 0.8;
                    self.add_relationship(ToolRelationship::new(
                        tool_a.clone(),
                        tool_b.clone(),
                        RelationType::Replaces,
                        confidence,
                    ));
                }
            }
        }
    }

    /// Detects `Complements` edges: tool pairs that frequently co-occur in
    /// chains without an existing dependency link.
    fn infer_complement_relationships(&mut self, chains: &[ToolChain]) {
        let mut cooccurrence: HashMap<(String, String), u32> = HashMap::new();
        for chain in chains {
            for i in 0..chain.tools.len() {
                for j in (i + 1)..chain.tools.len() {
                    let (a, b) = (&chain.tools[i], &chain.tools[j]);
                    // A dependency edge already explains this pairing.
                    if self.has_relationship(a, b, &RelationType::DependsOn) {
                        continue;
                    }
                    // Normalize key order so (a, b) and (b, a) accumulate
                    // into the same counter.
                    let key = if a < b {
                        (a.clone(), b.clone())
                    } else {
                        (b.clone(), a.clone())
                    };
                    *cooccurrence.entry(key).or_insert(0) += chain.frequency;
                }
            }
        }
        for ((first, second), count) in cooccurrence {
            // Require at least 3 weighted co-occurrences; cap the frequency
            // contribution at 10 and damp the result by 0.6.
            if count < 3 {
                continue;
            }
            #[allow(clippy::cast_precision_loss)]
            let confidence = ((count.min(10) as f32) / 10.0) * 0.6;
            self.add_relationship(ToolRelationship::new(
                first,
                second,
                RelationType::Complements,
                confidence,
            ));
        }
    }

    /// True if an edge of `rel_type` exists between `from` and `to` in
    /// either direction.
    fn has_relationship(&self, from: &str, to: &str, rel_type: &RelationType) -> bool {
        self.relationships.iter().any(|r| {
            r.relationship_type == *rel_type
                && ((r.from_tool == from && r.to_tool == to)
                    || (r.from_tool == to && r.to_tool == from))
        })
    }

    /// All edges touching `tool_name` as either endpoint.
    #[must_use]
    pub fn get_relationships_for_tool(&self, tool_name: &str) -> Vec<&ToolRelationship> {
        self.relationships
            .iter()
            .filter(|r| r.from_tool == tool_name || r.to_tool == tool_name)
            .collect()
    }
}
#[cfg(feature = "terraphim")]
/// True when the two tools are curated as interchangeable alternatives.
/// The check is symmetric: argument order does not matter.
#[allow(dead_code)]
fn are_known_alternatives(tool1: &str, tool2: &str) -> bool {
    let alternatives = [
        ("npm", "yarn"),
        ("npm", "pnpm"),
        ("yarn", "pnpm"),
        ("npx", "bunx"),
        ("webpack", "vite"),
        ("webpack", "rollup"),
        ("jest", "vitest"),
        ("mocha", "jest"),
        ("eslint", "biome"),
    ];
    alternatives
        .iter()
        .any(|&(a, b)| (tool1 == a && tool2 == b) || (tool1 == b && tool2 == a))
}
// Unit tests for the candidate lifecycle, category heuristics, and cache
// path, plus property-based (proptest) and feature-gated (terraphim) suites.
#[cfg(test)]
mod tests {
use super::*;
// --- Construction defaults ---
#[test]
fn test_pattern_learner_new() {
let learner = PatternLearner::new();
assert_eq!(learner.promotion_threshold, 3);
assert_eq!(learner.candidate_count(), 0);
}
#[test]
fn test_pattern_learner_with_threshold() {
let learner = PatternLearner::with_threshold(5);
assert_eq!(learner.promotion_threshold, 5);
}
// --- Observation bookkeeping ---
#[test]
fn test_observe_single_tool() {
let mut learner = PatternLearner::new();
learner.observe(
"pytest".to_string(),
"pytest tests/".to_string(),
ToolCategory::Testing,
);
assert_eq!(learner.candidate_count(), 1);
let candidates = learner.get_candidates();
assert_eq!(candidates.len(), 1);
assert_eq!(candidates[0].tool_name, "pytest");
assert_eq!(candidates[0].observations, 1);
}
#[test]
fn test_observe_multiple_times() {
let mut learner = PatternLearner::new();
for i in 0..5 {
learner.observe(
"pytest".to_string(),
format!("pytest tests/test_{i}.py"),
ToolCategory::Testing,
);
}
assert_eq!(learner.candidate_count(), 1);
let candidates = learner.get_candidates();
assert_eq!(candidates[0].observations, 5);
// Context sampling is capped at 10 distinct commands.
assert!(candidates[0].contexts.len() <= 10); }
// --- Promotion threshold behavior ---
#[test]
fn test_promote_candidates_threshold_met() {
let mut learner = PatternLearner::new();
for i in 0..3 {
learner.observe(
"pytest".to_string(),
format!("pytest tests/test_{i}.py"),
ToolCategory::Testing,
);
}
let promoted = learner.promote_candidates();
assert_eq!(promoted.len(), 1);
assert_eq!(promoted[0].tool_name, "pytest");
assert_eq!(promoted[0].observations, 3);
assert!(matches!(promoted[0].category, ToolCategory::Testing));
// Promotion removes the candidate from the pool.
assert_eq!(learner.candidate_count(), 0); }
#[test]
fn test_promote_candidates_threshold_not_met() {
let mut learner = PatternLearner::new();
for i in 0..2 {
learner.observe(
"pytest".to_string(),
format!("pytest tests/test_{i}.py"),
ToolCategory::Testing,
);
}
let promoted = learner.promote_candidates();
assert_eq!(promoted.len(), 0);
assert_eq!(learner.candidate_count(), 1); }
// Majority vote decides the promoted category (2x Testing vs 1x BuildTool).
#[test]
fn test_category_voting() {
let mut learner = PatternLearner::new();
learner.observe(
"tool".to_string(),
"tool test".to_string(),
ToolCategory::Testing,
);
learner.observe(
"tool".to_string(),
"tool test2".to_string(),
ToolCategory::Testing,
);
learner.observe(
"tool".to_string(),
"tool build".to_string(),
ToolCategory::BuildTool,
);
let promoted = learner.promote_candidates();
assert_eq!(promoted.len(), 1);
assert!(matches!(promoted[0].category, ToolCategory::Testing));
}
#[test]
fn test_confidence_calculation() {
let mut votes = HashMap::new();
votes.insert("Testing".to_string(), 3);
votes.insert("BuildTool".to_string(), 1);
let confidence = calculate_confidence(&votes, 4);
// 3 winning votes out of 4 observations.
assert!((confidence - 0.75).abs() < 0.01); }
// --- Keyword heuristics, one test per category group ---
#[test]
fn test_infer_category_testing() {
let contexts = vec!["pytest tests/".to_string(), "pytest --verbose".to_string()];
let category = infer_category_from_contexts(&contexts);
assert!(matches!(category, ToolCategory::Testing));
}
#[test]
fn test_infer_category_build_tool() {
let contexts = vec!["webpack build".to_string(), "vite build".to_string()];
let category = infer_category_from_contexts(&contexts);
assert!(matches!(category, ToolCategory::BuildTool));
}
#[test]
fn test_infer_category_linting() {
let contexts = vec!["eslint src/".to_string(), "cargo clippy".to_string()];
let category = infer_category_from_contexts(&contexts);
assert!(matches!(category, ToolCategory::Linting));
}
#[test]
fn test_infer_category_git() {
let contexts = vec!["git commit".to_string(), "git push".to_string()];
let category = infer_category_from_contexts(&contexts);
assert!(matches!(category, ToolCategory::Git));
}
#[test]
fn test_infer_category_package_manager() {
let contexts = vec!["npm install".to_string(), "yarn add".to_string()];
let category = infer_category_from_contexts(&contexts);
assert!(matches!(category, ToolCategory::PackageManager));
}
// Serialization round-trip preserves the category discriminant.
#[test]
fn test_category_roundtrip() {
let categories = vec![
ToolCategory::PackageManager,
ToolCategory::BuildTool,
ToolCategory::Testing,
ToolCategory::Linting,
ToolCategory::Git,
ToolCategory::CloudDeploy,
ToolCategory::Database,
ToolCategory::Other("custom".to_string()),
];
for category in categories {
let s = category_to_string(&category);
let parsed = string_to_category(&s);
assert_eq!(
std::mem::discriminant(&category),
std::mem::discriminant(&parsed)
);
}
}
#[test]
fn test_get_cache_path() {
let path = get_cache_path();
assert!(path.is_ok());
let path_buf = path.unwrap();
assert!(path_buf.to_string_lossy().contains(".config"));
assert!(path_buf.to_string_lossy().contains("claude-log-analyzer"));
assert!(path_buf.to_string_lossy().contains("learned_patterns.json"));
}
// --- Property-based tests exercising invariants over random inputs ---
mod proptest_tests {
use super::*;
use proptest::prelude::*;
proptest! {
#[test]
fn test_observe_properties(
tool_name in "[a-z]{3,15}",
command in "[a-z ]{5,30}",
observation_count in 1u32..10
) {
let mut learner = PatternLearner::new();
for _ in 0..observation_count {
learner.observe(
tool_name.clone(),
command.clone(),
ToolCategory::Testing
);
}
prop_assert_eq!(learner.candidate_count(), 1);
let candidates = learner.get_candidates();
prop_assert_eq!(candidates[0].observations, observation_count);
prop_assert_eq!(&candidates[0].tool_name, &tool_name);
}
#[test]
fn test_promotion_threshold_properties(
threshold in 1u32..20,
observations in 1u32..20
) {
let mut learner = PatternLearner::with_threshold(threshold);
for _ in 0..observations {
learner.observe(
"tool".to_string(),
"command".to_string(),
ToolCategory::Testing
);
}
let promoted = learner.promote_candidates();
if observations >= threshold {
prop_assert_eq!(promoted.len(), 1);
prop_assert_eq!(learner.candidate_count(), 0);
} else {
prop_assert_eq!(promoted.len(), 0);
prop_assert_eq!(learner.candidate_count(), 1);
}
}
#[test]
fn test_confidence_properties(
winning_votes in 1u32..100,
losing_votes in 0u32..100
) {
let total = winning_votes + losing_votes;
if total == 0 {
return Ok(());
}
let mut votes = HashMap::new();
votes.insert("Category1".to_string(), winning_votes);
if losing_votes > 0 {
votes.insert("Category2".to_string(), losing_votes);
}
let confidence = calculate_confidence(&votes, total);
prop_assert!((0.0..=1.0).contains(&confidence));
#[allow(clippy::cast_precision_loss)]
let max_votes = winning_votes.max(losing_votes);
let expected = (max_votes as f32) / (total as f32);
prop_assert!((confidence - expected).abs() < 0.01);
}
}
}
// --- Feature-gated knowledge-graph tests ---
#[cfg(feature = "terraphim")]
mod terraphim_tests {
use super::*;
#[test]
fn test_tool_relationship_new() {
let rel = ToolRelationship::new(
"npm".to_string(),
"wrangler".to_string(),
RelationType::DependsOn,
0.8,
);
assert_eq!(rel.from_tool, "npm");
assert_eq!(rel.to_tool, "wrangler");
assert_eq!(rel.relationship_type, RelationType::DependsOn);
assert!((rel.confidence - 0.8).abs() < 0.01);
}
// Out-of-range confidences are clamped into [0.0, 1.0].
#[test]
fn test_tool_relationship_confidence_clamp() {
let rel = ToolRelationship::new(
"npm".to_string(),
"wrangler".to_string(),
RelationType::DependsOn,
1.5,
);
assert!((rel.confidence - 1.0).abs() < 0.01);
let rel = ToolRelationship::new(
"npm".to_string(),
"wrangler".to_string(),
RelationType::DependsOn,
-0.5,
);
assert!((rel.confidence - 0.0).abs() < 0.01);
}
#[test]
fn test_infer_from_chain_sequential_tools() {
let chain = ToolChain {
tools: vec!["git".to_string(), "npm".to_string(), "wrangler".to_string()],
frequency: 5,
average_time_between_ms: 1000,
typical_agent: Some("devops".to_string()),
success_rate: 0.9,
};
let relationships = ToolRelationship::infer_from_chain(&chain);
assert_eq!(relationships.len(), 2);
for rel in &relationships {
assert_eq!(rel.relationship_type, RelationType::DependsOn);
assert!(rel.confidence > 0.0);
assert!(rel.confidence <= 1.0);
}
}
// Known dependency pairs get the +0.2 confidence boost.
#[test]
fn test_infer_from_chain_known_dependency() {
let chain = ToolChain {
tools: vec!["npm".to_string(), "wrangler".to_string()],
frequency: 10,
average_time_between_ms: 500,
typical_agent: Some("devops".to_string()),
success_rate: 1.0,
};
let relationships = ToolRelationship::infer_from_chain(&chain);
assert_eq!(relationships.len(), 1);
let rel = &relationships[0];
assert!(rel.confidence > 0.9);
}
#[test]
fn test_knowledge_graph_new() {
let graph = KnowledgeGraph::new();
assert_eq!(graph.relationships.len(), 0);
}
#[test]
fn test_knowledge_graph_add_relationship() {
let mut graph = KnowledgeGraph::new();
let rel = ToolRelationship::new(
"npm".to_string(),
"wrangler".to_string(),
RelationType::DependsOn,
0.8,
);
graph.add_relationship(rel);
assert_eq!(graph.relationships.len(), 1);
}
// Duplicate edges merge by averaging confidence: (0.6 + 0.8) / 2 = 0.7.
#[test]
fn test_knowledge_graph_deduplication() {
let mut graph = KnowledgeGraph::new();
let rel1 = ToolRelationship::new(
"npm".to_string(),
"wrangler".to_string(),
RelationType::DependsOn,
0.6,
);
let rel2 = ToolRelationship::new(
"npm".to_string(),
"wrangler".to_string(),
RelationType::DependsOn,
0.8,
);
graph.add_relationship(rel1);
graph.add_relationship(rel2);
assert_eq!(graph.relationships.len(), 1);
let rel = &graph.relationships[0];
assert!((rel.confidence - 0.7).abs() < 0.01);
}
#[test]
fn test_knowledge_graph_build_from_chains() {
let chains = vec![
ToolChain {
tools: vec!["git".to_string(), "npm".to_string()],
frequency: 10,
average_time_between_ms: 500,
typical_agent: Some("developer".to_string()),
success_rate: 0.95,
},
ToolChain {
tools: vec!["npm".to_string(), "wrangler".to_string()],
frequency: 8,
average_time_between_ms: 1000,
typical_agent: Some("devops".to_string()),
success_rate: 0.9,
},
];
let graph = KnowledgeGraph::build_from_chains(&chains);
assert!(!graph.relationships.is_empty());
let depends_on_count = graph
.relationships
.iter()
.filter(|r| r.relationship_type == RelationType::DependsOn)
.count();
assert!(depends_on_count >= 2);
}
// npm and yarn at the same chain position should yield a Replaces edge.
#[test]
fn test_knowledge_graph_replacement_relationships() {
let chains = vec![
ToolChain {
tools: vec!["npm".to_string(), "build".to_string()],
frequency: 5,
average_time_between_ms: 1000,
typical_agent: Some("developer".to_string()),
success_rate: 0.9,
},
ToolChain {
tools: vec!["yarn".to_string(), "build".to_string()],
frequency: 5,
average_time_between_ms: 1000,
typical_agent: Some("developer".to_string()),
success_rate: 0.9,
},
];
let graph = KnowledgeGraph::build_from_chains(&chains);
let replaces_count = graph
.relationships
.iter()
.filter(|r| r.relationship_type == RelationType::Replaces)
.count();
assert!(replaces_count > 0);
}
// Lookup matches a tool as either endpoint of an edge.
#[test]
fn test_knowledge_graph_get_relationships_for_tool() {
let mut graph = KnowledgeGraph::new();
graph.add_relationship(ToolRelationship::new(
"npm".to_string(),
"wrangler".to_string(),
RelationType::DependsOn,
0.8,
));
graph.add_relationship(ToolRelationship::new(
"git".to_string(),
"npm".to_string(),
RelationType::Complements,
0.7,
));
let npm_rels = graph.get_relationships_for_tool("npm");
assert_eq!(npm_rels.len(), 2);
}
#[test]
fn test_are_known_alternatives() {
assert!(are_known_alternatives("npm", "yarn"));
assert!(are_known_alternatives("yarn", "npm"));
assert!(are_known_alternatives("npx", "bunx"));
assert!(are_known_alternatives("webpack", "vite"));
assert!(!are_known_alternatives("npm", "cargo"));
}
#[test]
fn test_is_known_dependency() {
assert!(is_known_dependency("npm", "wrangler"));
assert!(is_known_dependency("cargo", "clippy"));
assert!(is_known_dependency("git", "npm"));
assert!(!is_known_dependency("random", "tool"));
}
}
}