use std::collections::HashMap;
use std::path::{Path, PathBuf};
use serde::{Deserialize, Serialize};
use thiserror::Error;
use std::process::Command;
use crate::metrics::cognitive::{analyze_cognitive, CognitiveOptions, FunctionCognitive};
use crate::quality::churn::{
check_shallow_clone, get_file_churn_detailed,
is_git_repository, ChurnError, FileChurn, FileChurnDetailed,
};
use crate::types::Language;
#[allow(unused_imports)]
use crate::metrics::cognitive::CognitiveReport;
/// Resolve the repository root for `path` via `git rev-parse --show-toplevel`.
///
/// Returns `None` when git cannot be spawned, the command exits non-zero
/// (e.g. `path` is not inside a working tree), or the output is unusable.
fn get_git_root(path: &Path) -> Option<PathBuf> {
    let output = Command::new("git")
        .args(["rev-parse", "--show-toplevel"])
        .current_dir(path)
        .output()
        .ok()?;
    if !output.status.success() {
        return None;
    }
    let stdout = String::from_utf8_lossy(&output.stdout);
    Some(PathBuf::from(stdout.trim()))
}
/// Compute the path of `analysis_path` relative to `git_root`, with leading
/// and trailing `/` stripped. Both paths are canonicalized first so symlinks
/// and relative components do not break the prefix match.
///
/// Returns `None` if either path cannot be canonicalized or `analysis_path`
/// does not live under `git_root`.
fn get_analysis_prefix(analysis_path: &Path, git_root: &Path) -> Option<String> {
    let analysis = analysis_path.canonicalize().ok()?;
    let root = git_root.canonicalize().ok()?;
    let relative = analysis.strip_prefix(&root).ok()?;
    let text = relative.to_string_lossy();
    Some(text.trim_matches('/').to_string())
}
/// One ranked hotspot: a file (or a single function when analyzing at
/// function granularity) whose change frequency and complexity suggest
/// refactoring risk.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct HotspotEntry {
    /// Path relative to the analyzed directory.
    pub file: String,
    /// Function name; only set in function-level analysis.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub function: Option<String>,
    /// Start line of the function; only set in function-level analysis.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub line: Option<u32>,
    /// Percentile rank of churn among analyzed entries (0.0..=1.0).
    pub churn_score: f64,
    /// Percentile rank of complexity among analyzed entries (0.0..=1.0).
    pub complexity_score: f64,
    /// Weighted composite of the active dimension scores.
    pub hotspot_score: f64,
    /// Commits touching this file in the analysis window.
    pub commit_count: u32,
    /// Total lines added + deleted in the window.
    pub lines_changed: u32,
    /// Max cognitive complexity (file level) or the function's complexity.
    pub complexity: u32,
    /// Optional complexity-trend information (not populated by v2 scoring).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub trend: Option<TrendInfo>,
    /// Human-readable priority guidance derived from `hotspot_score`.
    pub recommendation: String,
    /// Recency-weighted lines changed divided by current LOC (floored).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub relative_churn: Option<f64>,
    /// 0.0 (single owner) .. 1.0 (ownership spread across many authors).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub knowledge_fragmentation: Option<f64>,
    /// Current line count of the file on disk.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub current_loc: Option<u32>,
    /// Distinct authors in the analysis window.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub author_count: Option<u32>,
    /// Scoring-algorithm version; defaults to 1 when deserializing old data.
    #[serde(default = "default_algorithm_version")]
    pub algorithm_version: u32,
}
/// Serde default for `HotspotEntry::algorithm_version`: payloads written
/// before the version field existed are treated as algorithm v1.
fn default_algorithm_version() -> u32 {
    1
}
/// Complexity-trend summary for a hotspot over a trailing period.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct TrendInfo {
    /// Whether complexity is improving, stable, or degrading.
    pub direction: TrendDirection,
    /// Signed change in complexity over the period.
    pub complexity_delta: i32,
    /// Length of the observation period in months.
    pub period_months: u32,
}
/// Direction of a complexity trend; serialized in lowercase
/// ("improving" / "stable" / "degrading").
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum TrendDirection {
    Improving,
    Stable,
    Degrading,
}
/// Relative weight of each scoring dimension in the composite hotspot score.
/// Weights are expected to sum to 1.0 after `renormalize`.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ScoringWeights {
    pub churn: f64,
    pub complexity: f64,
    pub knowledge_fragmentation: f64,
    pub temporal_coupling: f64,
}
impl Default for ScoringWeights {
    /// Baseline mix: churn and complexity dominate (0.35 each), with
    /// fragmentation and temporal coupling at 0.15 each; sums to 1.0.
    fn default() -> Self {
        Self {
            churn: 0.35,
            complexity: 0.35,
            knowledge_fragmentation: 0.15,
            temporal_coupling: 0.15,
        }
    }
}
impl ScoringWeights {
    /// Phase-1 weights: the default mix with temporal coupling disabled,
    /// renormalized so the remaining dimensions still sum to 1.0.
    pub fn default_phase1() -> Self {
        let Self { churn, complexity, knowledge_fragmentation, .. } = Self::default();
        Self {
            churn,
            complexity,
            knowledge_fragmentation,
            temporal_coupling: 0.0,
        }
        .renormalize()
    }

    /// Scale all four weights by their sum so they total 1.0.
    /// A non-positive sum returns the weights unchanged (avoids divide-by-zero).
    pub fn renormalize(&self) -> Self {
        let total = self.churn
            + self.complexity
            + self.knowledge_fragmentation
            + self.temporal_coupling;
        if total <= 0.0 {
            self.clone()
        } else {
            Self {
                churn: self.churn / total,
                complexity: self.complexity / total,
                knowledge_fragmentation: self.knowledge_fragmentation / total,
                temporal_coupling: self.temporal_coupling / total,
            }
        }
    }

    /// Zero out any dimension whose flag in `active` is false
    /// (order: churn, complexity, fragmentation, temporal coupling),
    /// then renormalize the survivors.
    pub fn for_active_dimensions(&self, active: [bool; 4]) -> Self {
        let keep = |on: bool, weight: f64| if on { weight } else { 0.0 };
        Self {
            churn: keep(active[0], self.churn),
            complexity: keep(active[1], self.complexity),
            knowledge_fragmentation: keep(active[2], self.knowledge_fragmentation),
            temporal_coupling: keep(active[3], self.temporal_coupling),
        }
        .renormalize()
    }
}
/// Aggregate statistics for a hotspot analysis run.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct HotspotsSummary {
    /// Files that passed the minimum-commit filter.
    pub total_files_analyzed: usize,
    /// Sum of commit counts across analyzed files.
    pub total_commits: u32,
    /// Length of the analysis window in days.
    pub time_window_days: u32,
    /// Percentage of commits concentrated in the top ~10% of files.
    pub hotspot_concentration: f64,
    /// Overall guidance based on the concentration level.
    pub recommendation: String,
    /// Bot commits excluded from the analysis, when bot filtering is active.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub total_bot_commits_filtered: Option<u32>,
    /// Mean knowledge fragmentation across entries that have one.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub avg_knowledge_fragmentation: Option<f64>,
}
/// Parameters and repository facts recorded alongside a report so results
/// can be reproduced and interpreted later.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct HotspotsMetadata {
    /// Path that was analyzed (as given, lossy-converted to a string).
    pub path: String,
    /// Analysis window in days.
    pub days: u32,
    /// Whether analysis ran at function rather than file granularity.
    pub by_function: bool,
    /// Minimum commits required for a file to be scored.
    pub min_commits: u32,
    /// True when the repository is a shallow clone (history truncated).
    pub is_shallow: bool,
    /// Approximate commit depth of the shallow clone, when known.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub shallow_depth: Option<u32>,
    /// Bot commits excluded, when bot filtering is active.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub bot_commits_filtered: Option<u32>,
    /// Recency half-life in days; absent when recency weighting is disabled.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub recency_halflife: Option<u32>,
    /// Effective (renormalized) weights used for composite scoring.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub scoring_weights: Option<ScoringWeights>,
    /// Scoring-algorithm version that produced the report.
    #[serde(default)]
    pub algorithm_version: u32,
}
/// Complete result of `analyze_hotspots`: ranked entries, aggregate summary,
/// run metadata, and any non-fatal warnings collected along the way.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct HotspotsReport {
    pub hotspots: Vec<HotspotEntry>,
    pub summary: HotspotsSummary,
    pub metadata: HotspotsMetadata,
    /// Non-fatal issues (shallow clone, failed per-file analysis, etc.).
    #[serde(skip_serializing_if = "Vec::is_empty", default)]
    pub warnings: Vec<String>,
}
/// Configuration for a hotspot analysis run; build with the `with_*` methods.
#[derive(Debug, Clone)]
pub struct HotspotsOptions {
    /// History window in days.
    pub days: u32,
    /// Maximum number of entries to keep in the report.
    pub top: usize,
    /// Minimum commits for a file to be considered.
    pub min_commits: u32,
    /// Analyze individual functions instead of whole files.
    pub by_function: bool,
    /// Request trend computation (not populated by v2 scoring).
    pub show_trend: bool,
    /// Path patterns to exclude from churn collection.
    pub exclude: Vec<String>,
    /// Drop entries whose hotspot score falls below this value.
    pub threshold: Option<f64>,
    /// Optional explicit start reference for the window.
    pub since: Option<String>,
    /// Half-life (days) for exponential recency weighting; <= 0 disables it.
    pub recency_halflife: f64,
    /// Include commits from bot authors.
    pub include_bots: bool,
}
impl Default for HotspotsOptions {
    /// One year of history, top 20 entries, at least 3 commits per file,
    /// file-level granularity, 90-day recency half-life, bots excluded.
    fn default() -> Self {
        Self {
            days: 365,
            top: 20,
            min_commits: 3,
            by_function: false,
            show_trend: false,
            exclude: Vec::new(),
            threshold: None,
            since: None,
            recency_halflife: 90.0,
            include_bots: false,
        }
    }
}
impl HotspotsOptions {
    /// Alias for `Default::default()`.
    pub fn new() -> Self {
        Self::default()
    }
    /// Set the history window in days.
    pub fn with_days(self, days: u32) -> Self {
        Self { days, ..self }
    }
    /// Set the maximum number of report entries.
    pub fn with_top(self, top: usize) -> Self {
        Self { top, ..self }
    }
    /// Set the minimum commit count per file.
    pub fn with_min_commits(self, min_commits: u32) -> Self {
        Self { min_commits, ..self }
    }
    /// Toggle function-level granularity.
    pub fn with_by_function(self, by_function: bool) -> Self {
        Self { by_function, ..self }
    }
    /// Toggle trend computation.
    pub fn with_show_trend(self, show_trend: bool) -> Self {
        Self { show_trend, ..self }
    }
    /// Set path patterns to exclude.
    pub fn with_exclude(self, exclude: Vec<String>) -> Self {
        Self { exclude, ..self }
    }
    /// Set a minimum hotspot-score threshold for reported entries.
    pub fn with_threshold(self, threshold: f64) -> Self {
        Self { threshold: Some(threshold), ..self }
    }
    /// Set an explicit start reference for the window.
    pub fn with_since(self, since: String) -> Self {
        Self { since: Some(since), ..self }
    }
    /// Set the recency-weighting half-life in days.
    pub fn with_recency_halflife(self, days: f64) -> Self {
        Self { recency_halflife: days, ..self }
    }
    /// Toggle inclusion of bot commits.
    pub fn with_include_bots(self, include_bots: bool) -> Self {
        Self { include_bots, ..self }
    }
}
/// Errors that can abort a hotspot analysis run. Per-file complexity
/// failures are reported as warnings instead, so `ComplexityError` is
/// reserved for callers that need a hard failure.
#[derive(Debug, Error)]
pub enum HotspotsError {
    #[error("Path not found: {0}")]
    PathNotFound(PathBuf),
    #[error("Not a git repository: {0}")]
    NotGitRepository(PathBuf),
    #[error("Churn analysis failed: {0}")]
    ChurnError(#[from] ChurnError),
    #[error("Complexity analysis failed for {file}: {reason}")]
    ComplexityError {
        file: PathBuf,
        reason: String,
    },
    #[error("I/O error: {0}")]
    Io(#[from] std::io::Error),
    #[error("No files found to analyze in {0}")]
    NoFilesFound(PathBuf),
}
/// Run the full hotspot analysis pipeline for `path`.
///
/// Pipeline: validate path and git repository -> warn on shallow clones ->
/// collect per-file churn -> remap git-root-relative paths to the analyzed
/// subdirectory -> drop files below the minimum commit count -> gather
/// complexity (file- or function-level) -> score and rank (algorithm v2).
///
/// # Errors
/// Returns `HotspotsError` when the path is missing, is not a git
/// repository, or churn collection fails. Per-file complexity failures are
/// downgraded to warnings rather than aborting the run.
pub fn analyze_hotspots(path: &Path, options: &HotspotsOptions) -> Result<HotspotsReport, HotspotsError> {
    if !path.exists() {
        return Err(HotspotsError::PathNotFound(path.to_path_buf()));
    }
    if !is_git_repository(path)? {
        return Err(HotspotsError::NotGitRepository(path.to_path_buf()));
    }
    let mut warnings = Vec::new();
    // Shallow clones truncate history, which silently skews churn counts.
    let (is_shallow, shallow_depth) = check_shallow_clone(path)?;
    if is_shallow {
        let depth_info = shallow_depth
            .map(|d| format!(" (~{} commits)", d))
            .unwrap_or_default();
        warnings.push(format!(
            "Repository is a shallow clone{}. Churn analysis may be incomplete.",
            depth_info
        ));
    }
    let (detailed_churn_raw, total_bot_filtered) = get_file_churn_detailed(
        path, options.days, &options.exclude, options.include_bots
    )?;
    // Git reports paths relative to the repo root; when analyzing a
    // subdirectory, remap keys so they are relative to that subdirectory.
    let git_root = get_git_root(path);
    let analysis_prefix = git_root.as_ref().and_then(|root| get_analysis_prefix(path, root));
    let detailed_churn = remap_detailed_churn_for_analysis(
        detailed_churn_raw,
        analysis_prefix.as_deref(),
    );
    let file_churn: HashMap<String, FileChurn> = detailed_churn.iter()
        .map(|(k, v)| (k.clone(), v.base.clone()))
        .collect();
    if file_churn.is_empty() {
        warnings.push("No commits found in the specified time window.".to_string());
        return Ok(build_empty_hotspots_report(
            path,
            options,
            is_shallow,
            shallow_depth,
            total_bot_filtered,
            warnings,
            "No data available for analysis.".to_string(),
        ));
    }
    // Files with too few commits add noise; drop them before scoring.
    let filtered_churn: HashMap<String, FileChurn> = file_churn
        .into_iter()
        .filter(|(_, fc)| fc.commit_count >= options.min_commits)
        .collect();
    if filtered_churn.is_empty() {
        warnings.push(format!(
            "No files found with {} or more commits.",
            options.min_commits
        ));
        return Ok(build_empty_hotspots_report(
            path,
            options,
            is_shallow,
            shallow_depth,
            total_bot_filtered,
            warnings,
            "No files meet the minimum commit threshold.".to_string(),
        ));
    }
    // Gather complexity at the requested granularity; failures per file
    // become warnings inside these helpers.
    let hotspots = if options.by_function {
        analyze_function_level(path, &filtered_churn, &mut warnings)?
    } else {
        analyze_file_level(path, &filtered_churn, &mut warnings)?
    };
    let total_commits: u32 = filtered_churn.values().map(|f| f.commit_count).sum();
    let total_files = filtered_churn.len();
    let scored = score_hotspots_v2(
        path,
        options,
        detailed_churn,
        hotspots,
        &mut warnings,
        total_commits,
        total_files,
    );
    Ok(HotspotsReport {
        hotspots: scored.hotspots,
        summary: HotspotsSummary {
            total_files_analyzed: total_files,
            total_commits,
            time_window_days: options.days,
            hotspot_concentration: scored.hotspot_concentration,
            recommendation: scored.summary_recommendation,
            total_bot_commits_filtered: Some(total_bot_filtered),
            avg_knowledge_fragmentation: Some(scored.avg_frag),
        },
        metadata: HotspotsMetadata {
            path: path.to_string_lossy().to_string(),
            days: options.days,
            by_function: options.by_function,
            min_commits: options.min_commits,
            is_shallow,
            shallow_depth,
            bot_commits_filtered: Some(total_bot_filtered),
            recency_halflife: if options.recency_halflife > 0.0 { Some(options.recency_halflife as u32) } else { None },
            scoring_weights: Some(scored.effective_weights.clone()),
            algorithm_version: 2,
        },
        warnings,
    })
}
/// Intermediate result bundle returned by `score_hotspots_v2`.
struct ScoredHotspots {
    /// Scored, sorted, thresholded, and truncated entries.
    hotspots: Vec<HotspotEntry>,
    /// Renormalized weights actually used (inactive dimensions zeroed).
    effective_weights: ScoringWeights,
    /// Percentage of commits in the top ~10% of files.
    hotspot_concentration: f64,
    /// Mean knowledge fragmentation across entries that have one.
    avg_frag: f64,
    /// Overall recommendation derived from the concentration level.
    summary_recommendation: String,
}
/// Restrict churn data to files under the analysis subdirectory and rewrite
/// keys to be relative to it.
///
/// `analysis_prefix` is the analyzed directory relative to the git root
/// (e.g. `"src"`); `None` or `""` means the whole repository was analyzed
/// and the map is returned unchanged.
///
/// Bug fix: the previous check used `file_path.starts_with(prefix)`, which
/// also matched *sibling* paths sharing the prefix as a string — with prefix
/// `"src"`, the file `"src2/main.rs"` was wrongly included and remapped to
/// `"2/main.rs"`. We now match only the exact prefix or paths under
/// `"<prefix>/"`.
fn remap_detailed_churn_for_analysis(
    detailed_churn_raw: HashMap<String, FileChurnDetailed>,
    analysis_prefix: Option<&str>,
) -> HashMap<String, FileChurnDetailed> {
    let Some(prefix) = analysis_prefix else {
        return detailed_churn_raw;
    };
    if prefix.is_empty() {
        return detailed_churn_raw;
    }
    let dir_prefix = format!("{}/", prefix);
    detailed_churn_raw
        .into_iter()
        .filter_map(|(file_path, fcd)| {
            if let Some(rest) = file_path.strip_prefix(&dir_prefix) {
                // Path lives under the analyzed directory: keep the remainder.
                Some((rest.to_string(), fcd))
            } else if file_path == prefix {
                // Degenerate case: entry named exactly like the prefix.
                Some((String::new(), fcd))
            } else {
                None
            }
        })
        .collect()
}
/// Build a report with no entries — used when no commits were found or no
/// file met the minimum-commit threshold — carrying the collected warnings
/// and a caller-supplied recommendation string.
fn build_empty_hotspots_report(
    path: &Path,
    options: &HotspotsOptions,
    is_shallow: bool,
    shallow_depth: Option<u32>,
    total_bot_filtered: u32,
    warnings: Vec<String>,
    recommendation: String,
) -> HotspotsReport {
    HotspotsReport {
        hotspots: Vec::new(),
        summary: HotspotsSummary {
            total_files_analyzed: 0,
            total_commits: 0,
            time_window_days: options.days,
            hotspot_concentration: 0.0,
            recommendation,
            total_bot_commits_filtered: Some(total_bot_filtered),
            avg_knowledge_fragmentation: None,
        },
        metadata: HotspotsMetadata {
            path: path.to_string_lossy().to_string(),
            days: options.days,
            by_function: options.by_function,
            min_commits: options.min_commits,
            is_shallow,
            shallow_depth,
            bot_commits_filtered: Some(total_bot_filtered),
            // A non-positive half-life means recency weighting is disabled.
            recency_halflife: if options.recency_halflife > 0.0 {
                Some(options.recency_halflife as u32)
            } else {
                None
            },
            scoring_weights: None,
            algorithm_version: 2,
        },
        warnings,
    }
}
/// Algorithm-v2 scoring: enrich each hotspot with recency-weighted relative
/// churn and knowledge fragmentation, convert each dimension with variance
/// to percentile ranks, combine them with renormalized weights, then sort,
/// threshold, and truncate.
///
/// Dimensions where all files look alike carry no signal and are excluded
/// (their weight is redistributed by `for_active_dimensions`).
fn score_hotspots_v2(
    path: &Path,
    options: &HotspotsOptions,
    detailed_churn: HashMap<String, FileChurnDetailed>,
    mut hotspots: Vec<HotspotEntry>,
    warnings: &mut Vec<String>,
    total_commits: u32,
    total_files: usize,
) -> ScoredHotspots {
    let today = chrono::Utc::now().date_naive();
    let halflife = options.recency_halflife;
    // Pass 1: per-entry enrichment (LOC, relative churn, fragmentation).
    for hotspot in &mut hotspots {
        let full_path = path.join(&hotspot.file);
        // Current line count; 0 when the file is missing or unreadable.
        let loc = if full_path.exists() {
            std::fs::read_to_string(&full_path)
                .map(|s| s.lines().count() as u32)
                .unwrap_or(0)
        } else {
            0
        };
        hotspot.current_loc = Some(loc);
        if let Some(detail) = detailed_churn.get(&hotspot.file) {
            // Lines changed per commit, exponentially discounted by age.
            let weighted_lines: f64 = detail
                .commits
                .iter()
                .map(|c| {
                    // Dates look like "YYYY-MM-DD..."; parse the first 10
                    // bytes. Unparseable dates fall back to today (weight 1).
                    let commit_date = chrono::NaiveDate::parse_from_str(
                        &c.date[..10.min(c.date.len())],
                        "%Y-%m-%d",
                    )
                    .unwrap_or(today);
                    let age = (today - commit_date).num_days().max(0) as f64;
                    let weight = recency_weight(age, halflife);
                    weight * (c.lines_added + c.lines_deleted) as f64
                })
                .sum();
            // Floor the denominator so tiny files don't explode the ratio.
            hotspot.relative_churn = Some(weighted_lines / (loc.max(MIN_LOC_FLOOR)) as f64);
            // Commit counts per author feed the fragmentation metric.
            let mut author_counts: HashMap<String, u32> = HashMap::new();
            for c in &detail.commits {
                *author_counts.entry(c.author_email.clone()).or_insert(0) += 1;
            }
            let author_vec: Vec<(String, u32)> = author_counts.into_iter().collect();
            hotspot.knowledge_fragmentation = Some(knowledge_fragmentation(&author_vec));
            hotspot.author_count = Some(detail.base.author_count);
        }
    }
    // Pass 2: detect which dimensions actually discriminate between files.
    let churn_values: Vec<f64> = hotspots
        .iter()
        .map(|h| h.relative_churn.unwrap_or(0.0))
        .collect();
    let complexity_values: Vec<f64> = hotspots.iter().map(|h| h.complexity as f64).collect();
    let frag_values: Vec<f64> = hotspots
        .iter()
        .map(|h| h.knowledge_fragmentation.unwrap_or(0.0))
        .collect();
    let churn_has_variance = has_variance(&churn_values);
    let complexity_has_variance = has_variance(&complexity_values);
    let frag_has_variance = has_variance(&frag_values);
    // Order: churn, complexity, fragmentation, temporal coupling (unused).
    let active = [churn_has_variance, complexity_has_variance, frag_has_variance, false];
    if !churn_has_variance && hotspots.len() > 1 {
        warnings.push("All files have similar churn. This dimension excluded from scoring.".to_string());
    }
    if !complexity_has_variance && hotspots.len() > 1 {
        warnings.push("All files have similar complexity. This dimension excluded from scoring.".to_string());
    }
    if !frag_has_variance && hotspots.len() > 1 {
        warnings.push("All files have similar knowledge fragmentation. This dimension excluded from scoring.".to_string());
    }
    if hotspots.len() == 1 {
        warnings.push("Only one file analyzed. Consider expanding the search scope.".to_string());
    }
    // Percentile ranks per dimension; zeros when the dimension is inactive.
    let pct_churn = if churn_has_variance {
        percentile_ranks(&churn_values)
    } else {
        vec![0.0; hotspots.len()]
    };
    let pct_complexity = if complexity_has_variance {
        percentile_ranks(&complexity_values)
    } else {
        vec![0.0; hotspots.len()]
    };
    let pct_frag = if frag_has_variance {
        percentile_ranks(&frag_values)
    } else {
        vec![0.0; hotspots.len()]
    };
    // Redistribute weight from inactive dimensions to active ones.
    let base_weights = ScoringWeights::default_phase1();
    let effective_weights = base_weights.for_active_dimensions(active);
    for (i, hotspot) in hotspots.iter_mut().enumerate() {
        hotspot.churn_score = pct_churn[i];
        hotspot.complexity_score = pct_complexity[i];
        hotspot.hotspot_score = composite_score_weighted(
            pct_churn[i],
            pct_complexity[i],
            pct_frag[i],
            0.0,
            &effective_weights,
        );
        hotspot.recommendation = get_recommendation(hotspot.hotspot_score);
    }
    // Highest score first; NaN-safe comparison falls back to Equal.
    hotspots.sort_by(|a, b| {
        b.hotspot_score
            .partial_cmp(&a.hotspot_score)
            .unwrap_or(std::cmp::Ordering::Equal)
    });
    // Concentration: share of all commits landing in the top ~10% of files.
    // Computed before threshold/truncation so it reflects the full set.
    let top_10_percent = (total_files / 10).max(1);
    let top_commits: u32 = hotspots.iter().take(top_10_percent).map(|h| h.commit_count).sum();
    let hotspot_concentration = if total_commits > 0 {
        (top_commits as f64 / total_commits as f64) * 100.0
    } else {
        0.0
    };
    let avg_frag = {
        let frags: Vec<f64> = hotspots
            .iter()
            .filter_map(|h| h.knowledge_fragmentation)
            .collect();
        if frags.is_empty() {
            0.0
        } else {
            frags.iter().sum::<f64>() / frags.len() as f64
        }
    };
    // Apply the caller's score threshold, then cap the list length.
    if let Some(threshold) = options.threshold {
        hotspots.retain(|h| h.hotspot_score >= threshold);
    }
    hotspots.truncate(options.top);
    let summary_recommendation = if hotspot_concentration > 70.0 {
        "High concentration of changes in few files. Consider breaking up large modules.".to_string()
    } else if hotspot_concentration > 40.0 {
        "Moderate change concentration. Monitor hotspots for potential refactoring.".to_string()
    } else {
        "Changes are well distributed across the codebase.".to_string()
    };
    ScoredHotspots {
        hotspots,
        effective_weights,
        hotspot_concentration,
        avg_frag,
        summary_recommendation,
    }
}
/// Build one unscored `HotspotEntry` per churned file, using the file's
/// maximum function cognitive complexity. Files that vanished or whose
/// language is unsupported are skipped; complexity failures become warnings
/// and score complexity 0.
fn analyze_file_level(
    path: &Path,
    churn_data: &HashMap<String, FileChurn>,
    warnings: &mut Vec<String>,
) -> Result<Vec<HotspotEntry>, HotspotsError> {
    let mut entries = Vec::with_capacity(churn_data.len());
    for (rel_path, churn) in churn_data {
        let absolute = path.join(rel_path);
        // Skip deleted files and files we cannot parse.
        if !absolute.exists() || Language::from_path(&absolute).is_none() {
            continue;
        }
        // Thresholds set far above any realistic value so the cognitive pass
        // reports raw scores without flagging anything itself.
        let cognitive_options = CognitiveOptions::new()
            .with_threshold(1000)
            .with_high_threshold(10000);
        let max_complexity = match analyze_cognitive(&absolute, &cognitive_options) {
            Ok(report) => report
                .functions
                .iter()
                .map(|f| f.cognitive)
                .max()
                .unwrap_or(0),
            Err(e) => {
                warnings.push(format!("Complexity analysis failed for {}: {}", rel_path, e));
                0
            }
        };
        entries.push(HotspotEntry {
            file: rel_path.clone(),
            function: None,
            line: None,
            churn_score: 0.0,
            complexity_score: 0.0,
            hotspot_score: 0.0,
            commit_count: churn.commit_count,
            lines_changed: churn.lines_changed,
            complexity: max_complexity,
            trend: None,
            recommendation: String::new(),
            relative_churn: None,
            knowledge_fragmentation: None,
            current_loc: None,
            author_count: None,
            algorithm_version: 2,
        });
    }
    Ok(entries)
}
/// Build one unscored `HotspotEntry` per function of each churned file.
/// Churn figures are file-level, so every function in a file shares the same
/// commit/line counts. Files that vanished, are unsupported, or fail the
/// cognitive pass are skipped (the latter with a warning).
fn analyze_function_level(
    path: &Path,
    churn_data: &HashMap<String, FileChurn>,
    warnings: &mut Vec<String>,
) -> Result<Vec<HotspotEntry>, HotspotsError> {
    let mut entries = Vec::new();
    for (rel_path, churn) in churn_data {
        let absolute = path.join(rel_path);
        // Skip deleted files and files we cannot parse.
        if !absolute.exists() || Language::from_path(&absolute).is_none() {
            continue;
        }
        // Thresholds set far above any realistic value so the cognitive pass
        // reports raw scores without flagging anything itself.
        let cognitive_options = CognitiveOptions::new()
            .with_threshold(1000)
            .with_high_threshold(10000);
        let functions: Vec<FunctionCognitive> = match analyze_cognitive(&absolute, &cognitive_options) {
            Ok(report) => report.functions,
            Err(e) => {
                warnings.push(format!("Complexity analysis failed for {}: {}", rel_path, e));
                continue;
            }
        };
        for func in functions {
            entries.push(HotspotEntry {
                file: rel_path.clone(),
                function: Some(func.name),
                line: Some(func.line),
                churn_score: 0.0,
                complexity_score: 0.0,
                hotspot_score: 0.0,
                commit_count: churn.commit_count,
                lines_changed: churn.lines_changed,
                complexity: func.cognitive,
                trend: None,
                recommendation: String::new(),
                relative_churn: None,
                knowledge_fragmentation: None,
                current_loc: None,
                author_count: None,
                algorithm_version: 2,
            });
        }
    }
    Ok(entries)
}
/// Linearly rescale `value` from `[min, max]` into `[0.0, 1.0]`.
/// A degenerate range (`max` ≈ `min`) yields the midpoint 0.5;
/// out-of-range inputs are clamped to the unit interval.
pub fn normalize_value(value: f64, min: f64, max: f64) -> f64 {
    let span = max - min;
    if span.abs() < f64::EPSILON {
        0.5
    } else {
        ((value - min) / span).clamp(0.0, 1.0)
    }
}
/// Map a composite hotspot score to a priority message.
/// Bands (exclusive lower bounds): >0.74 critical, >0.63 high,
/// >0.50 medium, otherwise monitor.
fn get_recommendation(score: f64) -> String {
    let message = match score {
        s if s > 0.74 => "Critical: High churn + high complexity + fragmented knowledge. Prioritize refactoring.",
        s if s > 0.63 => "High priority: Frequent changes to complex code.",
        s if s > 0.50 => "Medium priority: Consider simplification.",
        _ => "Monitor for changes.",
    };
    message.to_string()
}
/// Classify a signed complexity change: deltas within ±2 are treated as
/// noise (`Stable`); larger moves are `Improving` (down) or `Degrading` (up).
pub fn calculate_trend(complexity_delta: i32) -> TrendDirection {
    match complexity_delta {
        d if d < -2 => TrendDirection::Improving,
        d if d > 2 => TrendDirection::Degrading,
        _ => TrendDirection::Stable,
    }
}
/// Lower bound on the LOC denominator for relative churn, so very small
/// files cannot produce explosive churn ratios.
const MIN_LOC_FLOOR: u32 = 10;
/// Convert `values` to percentile ranks in `[0.0, 1.0]`.
///
/// Equal values (within `f64::EPSILON`) share the average of their 1-based
/// ranks before normalization by `(n - 1)`. Empty input yields an empty
/// vector; a single value yields `[1.0]`.
pub fn percentile_ranks(values: &[f64]) -> Vec<f64> {
    let n = values.len();
    if n == 0 {
        return Vec::new();
    }
    if n == 1 {
        return vec![1.0];
    }
    // Sort indices by value instead of sorting (index, value) pairs.
    let mut order: Vec<usize> = (0..n).collect();
    order.sort_by(|&a, &b| {
        values[a]
            .partial_cmp(&values[b])
            .unwrap_or(std::cmp::Ordering::Equal)
    });
    let mut ranks = vec![0.0_f64; n];
    let mut start = 0;
    while start < n {
        // Extend the group while values tie with the group's first element.
        let mut end = start;
        while end < n && (values[order[end]] - values[order[start]]).abs() < f64::EPSILON {
            end += 1;
        }
        // Average of the 1-based ranks start+1 ..= end.
        let shared_rank = (start + 1 + end) as f64 / 2.0;
        for &original_index in &order[start..end] {
            ranks[original_index] = shared_rank;
        }
        start = end;
    }
    let denom = (n - 1) as f64;
    ranks.into_iter().map(|r| (r - 1.0) / denom).collect()
}
/// Exponential-decay weight for a commit `age_days` old: 1.0 at age zero,
/// 0.5 at one half-life, and so on. A non-positive `halflife_days` disables
/// weighting (always 1.0); negative ages are clamped to zero.
pub fn recency_weight(age_days: f64, halflife_days: f64) -> f64 {
    if halflife_days <= 0.0 {
        return 1.0;
    }
    let decay_rate = std::f64::consts::LN_2 / halflife_days;
    (-decay_rate * age_days.max(0.0)).exp()
}
/// Ratio of changed lines to current file size, with the LOC denominator
/// floored at `MIN_LOC_FLOOR` so tiny files cannot dominate the metric.
pub fn relative_churn(lines_changed: u32, current_loc: u32) -> f64 {
    let denominator = current_loc.max(MIN_LOC_FLOOR);
    f64::from(lines_changed) / f64::from(denominator)
}
pub use crate::quality::churn::is_bot_author;
/// Measure how spread out ownership of a file is, from per-author commit
/// counts: 0.0 when one author made every commit, approaching 1.0 as
/// ownership fragments. When more than three "minor" authors (each under
/// ~5% of commits) contributed, the score is boosted by 20% (capped at 1.0).
pub fn knowledge_fragmentation(author_commits: &[(String, u32)]) -> f64 {
    let total: u32 = author_commits.iter().map(|(_, c)| c).sum();
    if total == 0 {
        // Covers both an empty slice and all-zero commit counts.
        return 0.0;
    }
    let dominant = author_commits.iter().map(|(_, c)| *c).max().unwrap_or(0);
    let base = 1.0 - dominant as f64 / total as f64;
    // Minor contributor cutoff: ~5% of total commits, at least one commit.
    let minor_cutoff = ((total as f64 * 0.05) as u32).max(1);
    let minor_authors = author_commits
        .iter()
        .filter(|(_, c)| *c < minor_cutoff)
        .count();
    if minor_authors > 3 {
        (base * 1.2).min(1.0)
    } else {
        base
    }
}
/// Weighted sum of the four percentile-rank dimensions. With renormalized
/// weights and inputs in [0, 1], the result also lies in [0, 1].
pub fn composite_score_weighted(
    pct_churn: f64,
    pct_complexity: f64,
    pct_fragmentation: f64,
    pct_temporal_coupling: f64,
    weights: &ScoringWeights,
) -> f64 {
    // Same left-to-right accumulation order as a chained `a + b + c + d`.
    let terms = [
        (weights.churn, pct_churn),
        (weights.complexity, pct_complexity),
        (weights.knowledge_fragmentation, pct_fragmentation),
        (weights.temporal_coupling, pct_temporal_coupling),
    ];
    terms.iter().map(|(w, p)| w * p).sum()
}
/// True when at least one value differs from the first by more than
/// `f64::EPSILON`; fewer than two values never count as varying.
pub fn has_variance(values: &[f64]) -> bool {
    match values.split_first() {
        Some((first, rest)) if !rest.is_empty() => {
            rest.iter().any(|v| (v - first).abs() > f64::EPSILON)
        }
        _ => false,
    }
}
// Unit tests for the pure scoring helpers and the options builder.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_normalize_value() {
        assert!((normalize_value(50.0, 0.0, 100.0) - 0.5).abs() < 0.001);
        assert!((normalize_value(0.0, 0.0, 100.0) - 0.0).abs() < 0.001);
        assert!((normalize_value(100.0, 0.0, 100.0) - 1.0).abs() < 0.001);
        // Degenerate range collapses to the midpoint.
        assert!((normalize_value(50.0, 50.0, 50.0) - 0.5).abs() < 0.001);
    }
    #[test]
    fn test_calculate_trend() {
        assert_eq!(calculate_trend(-5), TrendDirection::Improving);
        assert_eq!(calculate_trend(0), TrendDirection::Stable);
        // Boundary: a delta of exactly +2 is still considered stable.
        assert_eq!(calculate_trend(2), TrendDirection::Stable);
        assert_eq!(calculate_trend(5), TrendDirection::Degrading);
    }
    #[test]
    fn test_get_recommendation() {
        // Band boundaries: >0.74 critical, >0.63 high, >0.50 medium.
        assert!(get_recommendation(0.8).contains("Critical"));
        assert!(get_recommendation(0.75).contains("Critical"));
        assert!(get_recommendation(0.7).contains("High priority"));
        assert!(get_recommendation(0.64).contains("High priority"));
        assert!(get_recommendation(0.55).contains("Medium priority"));
        assert!(get_recommendation(0.51).contains("Medium priority"));
        assert!(get_recommendation(0.4).contains("Monitor"));
        assert!(get_recommendation(0.2).contains("Monitor"));
    }
    #[test]
    fn test_options_builder() {
        let opts = HotspotsOptions::new()
            .with_days(30)
            .with_top(10)
            .with_min_commits(5)
            .with_by_function(true);
        assert_eq!(opts.days, 30);
        assert_eq!(opts.top, 10);
        assert_eq!(opts.min_commits, 5);
        assert!(opts.by_function);
    }
}