// linthis 0.19.3 — A fast, cross-platform multi-language linter and formatter
// (NOTE: a scraped docs.rs page header and its line-number gutter were removed
// here; they were extraction artifacts, not part of the source.)
// Copyright 2024 zhlinh and linthis Project Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found at
//
// https://opensource.org/license/MIT
//
// The above copyright notice and this permission
// notice shall be included in all copies or
// substantial portions of the Software.

//! Complexity analyzer implementation.

use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::time::Instant;

use rayon::prelude::*;
use serde::{Deserialize, Serialize};

use super::languages::{
    GoComplexityAnalyzer, JavaComplexityAnalyzer, PythonComplexityAnalyzer, RustComplexityAnalyzer,
    TypeScriptComplexityAnalyzer,
};
use super::metrics::{FileMetrics, MetricLevel, SummaryStats};
use super::thresholds::Thresholds;

/// Language-specific complexity analyzer trait.
///
/// One implementation exists per supported language (Rust, TypeScript,
/// Python, Go, Java — see `ComplexityAnalyzer::new`). `ComplexityAnalyzer`
/// dispatches a file to the first implementation whose `extensions()`
/// contain the file's extension.
pub trait LanguageComplexityAnalyzer: Send + Sync {
    /// Get the analyzer name
    #[allow(dead_code)]
    fn name(&self) -> &str;

    /// Get supported file extensions
    ///
    /// Compared directly against `Path::extension()` in
    /// `ComplexityAnalyzer::get_analyzer`, so entries are bare extensions
    /// without a leading dot (e.g. "rs") — NOTE(review): inferred from that
    /// lookup; confirm against the individual implementations.
    fn extensions(&self) -> &[&str];

    /// Get the language name
    fn language(&self) -> &str;

    /// Analyze a file and return metrics
    ///
    /// `content` is the full text of `path` (read with `read_to_string`, so
    /// UTF-8). On failure, returns a human-readable error string which the
    /// caller accumulates in `AnalysisResult::errors`.
    fn analyze_file(&self, path: &Path, content: &str) -> Result<FileMetrics, String>;
}

/// Options for complexity analysis
/// Options for complexity analysis.
///
/// Construct with [`AnalysisOptions::new`] (path only) or `Default` and
/// override fields as needed; `analyze` reads these without mutating them.
#[derive(Debug, Clone)]
pub struct AnalysisOptions {
    /// Path to analyze (file or directory root for the walk)
    pub path: PathBuf,
    /// Specific files to analyze (overrides path-based collection when non-empty)
    pub files: Vec<PathBuf>,
    /// File patterns to include (glob syntax; empty means "include everything")
    pub include: Vec<String>,
    /// File patterns to exclude (glob syntax, checked before `include`)
    pub exclude: Vec<String>,
    /// Complexity threshold for warnings
    pub threshold: Option<u32>,
    /// Output format (default "human")
    pub format: String,
    /// Include trend analysis
    pub with_trends: bool,
    /// Number of historical runs for trends (default 10)
    pub trend_count: usize,
    /// Verbose output
    pub verbose: bool,
    /// Parallel processing via rayon (default true)
    pub parallel: bool,
}

impl Default for AnalysisOptions {
    /// Empty path and filter lists, human-readable output, trends disabled
    /// (with a 10-run window when enabled), parallel analysis on.
    fn default() -> Self {
        Self {
            path: PathBuf::new(),
            files: Vec::new(),
            include: Vec::new(),
            exclude: Vec::new(),
            threshold: None,
            format: String::from("human"),
            with_trends: false,
            trend_count: 10,
            verbose: false,
            parallel: true,
        }
    }
}

impl AnalysisOptions {
    /// Create options for analyzing `path`, with every other field at its
    /// default value.
    pub fn new(path: PathBuf) -> Self {
        let mut options = Self::default();
        options.path = path;
        options
    }
}

/// Result of complexity analysis
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnalysisResult {
    /// Per-file metrics
    pub files: Vec<FileMetrics>,
    /// Summary statistics
    pub summary: SummaryStats,
    /// Metrics by language
    pub by_language: HashMap<String, SummaryStats>,
    /// Analysis duration in milliseconds
    pub duration_ms: u64,
    /// Files that couldn't be analyzed
    pub errors: Vec<String>,
    /// Thresholds used
    pub thresholds: Thresholds,
}

impl AnalysisResult {
    /// Create an empty result: no files, default summary and thresholds,
    /// zero duration.
    pub fn new() -> Self {
        Self {
            files: Vec::new(),
            summary: SummaryStats::default(),
            by_language: HashMap::new(),
            duration_ms: 0,
            errors: Vec::new(),
            thresholds: Thresholds::default(),
        }
    }

    /// Get files whose cyclomatic complexity strictly exceeds `threshold`.
    pub fn high_complexity_files(&self, threshold: u32) -> Vec<&FileMetrics> {
        self.files
            .iter()
            .filter(|f| f.metrics.cyclomatic > threshold)
            .collect()
    }

    /// Get files sorted by cyclomatic complexity (descending).
    pub fn files_by_complexity(&self) -> Vec<&FileMetrics> {
        let mut files: Vec<_> = self.files.iter().collect();
        // Descending order via Reverse; unstable sort is fine since ties
        // between equal-complexity files carry no meaning.
        files.sort_unstable_by_key(|f| std::cmp::Reverse(f.metrics.cyclomatic));
        files
    }

    /// Recompute `summary` and `by_language` from `self.files`.
    ///
    /// Fixes relative to the previous version: sums accumulate in `u64` so a
    /// large repository cannot overflow a `u32`; per-language stats now also
    /// fill in `avg_cognitive`/`max_cognitive` (previously left at the
    /// default 0 even though the data was available); `overall_level()` is
    /// evaluated once per item in the High/Critical counts.
    pub fn calculate_summary(&mut self) {
        self.summary.total_files = self.files.len();
        self.summary.total_functions = self.files.iter().map(|f| f.functions.len()).sum();
        self.summary.total_loc = self.files.iter().map(|f| f.metrics.loc as u64).sum();
        self.summary.total_sloc = self.files.iter().map(|f| f.metrics.sloc as u64).sum();

        if !self.files.is_empty() {
            let n = self.files.len() as f64;
            // Widen to u64 before summing to avoid u32 overflow on huge inputs.
            let cyclo_sum: u64 = self.files.iter().map(|f| f.metrics.cyclomatic as u64).sum();
            let cogn_sum: u64 = self.files.iter().map(|f| f.metrics.cognitive as u64).sum();

            self.summary.avg_cyclomatic = cyclo_sum as f64 / n;
            self.summary.avg_cognitive = cogn_sum as f64 / n;
            self.summary.max_cyclomatic = self
                .files
                .iter()
                .map(|f| f.metrics.cyclomatic)
                .max()
                .unwrap_or(0);
            self.summary.max_cognitive = self
                .files
                .iter()
                .map(|f| f.metrics.cognitive)
                .max()
                .unwrap_or(0);
        }

        // Count entities rated High or Critical (single overall_level() call
        // per item, matched against both levels at once).
        self.summary.high_complexity_files = self
            .files
            .iter()
            .filter(|f| {
                matches!(
                    f.metrics.overall_level(),
                    MetricLevel::High | MetricLevel::Critical
                )
            })
            .count();

        self.summary.high_complexity_functions = self
            .files
            .iter()
            .flat_map(|f| &f.functions)
            .filter(|func| {
                matches!(
                    func.metrics.overall_level(),
                    MetricLevel::High | MetricLevel::Critical
                )
            })
            .count();

        // Group files by language, then compute per-language statistics.
        let mut by_lang: HashMap<String, Vec<&FileMetrics>> = HashMap::new();
        for file in &self.files {
            by_lang.entry(file.language.clone()).or_default().push(file);
        }

        for (lang, files) in by_lang {
            // Groups are never empty: every entry was created by pushing a file,
            // so dividing by the length is safe.
            let n = files.len() as f64;
            let cyclo_sum: u64 = files.iter().map(|f| f.metrics.cyclomatic as u64).sum();
            let cogn_sum: u64 = files.iter().map(|f| f.metrics.cognitive as u64).sum();

            let stats = SummaryStats {
                total_files: files.len(),
                total_functions: files.iter().map(|f| f.functions.len()).sum(),
                total_loc: files.iter().map(|f| f.metrics.loc as u64).sum(),
                total_sloc: files.iter().map(|f| f.metrics.sloc as u64).sum(),
                avg_cyclomatic: cyclo_sum as f64 / n,
                avg_cognitive: cogn_sum as f64 / n,
                max_cyclomatic: files
                    .iter()
                    .map(|f| f.metrics.cyclomatic)
                    .max()
                    .unwrap_or(0),
                max_cognitive: files
                    .iter()
                    .map(|f| f.metrics.cognitive)
                    .max()
                    .unwrap_or(0),
                ..SummaryStats::default()
            };

            self.by_language.insert(lang, stats);
        }
    }
}

impl Default for AnalysisResult {
    fn default() -> Self {
        Self::new()
    }
}

/// Main complexity analyzer.
///
/// Holds one boxed [`LanguageComplexityAnalyzer`] per supported language
/// and dispatches files to them by extension; see `analyze`.
pub struct ComplexityAnalyzer {
    // Registered language backends, searched in order by `get_analyzer`.
    analyzers: Vec<Box<dyn LanguageComplexityAnalyzer>>,
}

impl Default for ComplexityAnalyzer {
    fn default() -> Self {
        Self::new()
    }
}

impl ComplexityAnalyzer {
    /// Create a new complexity analyzer with all language analyzers registered.
    pub fn new() -> Self {
        let analyzers: Vec<Box<dyn LanguageComplexityAnalyzer>> = vec![
            Box::new(RustComplexityAnalyzer::new()),
            Box::new(TypeScriptComplexityAnalyzer::new()),
            Box::new(PythonComplexityAnalyzer::new()),
            Box::new(GoComplexityAnalyzer::new()),
            Box::new(JavaComplexityAnalyzer::new()),
        ];

        Self { analyzers }
    }

    /// Get the analyzer for a file based on its extension, or `None` if no
    /// registered analyzer supports it (or the path has no UTF-8 extension).
    fn get_analyzer(&self, path: &Path) -> Option<&dyn LanguageComplexityAnalyzer> {
        let ext = path.extension()?.to_str()?;
        self.analyzers
            .iter()
            .find(|a| a.extensions().contains(&ext))
            .map(|a| a.as_ref())
    }

    /// Analyze a directory or an explicit file list according to `options`.
    ///
    /// Per-file analysis/read failures are accumulated in
    /// `AnalysisResult::errors` rather than aborting the run; `Err` is only
    /// returned when file collection itself fails.
    pub fn analyze(&self, options: &AnalysisOptions) -> Result<AnalysisResult, String> {
        let start = Instant::now();
        let mut result = AnalysisResult::new();

        // An explicit file list (e.g. from --staged / --modified) takes
        // precedence over walking `options.path`.
        let files = if !options.files.is_empty() {
            options.files.clone()
        } else {
            self.collect_files(&options.path, &options.include, &options.exclude)?
        };

        if options.parallel {
            let results: Vec<_> = files
                .par_iter()
                .filter_map(|path| self.analyze_single_file(path))
                .collect();

            for file_result in results {
                match file_result {
                    Ok(metrics) => result.files.push(metrics),
                    Err(e) => result.errors.push(e),
                }
            }
        } else {
            for path in &files {
                match self.analyze_single_file(path) {
                    Some(Ok(metrics)) => result.files.push(metrics),
                    Some(Err(e)) => result.errors.push(e),
                    // No analyzer for this extension: silently skipped.
                    None => {}
                }
            }
        }

        result.calculate_summary();
        result.duration_ms = start.elapsed().as_millis() as u64;

        Ok(result)
    }

    /// Analyze one file. Returns `None` when no analyzer supports the
    /// extension, `Some(Err(..))` when the file cannot be read or analyzed.
    fn analyze_single_file(&self, path: &Path) -> Option<Result<FileMetrics, String>> {
        let analyzer = self.get_analyzer(path)?;

        let content = match std::fs::read_to_string(path) {
            Ok(c) => c,
            Err(e) => return Some(Err(format!("{}: {}", path.display(), e))),
        };

        Some(analyzer.analyze_file(path, &content))
    }

    /// Walk `path` and collect analyzable source files, applying exclude
    /// patterns first and then include patterns (when any are given).
    fn collect_files(
        &self,
        path: &Path,
        include: &[String],
        exclude: &[String],
    ) -> Result<Vec<PathBuf>, String> {
        let mut files = Vec::new();

        if path.is_file() {
            files.push(path.to_path_buf());
            return Ok(files);
        }

        // Compile each glob matcher ONCE up front. The previous version
        // recompiled every pattern for every file visited — O(files × patterns)
        // glob compilations. Invalid patterns are skipped, matching the
        // original lenient `.ok()` behavior.
        let exclude_matchers: Vec<globset::GlobMatcher> = exclude
            .iter()
            .filter_map(|p| globset::Glob::new(p).ok())
            .map(|g| g.compile_matcher())
            .collect();
        let include_matchers: Vec<globset::GlobMatcher> = include
            .iter()
            .filter_map(|p| globset::Glob::new(p).ok())
            .map(|g| g.compile_matcher())
            .collect();

        let walker = walkdir::WalkDir::new(path)
            .follow_links(false)
            .into_iter()
            .filter_entry(|e| {
                // Never filter out the walk root itself: an explicitly
                // requested hidden directory (e.g. ".config") should still be
                // analyzed. (Previously the hidden-name check applied to the
                // root too, silently yielding zero files.)
                if e.depth() == 0 {
                    return true;
                }

                let path = e.path();
                let name = path.file_name().and_then(|n| n.to_str()).unwrap_or("");

                // Skip hidden files and directories.
                if name.starts_with('.') {
                    return false;
                }

                // Skip common non-source directories.
                if path.is_dir() {
                    const SKIP_DIRS: [&str; 7] = [
                        "node_modules",
                        "target",
                        "build",
                        "dist",
                        "__pycache__",
                        ".git",
                        "vendor",
                    ];
                    if SKIP_DIRS.contains(&name) {
                        return false;
                    }
                }

                true
            });

        for entry in walker.filter_map(|e| e.ok()) {
            let path = entry.path();
            if !path.is_file() {
                continue;
            }

            // Only keep files that a registered analyzer can handle.
            if self.get_analyzer(path).is_none() {
                continue;
            }

            let path_str = path.to_string_lossy();

            // Exclude patterns win: any match drops the file.
            if exclude_matchers.iter().any(|m| m.is_match(&*path_str)) {
                continue;
            }

            // When include patterns were given, the file must match at least
            // one. Checked against the raw `include` list (not the compiled
            // matchers) so that a list of all-invalid patterns still excludes
            // everything, exactly as before.
            if !include.is_empty() {
                let matched = include_matchers.iter().any(|m| m.is_match(&*path_str));
                if !matched {
                    continue;
                }
            }

            files.push(path.to_path_buf());
        }

        Ok(files)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    // Default options: no filter patterns configured, parallelism enabled.
    #[test]
    fn test_analysis_options_default() {
        let options = AnalysisOptions::default();
        assert_eq!(options.include.len(), 0);
        assert_eq!(options.exclude.len(), 0);
        assert!(options.parallel);
    }

    // A freshly constructed result carries no files and zero duration.
    #[test]
    fn test_analysis_result_default() {
        let result = AnalysisResult::new();
        assert_eq!(result.files.len(), 0);
        assert_eq!(result.duration_ms, 0);
    }

    // Analyzing an empty temporary directory should succeed.
    #[test]
    fn test_complexity_analyzer_creation() {
        let analyzer = ComplexityAnalyzer::new();
        let temp_dir = tempfile::tempdir().unwrap();

        let options = AnalysisOptions::new(temp_dir.path().to_path_buf());
        assert!(analyzer.analyze(&options).is_ok());
    }
}