code complexity & repetition analysis tool

feat: basic lcov ingestion and reporting

+1128 -58
+2
.gitignore
··· 19 19 # and can be added to the global gitignore or merged into this file. For a more nuclear 20 20 # option (not recommended) you can uncomment the following to ignore the entire idea folder. 21 21 #.idea/ 22 + .sandbox/ 23 + lcov.info
+18
CHANGELOG.md
··· 1 + # Changelog 2 + 3 + All notable changes to this project will be documented in this file. 4 + 5 + The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 6 + and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 7 + 8 + ## [0.1.0] - 2026-01-13 9 + 10 + ### Added 11 + 12 + - LOC command extension: file ranking and directory aggregation. 13 + - Syntax highlighting using `syntect`. 14 + - Configuration saving functionality. 15 + - Core utilities and commands. 16 + - Clone detector implementation (Rabin-Karp). 17 + - Cyclomatic complexity and LOC metrics. 18 + - mdbook Documentation with example/sample files.
+34 -3
Cargo.lock
··· 329 329 checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" 330 330 331 331 [[package]] 332 + name = "lcov" 333 + version = "0.8.1" 334 + source = "registry+https://github.com/rust-lang/crates.io-index" 335 + checksum = "1ccfa6d5e585a884db65b37f38184e4364eaf74d884ac35d0a90fe9baf80b723" 336 + dependencies = [ 337 + "thiserror 1.0.69", 338 + ] 339 + 340 + [[package]] 332 341 name = "libc" 333 342 version = "0.2.177" 334 343 source = "registry+https://github.com/rust-lang/crates.io-index" ··· 370 379 dependencies = [ 371 380 "anyhow", 372 381 "ignore", 382 + "lcov", 383 + "owo-colors", 373 384 "serde", 374 385 "serde_json", 375 386 "tempfile", 376 - "thiserror", 387 + "thiserror 2.0.17", 377 388 "toml", 378 389 "walkdir", 379 390 ] ··· 640 651 "serde", 641 652 "serde_derive", 642 653 "serde_json", 643 - "thiserror", 654 + "thiserror 2.0.17", 644 655 "walkdir", 645 656 "yaml-rust", 646 657 ] ··· 660 671 661 672 [[package]] 662 673 name = "thiserror" 674 + version = "1.0.69" 675 + source = "registry+https://github.com/rust-lang/crates.io-index" 676 + checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" 677 + dependencies = [ 678 + "thiserror-impl 1.0.69", 679 + ] 680 + 681 + [[package]] 682 + name = "thiserror" 663 683 version = "2.0.17" 664 684 source = "registry+https://github.com/rust-lang/crates.io-index" 665 685 checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8" 666 686 dependencies = [ 667 - "thiserror-impl", 687 + "thiserror-impl 2.0.17", 688 + ] 689 + 690 + [[package]] 691 + name = "thiserror-impl" 692 + version = "1.0.69" 693 + source = "registry+https://github.com/rust-lang/crates.io-index" 694 + checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" 695 + dependencies = [ 696 + "proc-macro2", 697 + "quote", 698 + "syn", 668 699 ] 669 700 670 701 [[package]]
+1 -55
README
··· 9 9 - Goals and metrics 10 10 - Algorithms used 11 11 - Project structure 12 - - Beyond-MVP roadmap 13 12 - Outputs 14 13 - Philosophy 15 14 ··· 75 74 =============================================================================== 76 75 FUTURE 77 76 ------------------------------------------------------------------------------- 78 - A. AST-Based Clone Detection 79 - - Compare abstract syntax trees or subtrees. 80 - - Identifies clones resilient to renamed variables or formatting changes. 81 - - Requires per-language AST adapters. 82 - 83 - B. Cognitive Complexity 84 - - Scores human comprehension cost. 85 - - Rewards flattened control flow, penalizes deep nesting. 86 - - Requires AST-level traversal. 87 - 88 - C. Maintainability Index 89 - - Combines Cyclomatic, Halstead, and LOC into a single number. 90 - - Good for dashboards and longitudinal tracking. 91 - 92 - D. Semantic Clone Detection 93 - - Goes beyond syntax: identifies logically equivalent code. 94 - - Requires control/data-flow analysis. 95 - 96 - E. Dependency Metrics 97 - - Coupling, fan-in/fan-out, depth of inheritance. 98 - - Requires language-specific type-resolution or symbol graph extraction. 99 - 100 - F. Hotspot Analysis 101 - - Combine Git history + complexity metrics. 102 - - Identify files that change often AND are complex. 103 - 104 - G. Incremental Mode 105 - - Cache hashes/graphs. 106 - - Analyze only changed files and touched boundaries. 107 - 108 - H. Rich Reports 109 - - HTML dashboards 110 - - SVG graphs (CFG visualization) 111 - - JSON with stable schema for CI systems 112 - 77 + See todo.txt 113 78 =============================================================================== 114 79 CRATES 115 - 116 80 -------------------------------------------------------------------------------- 117 81 core 118 82 tokenizer ··· 204 168 4. No global state; everything streamed and incremental. 205 169 5. Developer-friendly reporting and clear severity levels. 206 170 6. Configurable thresholds 207 - 208 - =============================================================================== 209 - SUMMARY 210 - 211 - MVP: 212 - - LOC 213 - - Cyclomatic Complexity 214 - - Rabin-Karp Clone Detection 215 - - Halstead Complexity 216 - 217 - Beyond MVP: 218 - - AST-based clones 219 - - Cognitive Complexity 220 - - Maintainability Index 221 - - Coupling metrics 222 - - Call graphs and data-flow graphs 223 - - Hotspot analysis 224 - - Incremental scanning 225 171 226 172 =============================================================================== 227 173 REFERENCES
+66
crates/cli/src/commands/coverage.rs
··· 1 + use anyhow::Result; 2 + use mccabre_core::coverage::{FileCoverage, parse_coverage_from_file}; 3 + use mccabre_core::reporter::{coverage_jsonl::JsonlReporter, coverage_term::report_coverage}; 4 + use owo_colors::OwoColorize; 5 + use std::path::PathBuf; 6 + 7 + pub fn run(from: PathBuf, jsonl: Option<PathBuf>, repo_root: Option<PathBuf>) -> Result<()> { 8 + if !from.exists() { 9 + eprintln!("{}", format!("LCOV file not found: {}", from.display()).red()); 10 + std::process::exit(1); 11 + } 12 + 13 + let report = parse_coverage_from_file(&from, repo_root.as_deref())?; 14 + 15 + if report.files.is_empty() { 16 + eprintln!("{}", "No coverage data found".yellow()); 17 + return Ok(()); 18 + } 19 + 20 + if let Some(jsonl_path) = jsonl { 21 + let mut reporter = JsonlReporter::new(); 22 + reporter.add_report(&report); 23 + reporter.write_to_file(&jsonl_path)?; 24 + 25 + println!( 26 + "{}", 27 + format!("JSONL report written to: {}", jsonl_path.display()) 28 + .green() 29 + .bold() 30 + ); 31 + } 32 + 33 + println!("{}", report_coverage(&report)); 34 + 35 + Ok(()) 36 + } 37 + 38 + pub fn run_file_view(path: PathBuf, from: PathBuf) -> Result<()> { 39 + if !from.exists() { 40 + eprintln!("{}", format!("LCOV file not found: {}", from.display()).red()); 41 + std::process::exit(1); 42 + } 43 + 44 + let report = parse_coverage_from_file(&from, None)?; 45 + 46 + let file_coverage = report.files.iter().find(|f| f.path == path.to_string_lossy()); 47 + 48 + match file_coverage { 49 + Some(file) => { 50 + println!("{}", report_coverage_for_file(file)); 51 + } 52 + None => { 53 + eprintln!( 54 + "{}", 55 + format!("File not found in coverage data: {}", path.display()).red() 56 + ); 57 + std::process::exit(1); 58 + } 59 + } 60 + 61 + Ok(()) 62 + } 63 + 64 + fn report_coverage_for_file(file: &FileCoverage) -> String { 65 + mccabre_core::reporter::coverage_term::format_file_coverage(file, 0) 66 + }
+1
crates/cli/src/commands/mod.rs
··· 1 1 pub mod analyze; 2 2 pub mod clones; 3 3 pub mod complexity; 4 + pub mod coverage; 4 5 pub mod dump_config; 5 6 pub mod loc;
+26
crates/cli/src/main.rs
··· 134 134 #[arg(long)] 135 135 no_gitignore: bool, 136 136 }, 137 + 138 + /// Analyze code coverage from LCOV data 139 + Coverage { 140 + /// Path to LCOV file 141 + #[arg(long, value_name = "PATH")] 142 + from: PathBuf, 143 + 144 + /// Output as JSONL to file 145 + #[arg(long, value_name = "PATH")] 146 + jsonl: Option<PathBuf>, 147 + 148 + /// Repository root for path normalization 149 + #[arg(long, value_name = "PATH")] 150 + repo_root: Option<PathBuf>, 151 + 152 + /// View detailed coverage for a specific file 153 + #[arg(long, value_name = "PATH")] 154 + file: Option<PathBuf>, 155 + }, 137 156 } 138 157 139 158 fn main() -> Result<()> { ··· 173 192 }; 174 193 175 194 commands::loc::run(path, json, rank_by, rank_dirs, config, !no_gitignore) 195 + } 196 + Commands::Coverage { from, jsonl, repo_root, file } => { 197 + if let Some(file) = file { 198 + commands::coverage::run_file_view(file, from) 199 + } else { 200 + commands::coverage::run(from, jsonl, repo_root) 201 + } 176 202 } 177 203 } 178 204 }
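Usage sketch (not part of the diff): assuming the CLI binary is invoked as `mccabre` — the binary name is not shown here — the new subcommand would be exercised roughly as follows; the `cargo llvm-cov` invocation is the usual way to produce an lcov.info file and is likewise an assumption, not something this change adds.

cargo llvm-cov --lcov --output-path lcov.info
mccabre coverage --from lcov.info --repo-root . --jsonl coverage.jsonl
mccabre coverage --from lcov.info --file src/lib.rs

The first form prints the terminal report and writes a JSONL copy; the second prints detailed coverage for a single file via run_file_view (when --file is given, --jsonl and --repo-root are ignored by the dispatch above).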
+2
crates/core/Cargo.toml
··· 5 5 6 6 [dependencies] 7 7 ignore = "0.4.25" 8 + lcov = "0.8" 9 + owo-colors = "4.2.3" 8 10 serde = { version = "1.0", features = ["derive"] } 9 11 serde_json = "1.0" 10 12 thiserror = "2.0"
+153
crates/core/src/coverage/lcov.rs
··· 1 + use crate::{MccabreError, Result}; 2 + use std::collections::BTreeMap; 3 + use std::path::Path; 4 + 5 + pub fn parse_lcov_file(path: &Path, repo_root: Option<&Path>) -> Result<Vec<FileCoverage>> { 6 + let content = std::fs::read_to_string(path) 7 + .map_err(|e| MccabreError::Io(std::io::Error::other(format!("Failed to read LCOV file: {e}"))))?; 8 + 9 + parse_lcov_content(&content, repo_root) 10 + } 11 + 12 + pub fn parse_lcov_content(content: &str, repo_root: Option<&Path>) -> Result<Vec<FileCoverage>> { 13 + let mut files: std::collections::HashMap<String, BTreeMap<u32, u64>> = std::collections::HashMap::new(); 14 + let mut current_file: Option<String> = None; 15 + 16 + for line in content.lines() { 17 + let line = line.trim(); 18 + 19 + if line.is_empty() { 20 + continue; 21 + } 22 + 23 + if let Some(rest) = line.strip_prefix("SF:") { 24 + let path = super::paths::normalize_path(rest, repo_root); 25 + current_file = Some(path); 26 + files.entry(current_file.clone().unwrap()).or_default(); 27 + } else if let Some(rest) = line.strip_prefix("DA:") { 28 + if let Some(ref file) = current_file 29 + && let Some((line_num, count)) = rest.split_once(',') 30 + && let (Ok(line_num), Ok(count)) = (line_num.parse::<u32>(), count.parse::<u64>()) 31 + { 32 + files.entry(file.clone()).or_default().insert(line_num, count); 33 + } 34 + } else if line == "end_of_record" { 35 + current_file = None; 36 + } 37 + } 38 + 39 + let mut file_coverages = Vec::new(); 40 + for (path, lines) in files { 41 + file_coverages.push(FileCoverage::new(path, lines)); 42 + } 43 + 44 + file_coverages.sort_by(|a, b| { 45 + a.summary 46 + .rate 47 + .partial_cmp(&b.summary.rate) 48 + .unwrap_or(std::cmp::Ordering::Equal) 49 + }); 50 + 51 + Ok(file_coverages) 52 + } 53 + 54 + use super::model::FileCoverage; 55 + 56 + #[cfg(test)] 57 + mod tests { 58 + use super::*; 59 + 60 + #[test] 61 + fn test_parse_simple_lcov() { 62 + let lcov = r#"SF:test.rs 63 + DA:1,10 64 + DA:2,5 65 + DA:3,0 66 + end_of_record 67 + "#; 68 + 69 + let files = parse_lcov_content(lcov, None).unwrap(); 70 + assert_eq!(files.len(), 1); 71 + assert_eq!(files[0].path, "test.rs"); 72 + assert_eq!(files[0].lines.len(), 3); 73 + assert_eq!(files[0].lines.get(&1), Some(&10)); 74 + assert_eq!(files[0].lines.get(&2), Some(&5)); 75 + assert_eq!(files[0].lines.get(&3), Some(&0)); 76 + } 77 + 78 + #[test] 79 + fn test_parse_multiple_files() { 80 + let lcov = r#"SF:test1.rs 81 + DA:1,10 82 + DA:2,5 83 + end_of_record 84 + SF:test2.rs 85 + DA:1,1 86 + DA:2,0 87 + DA:3,0 88 + end_of_record 89 + "#; 90 + 91 + let files = parse_lcov_content(lcov, None).unwrap(); 92 + assert_eq!(files.len(), 2); 93 + assert!(files.iter().any(|f| f.path == "test1.rs")); 94 + assert!(files.iter().any(|f| f.path == "test2.rs")); 95 + } 96 + 97 + #[test] 98 + fn test_parse_with_repo_root() { 99 + let lcov = r#"SF:/repo/src/lib.rs 100 + DA:1,10 101 + DA:2,0 102 + end_of_record 103 + "#; 104 + 105 + let root = Path::new("/repo"); 106 + let files = parse_lcov_content(lcov, Some(root)).unwrap(); 107 + assert_eq!(files.len(), 1); 108 + assert_eq!(files[0].path, "src/lib.rs"); 109 + } 110 + 111 + #[test] 112 + fn test_parse_empty_lcov() { 113 + let lcov = ""; 114 + let files = parse_lcov_content(lcov, None).unwrap(); 115 + assert!(files.is_empty()); 116 + } 117 + 118 + #[test] 119 + fn test_parse_invalid_lines() { 120 + let lcov = r#"SF:test.rs 121 + DA:invalid,10 122 + DA:1,5 123 + end_of_record 124 + "#; 125 + 126 + let files = parse_lcov_content(lcov, None).unwrap(); 127 + 
assert_eq!(files.len(), 1); 128 + assert_eq!(files[0].lines.get(&1), Some(&5)); 129 + } 130 + 131 + #[test] 132 + fn test_parse_sorted_by_coverage_rate() { 133 + let lcov = r#"SF:full.rs 134 + DA:1,10 135 + DA:2,10 136 + end_of_record 137 + SF:partial.rs 138 + DA:1,10 139 + DA:2,0 140 + end_of_record 141 + SF:none.rs 142 + DA:1,0 143 + DA:2,0 144 + end_of_record 145 + "#; 146 + 147 + let files = parse_lcov_content(lcov, None).unwrap(); 148 + assert_eq!(files.len(), 3); 149 + assert_eq!(files[0].path, "none.rs"); 150 + assert_eq!(files[1].path, "partial.rs"); 151 + assert_eq!(files[2].path, "full.rs"); 152 + } 153 + }
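Note on scope (derived from the parser above, not new behavior): only SF:, DA:<line>,<count>, and end_of_record records are interpreted; every other LCOV record type (TN, FN/FNDA, BRDA/BRF/BRH, LF/LH, ...) falls through the prefix checks and is skipped, so function and branch coverage are not yet surfaced. A minimal accepted input is:

SF:src/lib.rs
DA:1,3
DA:2,0
end_of_record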
+126
crates/core/src/coverage/misses.rs
··· 1 + use std::collections::BTreeMap; 2 + 3 + pub fn compute_miss_ranges(lines: &BTreeMap<u32, u64>) -> Vec<(u32, u32)> { 4 + let mut miss_ranges = Vec::new(); 5 + let mut current_range: Option<(u32, u32)> = None; 6 + 7 + for (&line, &count) in lines { 8 + if count == 0 { 9 + match current_range { 10 + None => current_range = Some((line, line)), 11 + Some((start, end)) => { 12 + if line == end + 1 { 13 + current_range = Some((start, line)); 14 + } else { 15 + miss_ranges.push(current_range.unwrap()); 16 + current_range = Some((line, line)); 17 + } 18 + } 19 + } 20 + } else if let Some(range) = current_range { 21 + miss_ranges.push(range); 22 + current_range = None; 23 + } 24 + } 25 + 26 + if let Some(range) = current_range { 27 + miss_ranges.push(range); 28 + } 29 + 30 + miss_ranges 31 + } 32 + 33 + #[cfg(test)] 34 + mod tests { 35 + use super::*; 36 + 37 + #[test] 38 + fn test_no_misses() { 39 + let mut lines = BTreeMap::new(); 40 + lines.insert(1, 10); 41 + lines.insert(2, 5); 42 + lines.insert(3, 1); 43 + 44 + let ranges = compute_miss_ranges(&lines); 45 + assert!(ranges.is_empty()); 46 + } 47 + 48 + #[test] 49 + fn test_single_miss() { 50 + let mut lines = BTreeMap::new(); 51 + lines.insert(1, 10); 52 + lines.insert(2, 0); 53 + lines.insert(3, 5); 54 + 55 + let ranges = compute_miss_ranges(&lines); 56 + assert_eq!(ranges, vec![(2, 2)]); 57 + } 58 + 59 + #[test] 60 + fn test_consecutive_misses() { 61 + let mut lines = BTreeMap::new(); 62 + lines.insert(1, 10); 63 + lines.insert(2, 0); 64 + lines.insert(3, 0); 65 + lines.insert(4, 0); 66 + lines.insert(5, 5); 67 + 68 + let ranges = compute_miss_ranges(&lines); 69 + assert_eq!(ranges, vec![(2, 4)]); 70 + } 71 + 72 + #[test] 73 + fn test_multiple_miss_ranges() { 74 + let mut lines = BTreeMap::new(); 75 + lines.insert(1, 10); 76 + lines.insert(2, 0); 77 + lines.insert(3, 0); 78 + lines.insert(4, 5); 79 + lines.insert(5, 0); 80 + lines.insert(6, 10); 81 + 82 + let ranges = compute_miss_ranges(&lines); 83 + assert_eq!(ranges, vec![(2, 3), (5, 5)]); 84 + } 85 + 86 + #[test] 87 + fn test_all_misses() { 88 + let mut lines = BTreeMap::new(); 89 + lines.insert(1, 0); 90 + lines.insert(2, 0); 91 + lines.insert(3, 0); 92 + 93 + let ranges = compute_miss_ranges(&lines); 94 + assert_eq!(ranges, vec![(1, 3)]); 95 + } 96 + 97 + #[test] 98 + fn test_empty_lines() { 99 + let lines = BTreeMap::new(); 100 + let ranges = compute_miss_ranges(&lines); 101 + assert!(ranges.is_empty()); 102 + } 103 + 104 + #[test] 105 + fn test_miss_at_start() { 106 + let mut lines = BTreeMap::new(); 107 + lines.insert(1, 0); 108 + lines.insert(2, 0); 109 + lines.insert(3, 10); 110 + 111 + let ranges = compute_miss_ranges(&lines); 112 + assert_eq!(ranges, vec![(1, 2)]); 113 + } 114 + 115 + #[test] 116 + fn test_miss_at_end() { 117 + let mut lines = BTreeMap::new(); 118 + lines.insert(1, 10); 119 + lines.insert(2, 5); 120 + lines.insert(3, 0); 121 + lines.insert(4, 0); 122 + 123 + let ranges = compute_miss_ranges(&lines); 124 + assert_eq!(ranges, vec![(3, 4)]); 125 + } 126 + }
+60
crates/core/src/coverage/mod.rs
··· 1 + pub mod lcov; 2 + pub mod misses; 3 + pub mod model; 4 + pub mod paths; 5 + 6 + pub use lcov::parse_lcov_content; 7 + pub use lcov::parse_lcov_file; 8 + pub use model::{CoverageReport, CoverageSummary, FileCoverage}; 9 + 10 + use crate::Result; 11 + 12 + pub fn parse_coverage_from_file(path: &std::path::Path, repo_root: Option<&std::path::Path>) -> Result<CoverageReport> { 13 + let files = lcov::parse_lcov_file(path, repo_root)?; 14 + Ok(CoverageReport::new(files)) 15 + } 16 + 17 + pub fn parse_coverage_from_content(content: &str, repo_root: Option<&std::path::Path>) -> Result<CoverageReport> { 18 + let files = lcov::parse_lcov_content(content, repo_root)?; 19 + Ok(CoverageReport::new(files)) 20 + } 21 + 22 + #[cfg(test)] 23 + mod tests { 24 + use super::*; 25 + 26 + #[test] 27 + fn test_parse_coverage_from_content() { 28 + let lcov = r#"SF:test.rs 29 + DA:1,10 30 + DA:2,0 31 + DA:3,5 32 + end_of_record 33 + "#; 34 + 35 + let report = parse_coverage_from_content(lcov, None).unwrap(); 36 + assert_eq!(report.files.len(), 1); 37 + assert_eq!(report.totals.total, 3); 38 + assert_eq!(report.totals.hit, 2); 39 + assert_eq!(report.totals.miss, 1); 40 + } 41 + 42 + #[test] 43 + fn test_parse_coverage_multiple_files() { 44 + let lcov = r#"SF:test1.rs 45 + DA:1,10 46 + DA:2,0 47 + end_of_record 48 + SF:test2.rs 49 + DA:1,5 50 + DA:2,5 51 + end_of_record 52 + "#; 53 + 54 + let report = parse_coverage_from_content(lcov, None).unwrap(); 55 + assert_eq!(report.files.len(), 2); 56 + assert_eq!(report.totals.total, 4); 57 + assert_eq!(report.totals.hit, 3); 58 + assert_eq!(report.totals.miss, 1); 59 + } 60 + }
+119
crates/core/src/coverage/model.rs
··· 1 + use serde::{Deserialize, Serialize}; 2 + 3 + /// Coverage report for the entire codebase 4 + #[derive(Debug, Clone, Serialize, Deserialize)] 5 + pub struct CoverageReport { 6 + pub files: Vec<FileCoverage>, 7 + pub totals: CoverageSummary, 8 + } 9 + 10 + impl CoverageReport { 11 + pub fn new(files: Vec<FileCoverage>) -> Self { 12 + let totals = CoverageSummary::from_files(&files); 13 + Self { files, totals } 14 + } 15 + } 16 + 17 + /// Coverage data for a single file 18 + #[derive(Debug, Clone, Serialize, Deserialize)] 19 + pub struct FileCoverage { 20 + pub path: String, 21 + pub lines: std::collections::BTreeMap<u32, u64>, 22 + pub miss_ranges: Vec<(u32, u32)>, 23 + pub summary: CoverageSummary, 24 + } 25 + 26 + impl FileCoverage { 27 + pub fn new(path: String, lines: std::collections::BTreeMap<u32, u64>) -> Self { 28 + let summary = CoverageSummary::from_lines(&lines); 29 + let miss_ranges = super::misses::compute_miss_ranges(&lines); 30 + Self { path, lines, miss_ranges, summary } 31 + } 32 + } 33 + 34 + /// Coverage summary statistics 35 + #[derive(Debug, Clone, Serialize, Deserialize)] 36 + pub struct CoverageSummary { 37 + pub total: usize, 38 + pub hit: usize, 39 + pub miss: usize, 40 + pub rate: f64, 41 + } 42 + 43 + impl CoverageSummary { 44 + pub fn from_lines(lines: &std::collections::BTreeMap<u32, u64>) -> Self { 45 + let total = lines.len(); 46 + let hit = lines.values().filter(|&&c| c > 0).count(); 47 + let miss = lines.values().filter(|&&c| c == 0).count(); 48 + let rate = if total > 0 { (hit as f64 / total as f64) * 100.0 } else { 0.0 }; 49 + 50 + Self { total, hit, miss, rate } 51 + } 52 + 53 + pub fn from_files(files: &[FileCoverage]) -> Self { 54 + let total: usize = files.iter().map(|f| f.summary.total).sum(); 55 + let hit: usize = files.iter().map(|f| f.summary.hit).sum(); 56 + let miss: usize = files.iter().map(|f| f.summary.miss).sum(); 57 + let rate = if total > 0 { (hit as f64 / total as f64) * 100.0 } else { 0.0 }; 58 + 59 + Self { total, hit, miss, rate } 60 + } 61 + } 62 + 63 + #[cfg(test)] 64 + mod tests { 65 + use super::*; 66 + 67 + #[test] 68 + fn test_coverage_summary_full() { 69 + let mut lines = std::collections::BTreeMap::new(); 70 + lines.insert(1, 10); 71 + lines.insert(2, 5); 72 + lines.insert(3, 1); 73 + 74 + let summary = CoverageSummary::from_lines(&lines); 75 + assert_eq!(summary.total, 3); 76 + assert_eq!(summary.hit, 3); 77 + assert_eq!(summary.miss, 0); 78 + assert_eq!(summary.rate, 100.0); 79 + } 80 + 81 + #[test] 82 + fn test_coverage_summary_partial() { 83 + let mut lines = std::collections::BTreeMap::new(); 84 + lines.insert(1, 10); 85 + lines.insert(2, 0); 86 + lines.insert(3, 5); 87 + 88 + let summary = CoverageSummary::from_lines(&lines); 89 + assert_eq!(summary.total, 3); 90 + assert_eq!(summary.hit, 2); 91 + assert_eq!(summary.miss, 1); 92 + assert!((summary.rate - 66.66666666666666).abs() < 0.0001); 93 + } 94 + 95 + #[test] 96 + fn test_coverage_summary_empty() { 97 + let lines = std::collections::BTreeMap::new(); 98 + let summary = CoverageSummary::from_lines(&lines); 99 + assert_eq!(summary.total, 0); 100 + assert_eq!(summary.hit, 0); 101 + assert_eq!(summary.miss, 0); 102 + assert_eq!(summary.rate, 0.0); 103 + } 104 + 105 + #[test] 106 + fn test_file_coverage() { 107 + let mut lines = std::collections::BTreeMap::new(); 108 + lines.insert(1, 10); 109 + lines.insert(2, 0); 110 + lines.insert(3, 5); 111 + 112 + let file = FileCoverage::new("test.rs".to_string(), lines.clone()); 113 + assert_eq!(file.path, "test.rs"); 114 + 
assert_eq!(file.summary.total, 3); 115 + assert_eq!(file.summary.hit, 2); 116 + assert_eq!(file.summary.miss, 1); 117 + assert_eq!(file.miss_ranges, vec![(2, 2)]); 118 + } 119 + }
+60
crates/core/src/coverage/paths.rs
··· 1 + use std::path::{Path, PathBuf}; 2 + 3 + pub fn normalize_path(path: &str, repo_root: Option<&Path>) -> String { 4 + let path = PathBuf::from(path); 5 + 6 + let normalized = if let Some(root) = repo_root { 7 + if let Ok(stripped) = path.strip_prefix(root) { 8 + if stripped.as_os_str().is_empty() { PathBuf::from(".") } else { stripped.to_path_buf() } 9 + } else { 10 + path 11 + } 12 + } else { 13 + path 14 + }; 15 + 16 + normalized.display().to_string() 17 + } 18 + 19 + #[cfg(test)] 20 + mod tests { 21 + use super::*; 22 + 23 + #[test] 24 + fn test_normalize_path_no_root() { 25 + let path = "/absolute/path/to/file.rs"; 26 + let normalized = normalize_path(path, None); 27 + assert_eq!(normalized, path); 28 + } 29 + 30 + #[test] 31 + fn test_normalize_path_with_root() { 32 + let path = "/repo/src/lib.rs"; 33 + let root = Path::new("/repo"); 34 + let normalized = normalize_path(path, Some(root)); 35 + assert_eq!(normalized, "src/lib.rs"); 36 + } 37 + 38 + #[test] 39 + fn test_normalize_path_no_match() { 40 + let path = "/other/path/file.rs"; 41 + let root = Path::new("/repo"); 42 + let normalized = normalize_path(path, Some(root)); 43 + assert_eq!(normalized, path); 44 + } 45 + 46 + #[test] 47 + fn test_normalize_path_relative() { 48 + let path = "src/lib.rs"; 49 + let normalized = normalize_path(path, None); 50 + assert_eq!(normalized, path); 51 + } 52 + 53 + #[test] 54 + fn test_normalize_path_exact_match() { 55 + let path = "/repo/src/lib.rs"; 56 + let root = Path::new("/repo/src/lib.rs"); 57 + let normalized = normalize_path(path, Some(root)); 58 + assert_eq!(normalized, "."); 59 + } 60 + }
+1
crates/core/src/lib.rs
··· 1 1 pub mod cloner; 2 2 pub mod complexity; 3 3 pub mod config; 4 + pub mod coverage; 4 5 pub mod error; 5 6 pub mod loader; 6 7 pub mod reporter;
crates/core/src/reporter.rs → crates/core/src/reporter/legacy.rs
+139
crates/core/src/reporter/coverage_jsonl.rs
··· 1 + use crate::coverage::{CoverageReport, FileCoverage}; 2 + use std::io::Write; 3 + 4 + pub struct JsonlReporter { 5 + output: Vec<String>, 6 + } 7 + 8 + impl JsonlReporter { 9 + pub fn new() -> Self { 10 + Self { output: Vec::new() } 11 + } 12 + 13 + pub fn add_file(&mut self, file: &FileCoverage) { 14 + let record = serde_json::to_string(file).expect("Failed to serialize file coverage"); 15 + self.output.push(record); 16 + } 17 + 18 + pub fn add_report(&mut self, report: &CoverageReport) { 19 + for file in &report.files { 20 + self.add_file(file); 21 + } 22 + } 23 + 24 + pub fn write_to_file(&self, path: &std::path::Path) -> std::io::Result<()> { 25 + let mut file = std::fs::File::create(path)?; 26 + for line in &self.output { 27 + writeln!(file, "{}", line)?; 28 + } 29 + Ok(()) 30 + } 31 + 32 + pub fn as_string(&self) -> String { 33 + self.output.join("\n") 34 + } 35 + } 36 + 37 + impl Default for JsonlReporter { 38 + fn default() -> Self { 39 + Self::new() 40 + } 41 + } 42 + 43 + #[cfg(test)] 44 + mod tests { 45 + use super::*; 46 + use crate::coverage::FileCoverage; 47 + use std::collections::BTreeMap; 48 + use tempfile::tempdir; 49 + 50 + #[test] 51 + fn test_jsonl_reporter_empty() { 52 + let reporter = JsonlReporter::new(); 53 + assert!(reporter.output.is_empty()); 54 + } 55 + 56 + #[test] 57 + fn test_jsonl_reporter_add_file() { 58 + let mut lines = BTreeMap::new(); 59 + lines.insert(1, 10); 60 + lines.insert(2, 0); 61 + 62 + let file = FileCoverage::new("test.rs".to_string(), lines); 63 + let mut reporter = JsonlReporter::new(); 64 + reporter.add_file(&file); 65 + 66 + assert_eq!(reporter.output.len(), 1); 67 + assert!(reporter.output[0].contains("test.rs")); 68 + } 69 + 70 + #[test] 71 + fn test_jsonl_reporter_add_report() { 72 + let mut lines1 = BTreeMap::new(); 73 + lines1.insert(1, 10); 74 + 75 + let mut lines2 = BTreeMap::new(); 76 + lines2.insert(1, 5); 77 + lines2.insert(2, 0); 78 + 79 + let file1 = FileCoverage::new("test1.rs".to_string(), lines1); 80 + let file2 = FileCoverage::new("test2.rs".to_string(), lines2); 81 + let report = CoverageReport::new(vec![file1, file2]); 82 + 83 + let mut reporter = JsonlReporter::new(); 84 + reporter.add_report(&report); 85 + 86 + assert_eq!(reporter.output.len(), 2); 87 + } 88 + 89 + #[test] 90 + fn test_jsonl_reporter_to_string() { 91 + let mut lines = BTreeMap::new(); 92 + lines.insert(1, 10); 93 + lines.insert(2, 0); 94 + 95 + let file = FileCoverage::new("test.rs".to_string(), lines); 96 + let mut reporter = JsonlReporter::new(); 97 + reporter.add_file(&file); 98 + 99 + let output = reporter.as_string(); 100 + assert!(output.contains("test.rs")); 101 + let lines: Vec<&str> = output.lines().collect(); 102 + assert_eq!(lines.len(), 1); 103 + } 104 + 105 + #[test] 106 + fn test_jsonl_reporter_write_to_file() { 107 + let mut lines = BTreeMap::new(); 108 + lines.insert(1, 10); 109 + lines.insert(2, 0); 110 + 111 + let file = FileCoverage::new("test.rs".to_string(), lines); 112 + let mut reporter = JsonlReporter::new(); 113 + reporter.add_file(&file); 114 + 115 + let temp_dir = tempdir().unwrap(); 116 + let file_path = temp_dir.path().join("coverage.jsonl"); 117 + reporter.write_to_file(&file_path).unwrap(); 118 + 119 + let content = std::fs::read_to_string(&file_path).unwrap(); 120 + assert!(content.contains("test.rs")); 121 + } 122 + 123 + #[test] 124 + fn test_jsonl_reporter_serialization() { 125 + let mut lines = BTreeMap::new(); 126 + lines.insert(1, 10); 127 + lines.insert(2, 0); 128 + lines.insert(3, 5); 129 + 130 + let file 
= FileCoverage::new("test.rs".to_string(), lines); 131 + let mut reporter = JsonlReporter::new(); 132 + reporter.add_file(&file); 133 + 134 + let output = reporter.as_string(); 135 + let parsed: FileCoverage = serde_json::from_str(&output).unwrap(); 136 + assert_eq!(parsed.path, "test.rs"); 137 + assert_eq!(parsed.lines.len(), 3); 138 + } 139 + }
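For illustration, each JSONL line is one serialized FileCoverage. With the field order declared in model.rs and serde_json defaults (integer map keys become JSON strings), a record looks roughly like the following — the path and the exact float formatting of rate are illustrative:

{"path":"src/lib.rs","lines":{"1":10,"2":0,"3":5},"miss_ranges":[[2,2]],"summary":{"total":3,"hit":2,"miss":1,"rate":66.66666666666666}}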
+252
crates/core/src/reporter/coverage_term.rs
··· 1 + use crate::coverage::{CoverageReport, FileCoverage}; 2 + use owo_colors::OwoColorize; 3 + 4 + #[cfg(test)] 5 + fn strip_ansi_codes(s: &str) -> String { 6 + let mut result = String::new(); 7 + let mut chars = s.chars().peekable(); 8 + 9 + while let Some(c) = chars.next() { 10 + if c == '\x1b' { 11 + if chars.peek() == Some(&'[') { 12 + chars.next(); 13 + while let Some(&c) = chars.peek() { 14 + chars.next(); 15 + if c.is_ascii_alphabetic() { 16 + break; 17 + } 18 + } 19 + } 20 + } else { 21 + result.push(c); 22 + } 23 + } 24 + 25 + result 26 + } 27 + 28 + fn wrap_line_ranges(ranges: &[(u32, u32)], max_width: usize) -> String { 29 + let ranges_plain: Vec<String> = ranges 30 + .iter() 31 + .map( 32 + |(start, end)| { 33 + if start == end { start.to_string() } else { format!("{}-{}", start, end) } 34 + }, 35 + ) 36 + .collect(); 37 + 38 + let full_str = ranges_plain.join(", "); 39 + 40 + if full_str.len() <= max_width { 41 + return full_str.red().to_string(); 42 + } 43 + 44 + let mut result = String::new(); 45 + let mut current_line = String::new(); 46 + let mut current_width = 0; 47 + 48 + for (i, range_str) in ranges_plain.iter().enumerate() { 49 + let range_width = range_str.len(); 50 + let comma_sep = ", "; 51 + let sep_width = if current_width > 0 { comma_sep.len() } else { 0 }; 52 + 53 + if current_width + sep_width + range_width <= max_width { 54 + if current_width > 0 { 55 + current_line.push_str(comma_sep); 56 + current_width += comma_sep.len(); 57 + } 58 + current_line.push_str(range_str); 59 + current_width += range_width; 60 + } else { 61 + if !current_line.is_empty() { 62 + result.push_str(&current_line.red().to_string()); 63 + result.push('\n'); 64 + } 65 + current_line = range_str.to_string(); 66 + current_width = range_width; 67 + } 68 + 69 + if i == ranges_plain.len() - 1 && !current_line.is_empty() { 70 + result.push_str(&current_line.red().to_string()); 71 + } 72 + } 73 + 74 + result 75 + } 76 + 77 + pub fn report_coverage(report: &CoverageReport) -> String { 78 + let mut output = String::new(); 79 + 80 + output.push_str(&"=".repeat(80).cyan().to_string()); 81 + output.push('\n'); 82 + output.push_str(&"COVERAGE REPORT".cyan().bold().to_string()); 83 + output.push('\n'); 84 + output.push_str(&"=".repeat(80).cyan().to_string()); 85 + output.push_str("\n\n"); 86 + 87 + output.push_str(&"SUMMARY".green().bold().to_string()); 88 + output.push('\n'); 89 + output.push_str(&"-".repeat(80).cyan().to_string()); 90 + output.push('\n'); 91 + output.push_str(&format!("Total files: {}\n", report.files.len().bold())); 92 + output.push_str(&format!("Total lines: {}\n", report.totals.total.bold())); 93 + output.push_str(&format!( 94 + "Covered lines: {}\n", 95 + report.totals.hit.green().bold() 96 + )); 97 + output.push_str(&format!( 98 + "Uncovered lines: {}\n", 99 + report.totals.miss.red().bold() 100 + )); 101 + 102 + let rate_text = if report.totals.rate >= 80.0 { 103 + format!("{:.2}%", report.totals.rate).green().bold().to_string() 104 + } else if report.totals.rate >= 50.0 { 105 + format!("{:.2}%", report.totals.rate).yellow().bold().to_string() 106 + } else { 107 + format!("{:.2}%", report.totals.rate).red().bold().to_string() 108 + }; 109 + output.push_str(&format!("Coverage rate: {}\n\n", rate_text)); 110 + 111 + if !report.files.is_empty() { 112 + output.push_str(&"FILE COVERAGE".green().bold().to_string()); 113 + output.push('\n'); 114 + output.push_str(&"-".repeat(80).cyan().to_string()); 115 + output.push('\n'); 116 + 117 + for file in &report.files { 118 + 
output.push_str(&format!("{}\n", format_file_coverage(file, 2))); 119 + } 120 + } 121 + 122 + output.push_str(&"=".repeat(80).cyan().to_string()); 123 + output.push('\n'); 124 + 125 + output 126 + } 127 + 128 + pub fn format_file_coverage(file: &FileCoverage, indent: usize) -> String { 129 + let spaces = " ".repeat(indent); 130 + let uncovered_prefix = format!("{} Uncovered: ", spaces); 131 + let mut output = String::new(); 132 + 133 + output.push_str(&spaces); 134 + output.push_str("FILE: "); 135 + let file_path = file.path.bold().to_string(); 136 + let current_width = spaces.len() + "FILE: ".len(); 137 + let remaining_width = 80 - current_width; 138 + 139 + let path_only = file.path.to_string(); 140 + if path_only.len() <= remaining_width { 141 + output.push_str(&file_path); 142 + } else { 143 + for (i, chunk) in path_only.as_bytes().chunks(remaining_width).enumerate() { 144 + if i > 0 { 145 + output.push_str(&format!("\n{} ", spaces)); 146 + } 147 + output.push_str(&String::from_utf8_lossy(chunk).bold().to_string()); 148 + } 149 + } 150 + output.push('\n'); 151 + 152 + let rate_text = if file.summary.rate >= 80.0 { 153 + format!("{:.2}%", file.summary.rate).green().bold().to_string() 154 + } else if file.summary.rate >= 50.0 { 155 + format!("{:.2}%", file.summary.rate).yellow().bold().to_string() 156 + } else { 157 + format!("{:.2}%", file.summary.rate).red().bold().to_string() 158 + }; 159 + 160 + output.push_str(&spaces); 161 + output.push_str(&format!( 162 + " Lines: {} / {} ({})\n", 163 + file.summary.hit.green().bold(), 164 + file.summary.total, 165 + rate_text 166 + )); 167 + 168 + if !file.miss_ranges.is_empty() { 169 + output.push_str(&uncovered_prefix); 170 + let max_width = 80 - uncovered_prefix.len(); 171 + 172 + let wrapped_ranges = wrap_line_ranges(&file.miss_ranges, max_width); 173 + for (i, line) in wrapped_ranges.lines().enumerate() { 174 + if i > 0 { 175 + output.push_str(&" ".repeat(uncovered_prefix.len())); 176 + } 177 + output.push_str(line); 178 + output.push('\n'); 179 + } 180 + } 181 + 182 + output 183 + } 184 + 185 + #[cfg(test)] 186 + mod tests { 187 + use super::*; 188 + use crate::coverage::FileCoverage; 189 + use std::collections::BTreeMap; 190 + 191 + #[test] 192 + fn test_report_coverage_empty() { 193 + let report = CoverageReport::new(vec![]); 194 + let output = report_coverage(&report); 195 + let output = strip_ansi_codes(&output); 196 + 197 + assert!(output.contains("COVERAGE REPORT")); 198 + assert!(output.contains("Total files: 0")); 199 + } 200 + 201 + #[test] 202 + fn test_report_coverage_with_files() { 203 + let mut lines = BTreeMap::new(); 204 + lines.insert(1, 10); 205 + lines.insert(2, 0); 206 + lines.insert(3, 5); 207 + 208 + let file = FileCoverage::new("test.rs".to_string(), lines); 209 + let report = CoverageReport::new(vec![file]); 210 + let output = report_coverage(&report); 211 + let output = strip_ansi_codes(&output); 212 + 213 + assert!(output.contains("COVERAGE REPORT")); 214 + assert!(output.contains("test.rs")); 215 + assert!(output.contains("2 / 3")); 216 + assert!(output.contains("Uncovered: 2")); 217 + } 218 + 219 + #[test] 220 + fn test_format_file_coverage() { 221 + let mut lines = BTreeMap::new(); 222 + lines.insert(1, 10); 223 + lines.insert(2, 0); 224 + lines.insert(3, 5); 225 + lines.insert(4, 0); 226 + lines.insert(5, 0); 227 + 228 + let file = FileCoverage::new("test.rs".to_string(), lines); 229 + let output = format_file_coverage(&file, 2); 230 + let output = strip_ansi_codes(&output); 231 + 232 + 
assert!(output.contains("FILE: test.rs")); 233 + assert!(output.contains("2 / 5")); 234 + assert!(output.contains("Uncovered: 2, 4-5")); 235 + } 236 + 237 + #[test] 238 + fn test_format_file_coverage_full() { 239 + let mut lines = BTreeMap::new(); 240 + lines.insert(1, 10); 241 + lines.insert(2, 5); 242 + lines.insert(3, 1); 243 + 244 + let file = FileCoverage::new("full.rs".to_string(), lines); 245 + let output = format_file_coverage(&file, 2); 246 + let output = strip_ansi_codes(&output); 247 + 248 + assert!(output.contains("FILE: full.rs")); 249 + assert!(output.contains("3 / 3 (100.00%)")); 250 + assert!(!output.contains("Uncovered")); 251 + } 252 + }
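Indicative terminal rendering (ANSI colors stripped; the = and - rules are shortened here, the real output pads them to 80 columns; path and numbers are illustrative):

================================
COVERAGE REPORT
================================

SUMMARY
--------------------------------
Total files: 1
Total lines: 3
Covered lines: 2
Uncovered lines: 1
Coverage rate: 66.67%

FILE COVERAGE
--------------------------------
  FILE: src/lib.rs
   Lines: 2 / 3 (66.67%)
   Uncovered: 2
================================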
+7
crates/core/src/reporter/mod.rs
··· 1 + pub mod coverage_jsonl; 2 + pub mod coverage_term; 3 + pub mod legacy; 4 + 5 + pub use coverage_jsonl::JsonlReporter; 6 + pub use coverage_term::{format_file_coverage, report_coverage}; 7 + pub use legacy::{FileReport, Report, Summary};
+61
todo.txt
··· 1 + ================================================================================ 2 + todo.txt 3 + ================================================================================ 4 + A. Test Coverage Ingestion 5 + - Ingest LCOV/JSON from `cargo llvm-cov` or `cargo test -- --coverage`. 6 + - Render to terminal, HTML, Markdown, JSONL. 7 + - Support path normalization (`--root`, `--strip-prefix`, `--ignore-regex`). 8 + - Support Git diff coverage 9 + 10 + B. AST-Based Clone Detection 11 + - Compare abstract syntax trees or subtrees. 12 + - Identifies clones resilient to renamed variables or formatting changes. 13 + - Requires per-language AST adapters. 14 + 15 + C. Cognitive Complexity 16 + - Scores human comprehension cost. 17 + - Rewards flattened control flow, penalizes deep nesting. 18 + - Requires AST-level traversal. 19 + 20 + D. Maintainability Index 21 + - Combines Cyclomatic, Halstead, and LOC into a single number. 22 + - Good for dashboards and longitudinal tracking. 23 + 24 + E. Semantic Clone Detection 25 + - Goes beyond syntax: identifies logically equivalent code. 26 + - Requires control/data-flow analysis. 27 + 28 + F. Dependency Metrics 29 + - Coupling, fan-in/fan-out, depth of inheritance. 30 + - Requires language-specific type-resolution or symbol graph extraction. 31 + 32 + G. Hotspot Analysis 33 + - Combine Git history + complexity metrics. 34 + - Identify files that change often AND are complex. 35 + 36 + H. Incremental Mode 37 + - Cache hashes/graphs. 38 + - Analyze only changed files and touched boundaries. 39 + 40 + I. Rich Reports 41 + - HTML dashboards 42 + - SVG graphs (CFG visualization) 43 + - JSON with stable schema for CI systems 44 + 45 + ------------------------------------------------------------------------------- 46 + SUMMARY 47 + 48 + MVP: 49 + - LOC 50 + - Cyclomatic Complexity 51 + - Rabin-Karp Clone Detection 52 + - Halstead Complexity 53 + 54 + Beyond MVP: 55 + - AST-based clones 56 + - Cognitive Complexity 57 + - Maintainability Index 58 + - Coupling metrics 59 + - Call graphs and data-flow graphs 60 + - Hotspot analysis 61 + - Incremental scanning