diff --git a/Cargo.lock b/Cargo.lock index 0166daf..47d4a23 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -88,12 +88,42 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" +[[package]] +name = "assert_cmd" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c5bcfa8749ac45dd12cb11055aeeb6b27a3895560d60d71e3c23bf979e60514" +dependencies = [ + "anstyle", + "bstr", + "libc", + "predicates", + "predicates-core", + "predicates-tree", + "wait-timeout", +] + [[package]] name = "autocfg" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" +[[package]] +name = "bit-set" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3" +dependencies = [ + "bit-vec", +] + +[[package]] +name = "bit-vec" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7" + [[package]] name = "bitcode" version = "0.6.9" @@ -197,6 +227,7 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" name = "chainsaw-cli" version = "0.3.0" dependencies = [ + "assert_cmd", "bitcode", "clap", "clap_complete", @@ -211,6 +242,8 @@ dependencies = [ "oxc_resolver", "oxc_span", "oxc_syntax", + "predicates", + "proptest", "rayon", "rustyline", "serde", @@ -396,6 +429,12 @@ dependencies = [ "parking_lot_core", ] +[[package]] +name = "difflib" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8" + [[package]] name = "digest" version = "0.10.7" @@ -525,6 +564,12 @@ dependencies = [ "num-traits", ] +[[package]] 
+name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + [[package]] name = "foldhash" version = "0.1.5" @@ -565,6 +610,18 @@ dependencies = [ "version_check", ] +[[package]] +name = "getrandom" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" +dependencies = [ + "cfg-if", + "libc", + "r-efi", + "wasip2", +] + [[package]] name = "getrandom" version = "0.4.1" @@ -1572,6 +1629,12 @@ version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "610a5acd306ec67f907abe5567859a3c693fb9886eb1f012ab8f2a47bef3db51" +[[package]] +name = "normalize-line-endings" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be" + [[package]] name = "notify" version = "7.0.0" @@ -1985,6 +2048,45 @@ dependencies = [ "zerovec", ] +[[package]] +name = "ppv-lite86" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "predicates" +version = "3.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ada8f2932f28a27ee7b70dd6c1c39ea0675c55a36879ab92f3a715eaa1e63cfe" +dependencies = [ + "anstyle", + "difflib", + "float-cmp", + "normalize-line-endings", + "predicates-core", + "regex", +] + +[[package]] +name = "predicates-core" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cad38746f3166b4031b1a0d39ad9f954dd291e7854fcc0eed52ee41a0b50d144" + +[[package]] +name = "predicates-tree" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d0de1b847b39c8131db0467e9df1ff60e6d0562ab8e9a16e568ad0fdb372e2f2" +dependencies = [ + "predicates-core", + "termtree", +] + [[package]] name = "prettyplease" version = "0.2.37" @@ -2013,6 +2115,31 @@ dependencies = [ "parking_lot", ] +[[package]] +name = "proptest" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37566cb3fdacef14c0737f9546df7cfeadbfbc9fef10991038bf5015d0c80532" +dependencies = [ + "bit-set", + "bit-vec", + "bitflags 2.11.0", + "num-traits", + "rand", + "rand_chacha", + "rand_xorshift", + "regex-syntax", + "rusty-fork", + "tempfile", + "unarray", +] + +[[package]] +name = "quick-error" +version = "1.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" + [[package]] name = "quote" version = "1.0.44" @@ -2038,6 +2165,44 @@ dependencies = [ "nibble_vec", ] +[[package]] +name = "rand" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" +dependencies = [ + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76afc826de14238e6e8c374ddcc1fa19e374fd8dd986b0d2af0d02377261d83c" +dependencies = [ + "getrandom 0.3.4", +] + +[[package]] +name = "rand_xorshift" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "513962919efc330f829edb2535844d1b912b0fbe2ca165d613e4e8788bb05a5a" +dependencies = [ + "rand_core", +] + [[package]] name = "rayon" version = "1.11.0" @@ -2150,6 +2315,18 @@ version = "1.0.22" 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" +[[package]] +name = "rusty-fork" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc6bf79ff24e648f6da1f8d1f011e9cac26491b619e6b9280f2b47f1774e6ee2" +dependencies = [ + "fnv", + "quick-error", + "tempfile", + "wait-timeout", +] + [[package]] name = "rustyline" version = "15.0.0" @@ -2400,12 +2577,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0136791f7c95b1f6dd99f9cc786b91bb81c3800b639b3478e561ddb7be95e5f1" dependencies = [ "fastrand", - "getrandom", + "getrandom 0.4.1", "once_cell", "rustix", "windows-sys 0.61.2", ] +[[package]] +name = "termtree" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f50febec83f5ee1df3015341d8bd429f2d1cc62bcba7ea2076759d315084683" + [[package]] name = "textwrap" version = "0.16.2" @@ -2578,6 +2761,12 @@ dependencies = [ "arrayvec", ] +[[package]] +name = "unarray" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" + [[package]] name = "unicode-bom" version = "2.0.3" @@ -2671,6 +2860,15 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" +[[package]] +name = "wait-timeout" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ac3b126d3914f9849036f826e054cbabdc8519970b8998ddaf3b5bd3c65f11" +dependencies = [ + "libc", +] + [[package]] name = "walkdir" version = "2.5.0" @@ -3088,6 +3286,26 @@ dependencies = [ "synstructure", ] +[[package]] +name = "zerocopy" +version = "0.8.39" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"db6d35d663eadb6c932438e763b262fe1a70987f9ae936e60158176d710cae4a" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.39" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4122cd3169e94605190e77839c9a40d40ed048d305bfdc146e7df40ab0f3e517" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "zerofrom" version = "0.1.6" diff --git a/Cargo.toml b/Cargo.toml index 48dca2a..47c3a60 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -48,6 +48,9 @@ path = "src/main.rs" [dev-dependencies] stats = { path = "stats" } ignore = "0.4" +assert_cmd = "2" +predicates = "3" +proptest = "1" [[bench]] name = "benchmarks" diff --git a/perf.toml b/perf.toml index 1017cbf..fffe3d9 100644 --- a/perf.toml +++ b/perf.toml @@ -82,5 +82,14 @@ files = [ "xtask/src/bench.rs", "xtask/src/find_entry.rs", "tests/perf_registry.rs", + "tests/common/mod.rs", + "tests/cli.rs", + "tests/session_reports.rs", + "tests/json_roundtrip.rs", + "tests/git_refs.rs", + "tests/cache_safety.rs", + "tests/negative_cases.rs", + "tests/property.rs", + "tests/report_battery.rs", ] benchmarks = [] diff --git a/src/error.rs b/src/error.rs index 0e9a40f..e74af62 100644 --- a/src/error.rs +++ b/src/error.rs @@ -38,6 +38,13 @@ pub enum Error { InvalidTopValue(&'static str, i32), /// Readline/REPL initialization failed. Readline(String), + /// --max-weight threshold exceeded. 
+ MaxWeightExceeded { + kind: &'static str, + weight: u64, + module_count: usize, + threshold: u64, + }, } impl Error { @@ -113,6 +120,20 @@ impl std::fmt::Display for Error { write!(f, "invalid value {n} for {flag}: must be -1 (all) or 0+") } Self::Readline(msg) => write!(f, "readline: {msg}"), + Self::MaxWeightExceeded { + kind, + weight, + module_count, + threshold, + } => { + let plural = if *module_count == 1 { "" } else { "s" }; + write!( + f, + "{kind} transitive weight {} ({module_count} module{plural}) exceeds --max-weight threshold {}", + crate::report::format_size(*weight), + crate::report::format_size(*threshold), + ) + } } } } diff --git a/src/git.rs b/src/git.rs index ff36471..10178c3 100644 --- a/src/git.rs +++ b/src/git.rs @@ -15,8 +15,8 @@ pub enum DiffArg { /// /// Detection order: /// 1. Existing file on disk -> Snapshot -/// 2. Path-like arg that doesn't exist (contains `/` or `.json`) -> error (file not found) -/// 3. `git rev-parse --verify ` succeeds -> `GitRef` +/// 2. `git rev-parse --verify ^{commit}` succeeds -> `GitRef` +/// 3. Path-like arg that doesn't exist (contains `/` or `.json`) -> error (file not found) /// 4. 
Neither -> error pub fn classify_diff_arg(arg: &str, repo_root: &Path) -> Result { let path = Path::new(arg); @@ -26,19 +26,9 @@ pub fn classify_diff_arg(arg: &str, repo_root: &Path) -> Result Result Result<() let saved = load_snapshot(snapshot_path)?; let diff = query::diff_snapshots(&saved, &result.to_snapshot(&entry_rel)); let report = report::DiffReport::from_diff(&diff, &saved.entry, &entry_rel, args.limit); - print!("{}", report.to_terminal(color)); + if args.json { + println!("{}", report.to_json()); + } else { + print!("{}", report.to_terminal(color)); + } return Ok(()); } @@ -368,19 +372,12 @@ fn run_trace(args: TraceArgs, color: bool, sc: report::StderrColor) -> Result<() } else { "static" }; - eprintln!( - "{} {kind} transitive weight {} ({} module{}) exceeds --max-weight threshold {}", - sc.error("error:"), - report::format_size(report.static_weight_bytes), - report.static_module_count, - if report.static_module_count == 1 { - "" - } else { - "s" - }, - report::format_size(threshold), - ); - std::process::exit(1); + return Err(Error::MaxWeightExceeded { + kind, + weight: report.static_weight_bytes, + module_count: report.static_module_count, + threshold, + }); } if !args.quiet { @@ -659,13 +656,17 @@ fn build_snapshot_from_ref( } /// Build a snapshot from the current working tree. +/// +/// Uses `no_cache=true` to avoid polluting the disk cache — the "before" +/// side of a diff may have written a reduced (git-tree-only) graph, and +/// the working-tree snapshot must not read or overwrite that entry. 
fn build_snapshot_from_working_tree( entry: &Path, quiet: bool, sc: report::StderrColor, ) -> Result { let start = Instant::now(); - let (loaded, _cache_write) = loader::load_graph(entry, false)?; + let (loaded, _cache_write) = loader::load_graph(entry, true)?; if !quiet { print_build_status(&loaded, start, sc); } diff --git a/src/repl.rs b/src/repl.rs index 1f329b7..0bb0125 100644 --- a/src/repl.rs +++ b/src/repl.rs @@ -48,6 +48,7 @@ pub struct CommandOptions { pub top: Option, pub top_modules: Option, pub ignore: Option>, + pub json: bool, } impl CommandOptions { @@ -69,16 +70,18 @@ impl CommandOptions { /// Extract known `--flag` tokens from an argument list. /// -/// Returns `(CommandOptions, positional_arg)`. The positional argument is -/// the first non-flag token that isn't consumed as a flag value. `--ignore` -/// consumes all subsequent non-flag tokens until the next `--` flag or end -/// of input, so it must appear after the positional arg or be the last flag. -fn parse_flags(tokens: &[&str]) -> (CommandOptions, String) { +/// Returns `Ok((CommandOptions, positional_arg))` or `Err(message)` if an +/// unknown `--flag` is encountered. The positional argument is the first +/// non-flag token that isn't consumed as a flag value. `--ignore` consumes +/// all subsequent non-flag tokens until the next `--` flag or end of input, +/// so it must appear after the positional arg or be the last flag. 
+fn parse_flags(tokens: &[&str]) -> Result<(CommandOptions, String), String> { let mut opts = CommandOptions::default(); let mut positional = Vec::new(); let mut i = 0; while i < tokens.len() { match tokens[i] { + "--json" => opts.json = true, "--include-dynamic" => opts.include_dynamic = Some(true), "--no-include-dynamic" => opts.include_dynamic = Some(false), "--top" => { @@ -109,11 +112,16 @@ fn parse_flags(tokens: &[&str]) -> (CommandOptions, String) { } continue; // i already advanced past consumed tokens } + other if other.starts_with("--") => { + return Err(format!( + "unknown flag '{other}' (try: --json, --include-dynamic, --top, --top-modules, --ignore)" + )); + } other => positional.push(other), } i += 1; } - (opts, positional.join(" ")) + Ok((opts, positional.join(" "))) } /// A parsed REPL command. @@ -132,9 +140,9 @@ pub enum Command { /// List all third-party packages. Packages(CommandOptions), /// List direct imports of a file. - Imports(String), + Imports(String, CommandOptions), /// List files that import a given file. - Importers(String), + Importers(String, CommandOptions), /// Show package info by name. Info(String), /// Set a session option. @@ -153,6 +161,7 @@ pub enum Command { impl Command { /// Parse a single line of user input into a command. + #[allow(clippy::too_many_lines)] pub fn parse(line: &str) -> Self { /// Extract a non-empty positional or return an error message. 
fn require_positional(positional: &str, msg: &str) -> Result { @@ -185,50 +194,68 @@ impl Command { .unwrap_or_default(); match cmd { - "trace" => { - let (opts, positional) = parse_flags(&tokens); - let file = if positional.is_empty() { - None - } else { - Some(positional) - }; - Self::Trace(file, opts) - } - "chain" => { - let (opts, positional) = parse_flags(&tokens); - match require_positional(&positional, "chain requires a target argument") { - Ok(a) => Self::Chain(a, opts), - Err(e) => Self::Unknown(e), - } - } - "cut" => { - let (opts, positional) = parse_flags(&tokens); - match require_positional(&positional, "cut requires a target argument") { - Ok(a) => Self::Cut(a, opts), - Err(e) => Self::Unknown(e), - } - } - "diff" => { - let (opts, positional) = parse_flags(&tokens); - match require_positional(&positional, "diff requires a file argument") { - Ok(a) => Self::Diff(a, opts), - Err(e) => Self::Unknown(e), + "trace" => match parse_flags(&tokens) { + Ok((opts, positional)) => { + let file = if positional.is_empty() { + None + } else { + Some(positional) + }; + Self::Trace(file, opts) } - } - "packages" => { - let (opts, _) = parse_flags(&tokens); - Self::Packages(opts) - } + Err(e) => Self::Unknown(e), + }, "entry" => match require_arg(arg, "entry requires a file argument") { Ok(a) => Self::Entry(a), Err(e) => Self::Unknown(e), }, - "imports" => match require_arg(arg, "imports requires a file argument") { - Ok(a) => Self::Imports(a), + "chain" => match parse_flags(&tokens) { + Ok((opts, positional)) => { + match require_positional(&positional, "chain requires a target argument") { + Ok(a) => Self::Chain(a, opts), + Err(e) => Self::Unknown(e), + } + } + Err(e) => Self::Unknown(e), + }, + "cut" => match parse_flags(&tokens) { + Ok((opts, positional)) => { + match require_positional(&positional, "cut requires a target argument") { + Ok(a) => Self::Cut(a, opts), + Err(e) => Self::Unknown(e), + } + } + Err(e) => Self::Unknown(e), + }, + "diff" => match 
parse_flags(&tokens) { + Ok((opts, positional)) => { + match require_positional(&positional, "diff requires a file argument") { + Ok(a) => Self::Diff(a, opts), + Err(e) => Self::Unknown(e), + } + } + Err(e) => Self::Unknown(e), + }, + "packages" => match parse_flags(&tokens) { + Ok((opts, _)) => Self::Packages(opts), Err(e) => Self::Unknown(e), }, - "importers" => match require_arg(arg, "importers requires a file argument") { - Ok(a) => Self::Importers(a), + "imports" => match parse_flags(&tokens) { + Ok((opts, positional)) => { + match require_positional(&positional, "imports requires a file argument") { + Ok(a) => Self::Imports(a, opts), + Err(e) => Self::Unknown(e), + } + } + Err(e) => Self::Unknown(e), + }, + "importers" => match parse_flags(&tokens) { + Ok((opts, positional)) => { + match require_positional(&positional, "importers requires a file argument") { + Ok(a) => Self::Importers(a, opts), + Err(e) => Self::Unknown(e), + } + } Err(e) => Self::Unknown(e), }, "info" => match require_arg(arg, "info requires a package name") { @@ -496,8 +523,10 @@ pub fn run(entry: &Path, no_color: bool, sc: StderrColor) -> Result<(), Error> { Command::Packages(ref opts) => { dispatch_packages(&session, opts, &settings, color); } - Command::Imports(path) => dispatch_imports(&session, &path, sc), - Command::Importers(path) => dispatch_importers(&session, &path, sc), + Command::Imports(path, ref opts) => dispatch_imports(&session, &path, opts, sc), + Command::Importers(path, ref opts) => { + dispatch_importers(&session, &path, opts, sc); + } Command::Info(name) => dispatch_info(&session, &name, sc), Command::Set(arg) => dispatch_set(&mut settings, &arg, sc), Command::Unset(arg) => dispatch_unset(&mut settings, &arg, sc), @@ -547,7 +576,11 @@ fn dispatch_trace( } else { session.trace_report(&trace_opts, top_modules) }; - print!("{}", report.to_terminal(color)); + if opts.json { + println!("{}", report.to_json()); + } else { + print!("{}", report.to_terminal(color)); + } } fn 
dispatch_entry(session: &mut Session, path: &str, sc: StderrColor) { @@ -574,7 +607,11 @@ fn dispatch_chain( } let (trace_opts, _) = opts.resolve(settings); let report = session.chain_report(target, trace_opts.include_dynamic); - print!("{}", report.to_terminal(color)); + if opts.json { + println!("{}", report.to_json()); + } else { + print!("{}", report.to_terminal(color)); + } } fn dispatch_cut( @@ -592,7 +629,11 @@ fn dispatch_cut( } let (trace_opts, _) = opts.resolve(settings); let report = session.cut_report(target, trace_opts.top_n, trace_opts.include_dynamic); - print!("{}", report.to_terminal(color)); + if opts.json { + println!("{}", report.to_json()); + } else { + print!("{}", report.to_terminal(color)); + } } fn dispatch_diff( @@ -605,7 +646,13 @@ fn dispatch_diff( ) { let (trace_opts, _) = opts.resolve(settings); match session.diff_report(Path::new(path), &trace_opts, trace_opts.top_n) { - Ok(report) => print!("{}", report.to_terminal(color)), + Ok(report) => { + if opts.json { + println!("{}", report.to_json()); + } else { + print!("{}", report.to_terminal(color)); + } + } Err(e) => eprintln!("{} {e}", sc.error("error:")), } } @@ -618,12 +665,33 @@ fn dispatch_packages( ) { let top = opts.top.unwrap_or(settings.top); let report = session.packages_report(top); - print!("{}", report.to_terminal(color)); + if opts.json { + println!("{}", report.to_json()); + } else { + print!("{}", report.to_terminal(color)); + } } -fn dispatch_imports(session: &Session, path: &str, sc: StderrColor) { +fn dispatch_imports(session: &Session, path: &str, opts: &CommandOptions, sc: StderrColor) { match session.imports(Path::new(path)) { Ok(imports) => { + if opts.json { + let entries: Vec<_> = imports + .iter() + .map(|(p, kind)| { + serde_json::json!({ + "path": report::relative_path(p, session.root()), + "kind": match kind { + EdgeKind::Static => "static", + EdgeKind::Dynamic => "dynamic", + EdgeKind::TypeOnly => "type-only", + } + }) + }) + .collect(); + println!("{}", 
serde_json::to_string_pretty(&entries).unwrap()); + return; + } if imports.is_empty() { println!(" (no imports)"); return; @@ -642,9 +710,26 @@ fn dispatch_imports(session: &Session, path: &str, sc: StderrColor) { } } -fn dispatch_importers(session: &Session, path: &str, sc: StderrColor) { +fn dispatch_importers(session: &Session, path: &str, opts: &CommandOptions, sc: StderrColor) { match session.importers(Path::new(path)) { Ok(importers) => { + if opts.json { + let entries: Vec<_> = importers + .iter() + .map(|(p, kind)| { + serde_json::json!({ + "path": report::relative_path(p, session.root()), + "kind": match kind { + EdgeKind::Static => "static", + EdgeKind::Dynamic => "dynamic", + EdgeKind::TypeOnly => "type-only", + } + }) + }) + .collect(); + println!("{}", serde_json::to_string_pretty(&entries).unwrap()); + return; + } if importers.is_empty() { println!(" (no importers)"); return; @@ -813,6 +898,7 @@ fn print_help() { println!(" quit Exit"); println!(); println!("Inline flags (override session settings for one command):"); + println!(" --json Output as JSON instead of terminal format"); println!(" --include-dynamic / --no-include-dynamic Include/exclude dynamic imports"); println!(" --top N Limit heavy deps / packages shown"); println!(" --top-modules N Limit modules by exclusive weight"); @@ -999,14 +1085,14 @@ mod tests { #[test] fn parse_imports() { assert!( - matches!(Command::parse("imports src/foo.ts"), Command::Imports(ref f) if f == "src/foo.ts") + matches!(Command::parse("imports src/foo.ts"), Command::Imports(ref f, _) if f == "src/foo.ts") ); } #[test] fn parse_importers() { assert!( - matches!(Command::parse("importers lib/bar.py"), Command::Importers(ref f) if f == "lib/bar.py") + matches!(Command::parse("importers lib/bar.py"), Command::Importers(ref f, _) if f == "lib/bar.py") ); } @@ -1087,6 +1173,7 @@ mod tests { top: Some(5), top_modules: Some(50), ignore: Some(vec!["zod".into()]), + json: false, }; let (trace_opts, top_modules) = 
opts.resolve(&settings); assert!(trace_opts.include_dynamic); @@ -1097,7 +1184,7 @@ mod tests { #[test] fn parse_flags_no_flags() { - let (opts, remaining) = parse_flags(&["src/index.ts"]); + let (opts, remaining) = parse_flags(&["src/index.ts"]).unwrap(); assert!(opts.include_dynamic.is_none()); assert!(opts.top.is_none()); assert_eq!(remaining, "src/index.ts"); @@ -1105,28 +1192,28 @@ mod tests { #[test] fn parse_flags_dynamic() { - let (opts, remaining) = parse_flags(&["--include-dynamic", "src/index.ts"]); + let (opts, remaining) = parse_flags(&["--include-dynamic", "src/index.ts"]).unwrap(); assert_eq!(opts.include_dynamic, Some(true)); assert_eq!(remaining, "src/index.ts"); } #[test] fn parse_flags_no_dynamic() { - let (opts, remaining) = parse_flags(&["--no-include-dynamic", "src/index.ts"]); + let (opts, remaining) = parse_flags(&["--no-include-dynamic", "src/index.ts"]).unwrap(); assert_eq!(opts.include_dynamic, Some(false)); assert_eq!(remaining, "src/index.ts"); } #[test] fn parse_flags_top() { - let (opts, remaining) = parse_flags(&["--top", "5", "src/index.ts"]); + let (opts, remaining) = parse_flags(&["--top", "5", "src/index.ts"]).unwrap(); assert_eq!(opts.top, Some(5)); assert_eq!(remaining, "src/index.ts"); } #[test] fn parse_flags_top_modules() { - let (opts, remaining) = parse_flags(&["--top-modules", "30", "src/index.ts"]); + let (opts, remaining) = parse_flags(&["--top-modules", "30", "src/index.ts"]).unwrap(); assert_eq!(opts.top_modules, Some(30)); assert_eq!(remaining, "src/index.ts"); } @@ -1135,7 +1222,8 @@ mod tests { fn parse_flags_ignore() { // --ignore is greedy: consumes all non-flag tokens after it. // Users should put --ignore last or use `set ignore`. 
- let (opts, remaining) = parse_flags(&["src/index.ts", "--ignore", "zod", "lodash"]); + let (opts, remaining) = + parse_flags(&["src/index.ts", "--ignore", "zod", "lodash"]).unwrap(); assert_eq!(opts.ignore, Some(vec!["zod".into(), "lodash".into()])); assert_eq!(remaining, "src/index.ts"); } @@ -1143,7 +1231,7 @@ mod tests { #[test] fn parse_flags_ignore_stops_at_next_flag() { let (opts, remaining) = - parse_flags(&["src/index.ts", "--ignore", "zod", "--include-dynamic"]); + parse_flags(&["src/index.ts", "--ignore", "zod", "--include-dynamic"]).unwrap(); assert_eq!(opts.ignore, Some(vec!["zod".to_string()])); assert_eq!(opts.include_dynamic, Some(true)); assert_eq!(remaining, "src/index.ts"); @@ -1151,7 +1239,7 @@ mod tests { #[test] fn parse_flags_multiple() { - let (opts, remaining) = parse_flags(&["--include-dynamic", "--top", "5", "zod"]); + let (opts, remaining) = parse_flags(&["--include-dynamic", "--top", "5", "zod"]).unwrap(); assert_eq!(opts.include_dynamic, Some(true)); assert_eq!(opts.top, Some(5)); assert_eq!(remaining, "zod"); @@ -1159,21 +1247,21 @@ mod tests { #[test] fn parse_flags_empty() { - let (opts, remaining) = parse_flags(&[]); + let (opts, remaining) = parse_flags(&[]).unwrap(); assert!(opts.include_dynamic.is_none()); assert!(remaining.is_empty()); } #[test] fn parse_flags_only_flags_no_positional() { - let (opts, remaining) = parse_flags(&["--include-dynamic"]); + let (opts, remaining) = parse_flags(&["--include-dynamic"]).unwrap(); assert_eq!(opts.include_dynamic, Some(true)); assert!(remaining.is_empty()); } #[test] fn parse_flags_scoped_package_not_treated_as_flag() { - let (opts, remaining) = parse_flags(&["@scope/pkg"]); + let (opts, remaining) = parse_flags(&["@scope/pkg"]).unwrap(); assert!(opts.include_dynamic.is_none()); assert_eq!(remaining, "@scope/pkg"); } @@ -1182,42 +1270,55 @@ mod tests { fn parse_flags_top_non_numeric_preserves_positional() { // When --top is followed by a non-numeric token, the token should not // be 
consumed as the --top value — it stays as a positional arg. - let (opts, remaining) = parse_flags(&["--top", "src/index.ts"]); + let (opts, remaining) = parse_flags(&["--top", "src/index.ts"]).unwrap(); assert!(opts.top.is_none()); assert_eq!(remaining, "src/index.ts"); } #[test] fn parse_flags_top_modules_non_numeric_preserves_positional() { - let (opts, remaining) = parse_flags(&["--top-modules", "src/index.ts"]); + let (opts, remaining) = parse_flags(&["--top-modules", "src/index.ts"]).unwrap(); assert!(opts.top_modules.is_none()); assert_eq!(remaining, "src/index.ts"); } #[test] fn parse_flags_top_rejects_negative_below_minus_one() { - let (opts, _) = parse_flags(&["--top", "-5", "src/index.ts"]); + let (opts, _) = parse_flags(&["--top", "-5", "src/index.ts"]).unwrap(); assert!(opts.top.is_none()); } #[test] fn parse_flags_top_accepts_negative_one() { - let (opts, _) = parse_flags(&["--top", "-1", "src/index.ts"]); + let (opts, _) = parse_flags(&["--top", "-1", "src/index.ts"]).unwrap(); assert_eq!(opts.top, Some(-1)); } #[test] fn parse_flags_top_modules_rejects_negative_below_minus_one() { - let (opts, _) = parse_flags(&["--top-modules", "-5", "src/index.ts"]); + let (opts, _) = parse_flags(&["--top-modules", "-5", "src/index.ts"]).unwrap(); assert!(opts.top_modules.is_none()); } #[test] fn parse_flags_top_modules_accepts_negative_one() { - let (opts, _) = parse_flags(&["--top-modules", "-1", "src/index.ts"]); + let (opts, _) = parse_flags(&["--top-modules", "-1", "src/index.ts"]).unwrap(); assert_eq!(opts.top_modules, Some(-1)); } + #[test] + fn parse_flags_json() { + let (opts, remaining) = parse_flags(&["--json", "src/index.ts"]).unwrap(); + assert!(opts.json); + assert_eq!(remaining, "src/index.ts"); + } + + #[test] + fn parse_flags_unknown_flag_returns_error() { + let err = parse_flags(&["--bogus", "src/index.ts"]).unwrap_err(); + assert!(err.contains("unknown flag '--bogus'")); + } + #[test] fn parse_trace_with_flags() { let cmd = Command::parse("trace 
--include-dynamic --top 5 src/index.ts"); diff --git a/src/report.rs b/src/report.rs index de0fc70..651600c 100644 --- a/src/report.rs +++ b/src/report.rs @@ -356,8 +356,8 @@ pub struct PackagesReport { #[derive(Debug, Clone, Serialize)] pub struct PackageListEntry { pub name: String, - pub size: u64, - pub files: u32, + pub total_size_bytes: u64, + pub file_count: u32, } // --------------------------------------------------------------------------- @@ -849,9 +849,9 @@ impl PackagesReport { out, " {:<40} {:>8} {} file{}", pkg.name, - format_size(pkg.size), - pkg.files, - plural(u64::from(pkg.files)) + format_size(pkg.total_size_bytes), + pkg.file_count, + plural(u64::from(pkg.file_count)) ) .unwrap(); } @@ -1027,15 +1027,15 @@ mod tests { package_count: 2, packages: vec![PackageListEntry { name: "zod".into(), - size: 500, - files: 3, + total_size_bytes: 500, + file_count: 3, }], }; let json: serde_json::Value = serde_json::from_str(&report.to_json()).unwrap(); assert_eq!(json["package_count"], 2); assert_eq!(json["packages"][0]["name"], "zod"); - assert_eq!(json["packages"][0]["size"], 500); - assert_eq!(json["packages"][0]["files"], 3); + assert_eq!(json["packages"][0]["total_size_bytes"], 500); + assert_eq!(json["packages"][0]["file_count"], 3); } #[test] diff --git a/src/session.rs b/src/session.rs index 08c993b..23223e6 100644 --- a/src/session.rs +++ b/src/session.rs @@ -495,7 +495,9 @@ impl Session { target: resolved.label, found_in_graph: resolved.exists, chain_count: chains.len(), - direct_import: cuts.is_empty() && chains.iter().all(|c| c.len() == 2), + direct_import: !chains.is_empty() + && cuts.is_empty() + && chains.iter().all(|c| c.len() == 2), cut_points: cuts .iter() .map(|c| CutEntry { @@ -538,8 +540,8 @@ impl Session { .iter() .map(|pkg| PackageListEntry { name: pkg.name.clone(), - size: pkg.total_reachable_size, - files: pkg.total_reachable_files, + total_size_bytes: pkg.total_reachable_size, + file_count: pkg.total_reachable_files, }) .collect(), 
} @@ -1002,6 +1004,16 @@ mod tests { assert!(report.cut_points.is_empty()); } + #[test] + fn cut_report_nonexistent_target() { + let (_tmp, entry) = test_project(); + let mut session = Session::open(&entry, true).unwrap(); + let report = session.cut_report("nonexistent-pkg", 10, false); + assert!(!report.found_in_graph); + assert_eq!(report.chain_count, 0); + assert!(!report.direct_import); + } + #[test] fn packages_report_empty_for_first_party() { let (_tmp, entry) = test_project(); diff --git a/tests/cache_safety.rs b/tests/cache_safety.rs new file mode 100644 index 0000000..0df1701 --- /dev/null +++ b/tests/cache_safety.rs @@ -0,0 +1,70 @@ +mod common; + +use chainsaw::query::TraceOptions; +use chainsaw::session::Session; + +#[test] +fn trace_then_diff_then_trace_produces_same_results() { + let p = common::TestProject::new(); + + // First trace: build graph (writes cache) + let mut s1 = Session::open(&p.entry, false).unwrap(); + let r1 = s1.trace_report(&TraceOptions::default(), 20); + let weight_before = r1.static_weight_bytes; + let count_before = r1.static_module_count; + drop(s1); + + // Diff: compare index.ts vs b.ts (exercises diff code path) + let mut s2 = Session::open(&p.entry, false).unwrap(); + let b = p.root().join("b.ts"); + let _ = s2.diff_report(&b, &TraceOptions::default(), 10); + drop(s2); + + // Second trace: should load from cache, results must match + let mut s3 = Session::open(&p.entry, false).unwrap(); + let r3 = s3.trace_report(&TraceOptions::default(), 20); + assert_eq!( + r3.static_weight_bytes, weight_before, + "cache corruption: weight changed after diff" + ); + assert_eq!( + r3.static_module_count, count_before, + "cache corruption: module count changed after diff" + ); +} + +/// Multiple diff/trace cycles must produce stable results from cache. 
+/// +/// This is a stronger variant of the single-cycle test above: it runs three +/// diff operations interleaved with traces to verify that repeated diffs +/// don't cause progressive cache degradation. +#[test] +fn repeated_diff_cycles_produce_stable_trace() { + let p = common::TestProject::new(); + let b = p.root().join("b.ts"); + + // Initial trace (writes cache) + let mut s = Session::open(&p.entry, false).unwrap(); + let baseline = s.trace_report(&TraceOptions::default(), 20); + drop(s); + + for cycle in 0..3 { + // Diff operation + let mut s = Session::open(&p.entry, false).unwrap(); + let _ = s.diff_report(&b, &TraceOptions::default(), 10); + drop(s); + + // Verify trace from cache matches baseline + let mut s = Session::open(&p.entry, false).unwrap(); + let r = s.trace_report(&TraceOptions::default(), 20); + assert_eq!( + r.static_weight_bytes, baseline.static_weight_bytes, + "cycle {cycle}: weight diverged from baseline" + ); + assert_eq!( + r.static_module_count, baseline.static_module_count, + "cycle {cycle}: module count diverged from baseline" + ); + drop(s); + } +} diff --git a/tests/cli.rs b/tests/cli.rs new file mode 100644 index 0000000..aff8c93 --- /dev/null +++ b/tests/cli.rs @@ -0,0 +1,326 @@ +mod common; + +use assert_cmd::Command; +use predicates::prelude::*; + +fn chainsaw() -> Command { + assert_cmd::cargo_bin_cmd!("chainsaw") +} + +// --- trace subcommand --- + +#[test] +fn trace_produces_output() { + let p = common::TestProject::new(); + chainsaw() + .args(["trace", "--no-cache"]) + .arg(&p.entry) + .assert() + .success() + .stdout(predicate::str::contains("lodash")); +} + +#[test] +fn trace_json_produces_valid_json() { + let p = common::TestProject::new(); + let output = chainsaw() + .args(["trace", "--json", "--no-cache"]) + .arg(&p.entry) + .output() + .unwrap(); + assert!(output.status.success()); + let v: serde_json::Value = serde_json::from_slice(&output.stdout).unwrap(); + assert!(v["static_weight_bytes"].is_number()); + 
assert!(v["static_module_count"].is_number()); + assert!(v["heavy_packages"].is_array()); + assert!(v["modules_by_cost"].is_array()); +} + +#[test] +fn trace_chain_finds_lodash() { + let p = common::TestProject::new(); + chainsaw() + .args(["trace", "--chain", "lodash", "--no-cache"]) + .arg(&p.entry) + .assert() + .success() + .stdout(predicate::str::contains("lodash")); +} + +#[test] +fn trace_chain_json() { + let p = common::TestProject::new(); + let output = chainsaw() + .args(["trace", "--chain", "lodash", "--json", "--no-cache"]) + .arg(&p.entry) + .output() + .unwrap(); + assert!(output.status.success()); + let v: serde_json::Value = serde_json::from_slice(&output.stdout).unwrap(); + assert!(v["chain_count"].is_number()); + assert!(v["chains"].is_array()); +} + +#[test] +fn trace_cut_finds_cut_points() { + let p = common::TestProject::new(); + chainsaw() + .args(["trace", "--cut", "lodash", "--no-cache"]) + .arg(&p.entry) + .assert() + .success() + .stdout(predicate::str::contains("a.ts")); +} + +#[test] +fn trace_cut_json() { + let p = common::TestProject::new(); + let output = chainsaw() + .args(["trace", "--cut", "lodash", "--json", "--no-cache"]) + .arg(&p.entry) + .output() + .unwrap(); + assert!(output.status.success()); + let v: serde_json::Value = serde_json::from_slice(&output.stdout).unwrap(); + assert!(v["cut_points"].is_array()); + assert!(v["direct_import"].is_boolean()); +} + +#[test] +fn trace_include_dynamic() { + let p = common::TestProject::new(); + // Without --include-dynamic: c.ts is dynamic-only, not in static count + let without = chainsaw() + .args(["trace", "--json", "--no-cache"]) + .arg(&p.entry) + .output() + .unwrap(); + assert!(without.status.success()); + let v_without: serde_json::Value = serde_json::from_slice(&without.stdout).unwrap(); + let static_without = v_without["static_module_count"].as_u64().unwrap(); + let dyn_without = v_without["dynamic_only_module_count"].as_u64().unwrap(); + assert!( + dyn_without > 0, + "c.ts 
should be dynamic-only without --include-dynamic" + ); + + // With --include-dynamic: dynamic modules merge into static count + let with = chainsaw() + .args(["trace", "--json", "--include-dynamic", "--no-cache"]) + .arg(&p.entry) + .output() + .unwrap(); + assert!(with.status.success()); + let v_with: serde_json::Value = serde_json::from_slice(&with.stdout).unwrap(); + let static_with = v_with["static_module_count"].as_u64().unwrap(); + assert!( + static_with > static_without, + "include-dynamic should merge c.ts into static count: {static_with} vs {static_without}" + ); +} + +#[test] +fn trace_top_zero_hides_packages() { + let p = common::TestProject::new(); + let output = chainsaw() + .args(["trace", "--json", "--top", "0", "--no-cache"]) + .arg(&p.entry) + .output() + .unwrap(); + assert!(output.status.success()); + let v: serde_json::Value = serde_json::from_slice(&output.stdout).unwrap(); + assert_eq!(v["heavy_packages"].as_array().unwrap().len(), 0); +} + +#[test] +fn trace_save_creates_snapshot() { + let p = common::TestProject::new(); + let snap = p.root().join("snapshot.json"); + chainsaw() + .args(["trace", "--save", snap.to_str().unwrap(), "--no-cache"]) + .arg(&p.entry) + .assert() + .success(); + assert!(snap.exists()); + let content: serde_json::Value = + serde_json::from_str(&std::fs::read_to_string(&snap).unwrap()).unwrap(); + assert!(content["static_weight"].is_number()); +} + +#[test] +fn trace_diff_from_snapshot() { + let p = common::TestProject::new(); + let snap = p.root().join("snap.json"); + // First, save a snapshot + chainsaw() + .args(["trace", "--save", snap.to_str().unwrap(), "--no-cache"]) + .arg(&p.entry) + .assert() + .success(); + // Then diff against it + chainsaw() + .args(["trace", "--diff-from", snap.to_str().unwrap(), "--no-cache"]) + .arg(&p.entry) + .assert() + .success(); +} + +#[test] +fn trace_diff_from_json() { + let p = common::TestProject::new(); + let snap = p.root().join("snap.json"); + chainsaw() + .args(["trace", 
"--save", snap.to_str().unwrap(), "--no-cache"]) + .arg(&p.entry) + .assert() + .success(); + let output = chainsaw() + .args([ + "trace", + "--diff-from", + snap.to_str().unwrap(), + "--json", + "--no-cache", + ]) + .arg(&p.entry) + .output() + .unwrap(); + assert!(output.status.success()); + let v: serde_json::Value = serde_json::from_slice(&output.stdout).unwrap(); + assert!(v["weight_delta"].is_number()); +} + +#[test] +fn trace_max_weight_exceeded() { + let p = common::TestProject::new(); + // Threshold of 1 byte -- guaranteed to exceed + chainsaw() + .args(["trace", "--max-weight", "1", "--no-cache"]) + .arg(&p.entry) + .assert() + .failure() + .stderr(predicate::str::contains("exceeds --max-weight")); +} + +#[test] +fn trace_max_weight_ok() { + let p = common::TestProject::new(); + // Threshold of 100MB -- guaranteed to be under + chainsaw() + .args(["trace", "--max-weight", "100MB", "--no-cache"]) + .arg(&p.entry) + .assert() + .success(); +} + +#[test] +fn trace_mutually_exclusive_chain_cut() { + let p = common::TestProject::new(); + chainsaw() + .args([ + "trace", + "--chain", + "lodash", + "--cut", + "lodash", + "--no-cache", + ]) + .arg(&p.entry) + .assert() + .failure() + .stderr(predicate::str::contains("cannot be used together")); +} + +#[test] +fn trace_chain_target_is_entry() { + let p = common::TestProject::new(); + chainsaw() + .args(["trace", "--chain", p.entry.to_str().unwrap(), "--no-cache"]) + .arg(&p.entry) + .assert() + .failure(); +} + +// --- packages subcommand --- + +#[test] +fn packages_lists_lodash() { + let p = common::TestProject::new(); + chainsaw() + .args(["packages", "--no-cache"]) + .arg(&p.entry) + .assert() + .success() + .stdout(predicate::str::contains("lodash")); +} + +#[test] +fn packages_json() { + let p = common::TestProject::new(); + let output = chainsaw() + .args(["packages", "--json", "--no-cache"]) + .arg(&p.entry) + .output() + .unwrap(); + assert!(output.status.success()); + let v: serde_json::Value = 
serde_json::from_slice(&output.stdout).unwrap(); + assert!(v["package_count"].is_number()); + assert!(v["packages"].is_array()); + let pkg = &v["packages"][0]; + assert!(pkg["name"].is_string()); + assert!(pkg["total_size_bytes"].is_number()); + assert!(pkg["file_count"].is_number()); +} + +// --- error cases --- + +#[test] +fn missing_entry_file() { + chainsaw() + .args(["trace", "/nonexistent/file.ts"]) + .assert() + .failure() + .stderr(predicate::str::contains("cannot find entry file")); +} + +#[test] +fn entry_is_directory() { + let p = common::TestProject::new(); + chainsaw() + .args(["trace", "--no-cache"]) + .arg(p.root()) + .assert() + .failure() + .stderr(predicate::str::contains("directory")); +} + +#[test] +fn unsupported_file_type() { + let p = common::TestProject::new(); + let rs_file = p.root().join("main.rs"); + std::fs::write(&rs_file, "fn main() {}").unwrap(); + chainsaw() + .args(["trace", "--no-cache"]) + .arg(&rs_file) + .assert() + .failure() + .stderr(predicate::str::contains("unsupported file type")); +} + +// --- quiet flag --- + +#[test] +fn quiet_suppresses_timing() { + let p = common::TestProject::new(); + let output = chainsaw() + .args(["trace", "--quiet", "--no-cache"]) + .arg(&p.entry) + .output() + .unwrap(); + assert!(output.status.success()); + let stderr = String::from_utf8_lossy(&output.stderr); + assert!( + !stderr.contains("ms"), + "quiet should suppress timing, got: {stderr}" + ); +} diff --git a/tests/common/mod.rs b/tests/common/mod.rs new file mode 100644 index 0000000..ea7efb4 --- /dev/null +++ b/tests/common/mod.rs @@ -0,0 +1,85 @@ +use std::path::{Path, PathBuf}; + +/// A self-contained TypeScript project for integration tests. 
+/// +/// Structure: +/// index.ts -> imports ./a, ./b; dynamic import("./c") +/// a.ts -> imports lodash +/// b.ts -> imports ./a (creates a diamond: index->b->a->lodash) +/// c.ts -> standalone (dynamic-only) +/// node_modules/lodash/package.json + index.js +/// +/// Properties: +/// - 4 first-party modules + 1 third-party package (lodash) +/// - Static chain to lodash: index->a->lodash, index->b->a->lodash +/// - Cut point for lodash: a.ts (all static chains pass through it) +/// - Dynamic-only module: c.ts +/// - Diamond dependency: a.ts reachable via index->a and index->b->a +pub struct TestProject { + pub dir: tempfile::TempDir, + pub entry: PathBuf, +} + +impl TestProject { + /// Create the fixture. Caller must keep the returned value alive + /// (dropping `TempDir` deletes the files). + pub fn new() -> Self { + let dir = tempfile::tempdir().unwrap(); + let root = dir.path(); + + std::fs::write( + root.join("package.json"), + r#"{"name":"test-project","version":"1.0.0"}"#, + ) + .unwrap(); + + // index.ts: static imports + one dynamic import + std::fs::write( + root.join("index.ts"), + concat!( + "import { a } from './a';\n", + "import { b } from './b';\n", + "const c = import('./c');\n", + ), + ) + .unwrap(); + + // a.ts: imports a third-party package + std::fs::write( + root.join("a.ts"), + "import _ from 'lodash';\nexport const a = 1;\n", + ) + .unwrap(); + + // b.ts: imports ./a (diamond) + std::fs::write( + root.join("b.ts"), + "import { a } from './a';\nexport const b = a + 1;\n", + ) + .unwrap(); + + // c.ts: standalone, only reachable via dynamic import + std::fs::write(root.join("c.ts"), "export const c = 'dynamic only';\n").unwrap(); + + // node_modules/lodash — minimal third-party package + let lodash = root.join("node_modules/lodash"); + std::fs::create_dir_all(&lodash).unwrap(); + std::fs::write( + lodash.join("package.json"), + r#"{"name":"lodash","version":"4.17.21","main":"index.js"}"#, + ) + .unwrap(); + std::fs::write( + 
lodash.join("index.js"), + "module.exports = { identity: function(x) { return x; } };\n", + ) + .unwrap(); + + let entry = root.join("index.ts"); + Self { dir, entry } + } + + pub fn root(&self) -> &Path { + self.dir.path() + } +} diff --git a/tests/git_refs.rs b/tests/git_refs.rs new file mode 100644 index 0000000..a216dc0 --- /dev/null +++ b/tests/git_refs.rs @@ -0,0 +1,191 @@ +use std::process::Command; + +use chainsaw::git::{DiffArg, classify_diff_arg}; + +/// Create a minimal git repo with one commit. Returns (TempDir, SHA). +fn git_repo() -> (tempfile::TempDir, String) { + let tmp = tempfile::tempdir().unwrap(); + let d = tmp.path(); + let git = |args: &[&str]| { + Command::new("git") + .args(args) + .current_dir(d) + .output() + .unwrap() + }; + git(&["init"]); + git(&["config", "user.email", "t@t.com"]); + git(&["config", "user.name", "T"]); + std::fs::write(d.join("f.txt"), "x").unwrap(); + git(&["add", "."]); + git(&["commit", "-m", "init"]); + let sha = String::from_utf8(git(&["rev-parse", "HEAD"]).stdout) + .unwrap() + .trim() + .to_string(); + (tmp, sha) +} + +// --- Lightweight tags --- + +#[test] +fn lightweight_tag() { + let (tmp, _) = git_repo(); + Command::new("git") + .args(["tag", "v1.0"]) + .current_dir(tmp.path()) + .output() + .unwrap(); + assert!(matches!( + classify_diff_arg("v1.0", tmp.path()), + Ok(DiffArg::GitRef(_)) + )); +} + +// --- Annotated tags --- + +#[test] +fn annotated_tag() { + let (tmp, _) = git_repo(); + Command::new("git") + .args(["tag", "-a", "v2.0", "-m", "release"]) + .current_dir(tmp.path()) + .output() + .unwrap(); + assert!(matches!( + classify_diff_arg("v2.0", tmp.path()), + Ok(DiffArg::GitRef(_)) + )); +} + +// --- Branch names with slashes --- + +#[test] +fn branch_with_single_slash() { + let (tmp, _) = git_repo(); + Command::new("git") + .args(["branch", "feature/auth"]) + .current_dir(tmp.path()) + .output() + .unwrap(); + assert!(matches!( + classify_diff_arg("feature/auth", tmp.path()), + 
Ok(DiffArg::GitRef(_)) + )); +} + +#[test] +fn branch_with_multiple_slashes() { + let (tmp, _) = git_repo(); + Command::new("git") + .args(["branch", "fix/bug/123"]) + .current_dir(tmp.path()) + .output() + .unwrap(); + assert!(matches!( + classify_diff_arg("fix/bug/123", tmp.path()), + Ok(DiffArg::GitRef(_)) + )); +} + +// --- SHA variants --- + +#[test] +fn full_sha() { + let (tmp, sha) = git_repo(); + assert_eq!( + classify_diff_arg(&sha, tmp.path()).unwrap(), + DiffArg::GitRef(sha) + ); +} + +#[test] +fn short_sha_7() { + let (tmp, sha) = git_repo(); + let short = &sha[..7]; + assert!(matches!( + classify_diff_arg(short, tmp.path()), + Ok(DiffArg::GitRef(_)) + )); +} + +#[test] +fn short_sha_4() { + let (tmp, sha) = git_repo(); + let short = &sha[..4]; + assert!(matches!( + classify_diff_arg(short, tmp.path()), + Ok(DiffArg::GitRef(_)) + )); +} + +// --- HEAD variants --- + +#[test] +fn head_tilde_0() { + let (tmp, _) = git_repo(); + assert!(matches!( + classify_diff_arg("HEAD~0", tmp.path()), + Ok(DiffArg::GitRef(_)) + )); +} + +#[test] +fn head_caret() { + let (tmp, _) = git_repo(); + // HEAD^0 peels to the commit itself + assert!(matches!( + classify_diff_arg("HEAD^0", tmp.path()), + Ok(DiffArg::GitRef(_)) + )); +} + +// --- Ambiguity: file vs ref --- + +#[test] +fn file_beats_branch_when_both_exist() { + let (tmp, _) = git_repo(); + // Create a file named "main" -- should resolve as Snapshot + let main_file = tmp.path().join("main"); + std::fs::write(&main_file, "{}").unwrap(); + assert!(matches!( + classify_diff_arg(main_file.to_str().unwrap(), tmp.path()), + Ok(DiffArg::Snapshot(_)) + )); +} + +// --- Error cases --- + +#[test] +fn nonexistent_path_with_slash_is_file_not_found() { + let (tmp, _) = git_repo(); + let result = classify_diff_arg("some/nonexistent/path", tmp.path()); + assert!(result.is_err()); + let err = result.unwrap_err().to_string(); + assert!(err.contains("file not found"), "got: {err}"); +} + +#[test] +fn 
json_extension_nonexistent_is_file_not_found() { + let (tmp, _) = git_repo(); + let result = classify_diff_arg("missing.json", tmp.path()); + assert!(result.is_err()); + let err = result.unwrap_err().to_string(); + assert!(err.contains("file not found"), "got: {err}"); +} + +#[test] +fn nonsense_string_is_not_snapshot_or_ref() { + let (tmp, _) = git_repo(); + let result = classify_diff_arg("xyzzy-nope", tmp.path()); + assert!(result.is_err()); + let err = result.unwrap_err().to_string(); + assert!(err.contains("not a snapshot file"), "got: {err}"); +} + +#[test] +fn empty_string_is_error() { + let (tmp, _) = git_repo(); + let result = classify_diff_arg("", tmp.path()); + assert!(result.is_err()); +} diff --git a/tests/json_roundtrip.rs b/tests/json_roundtrip.rs new file mode 100644 index 0000000..a496edb --- /dev/null +++ b/tests/json_roundtrip.rs @@ -0,0 +1,150 @@ +mod common; + +use chainsaw::query::TraceOptions; +use chainsaw::session::Session; + +/// Parse JSON, assert a field exists with the expected type. 
+fn assert_field(v: &serde_json::Value, key: &str, check: fn(&serde_json::Value) -> bool) { + assert!( + v.get(key).is_some_and(check), + "field '{key}' missing or wrong type in: {v}" + ); +} + +fn assert_no_field(v: &serde_json::Value, key: &str) { + assert!( + v.get(key).is_none(), + "field '{key}' should not be present (serde(skip)): {v}" + ); +} + +fn open() -> (common::TestProject, Session) { + let p = common::TestProject::new(); + let entry = p.entry.clone(); + let session = Session::open(&entry, true).unwrap(); + (p, session) +} + +// --- TraceReport --- + +#[test] +fn trace_report_json_schema() { + let (_p, mut session) = open(); + let json = session.trace_report(&TraceOptions::default(), 20).to_json(); + let v: serde_json::Value = serde_json::from_str(&json).unwrap(); + + assert_field(&v, "entry", |v| v.is_string()); + assert_field(&v, "static_weight_bytes", |v| v.is_number()); + assert_field(&v, "static_module_count", |v| v.is_number()); + assert_field(&v, "dynamic_only_weight_bytes", |v| v.is_number()); + assert_field(&v, "dynamic_only_module_count", |v| v.is_number()); + assert_field(&v, "heavy_packages", |v| v.is_array()); + assert_field(&v, "modules_by_cost", |v| v.is_array()); + assert_field(&v, "total_modules_with_cost", |v| v.is_number()); + + // serde(skip) fields + assert_no_field(&v, "include_dynamic"); + assert_no_field(&v, "top"); + + // Nested: heavy_packages entries + let pkg = &v["heavy_packages"][0]; + assert_field(pkg, "name", |v| v.is_string()); + assert_field(pkg, "total_size_bytes", |v| v.is_number()); + assert_field(pkg, "file_count", |v| v.is_number()); + assert_field(pkg, "chain", |v| v.is_array()); + + // Nested: modules_by_cost entries + let mod_entry = &v["modules_by_cost"][0]; + assert_field(mod_entry, "path", |v| v.is_string()); + assert_field(mod_entry, "exclusive_size_bytes", |v| v.is_number()); +} + +// --- ChainReport --- + +#[test] +fn chain_report_json_schema() { + let (_p, session) = open(); + let json = 
session.chain_report("lodash", false).to_json(); + let v: serde_json::Value = serde_json::from_str(&json).unwrap(); + + assert_field(&v, "target", |v| v.is_string()); + assert_field(&v, "found_in_graph", |v| v.is_boolean()); + assert_field(&v, "chain_count", |v| v.is_number()); + assert_field(&v, "hop_count", |v| v.is_number()); + assert_field(&v, "chains", |v| v.is_array()); + + // Each chain is an array of strings + let chain = &v["chains"][0]; + assert!(chain.is_array()); + assert!(chain[0].is_string()); +} + +// --- CutReport --- + +#[test] +fn cut_report_json_schema() { + let (_p, mut session) = open(); + let json = session.cut_report("lodash", 10, false).to_json(); + let v: serde_json::Value = serde_json::from_str(&json).unwrap(); + + assert_field(&v, "target", |v| v.is_string()); + assert_field(&v, "found_in_graph", |v| v.is_boolean()); + assert_field(&v, "chain_count", |v| v.is_number()); + assert_field(&v, "direct_import", |v| v.is_boolean()); + assert_field(&v, "cut_points", |v| v.is_array()); + + let cut = &v["cut_points"][0]; + assert_field(cut, "module", |v| v.is_string()); + assert_field(cut, "exclusive_size_bytes", |v| v.is_number()); + assert_field(cut, "chains_broken", |v| v.is_number()); +} + +// --- DiffReport --- + +#[test] +fn diff_report_json_schema() { + let (p, mut session) = open(); + let b = p.root().join("b.ts"); + let json = session + .diff_report(&b, &TraceOptions::default(), 10) + .unwrap() + .to_json(); + let v: serde_json::Value = serde_json::from_str(&json).unwrap(); + + assert_field(&v, "entry_a", |v| v.is_string()); + assert_field(&v, "entry_b", |v| v.is_string()); + assert_field(&v, "weight_a", |v| v.is_number()); + assert_field(&v, "weight_b", |v| v.is_number()); + assert_field(&v, "weight_delta", |v| v.is_number()); + assert_field(&v, "dynamic_weight_a", |v| v.is_number()); + assert_field(&v, "dynamic_weight_b", |v| v.is_number()); + assert_field(&v, "dynamic_weight_delta", |v| v.is_number()); + assert_field(&v, "shared_count", 
|v| v.is_number()); + assert_field(&v, "only_in_a", |v| v.is_array()); + assert_field(&v, "only_in_b", |v| v.is_array()); + assert_field(&v, "dynamic_only_in_a", |v| v.is_array()); + assert_field(&v, "dynamic_only_in_b", |v| v.is_array()); + + assert_no_field(&v, "limit"); +} + +// --- PackagesReport --- + +#[test] +fn packages_report_json_schema() { + let (_p, session) = open(); + let json = session.packages_report(10).to_json(); + let v: serde_json::Value = serde_json::from_str(&json).unwrap(); + + assert_field(&v, "package_count", |v| v.is_number()); + assert_field(&v, "packages", |v| v.is_array()); + + let pkg = &v["packages"][0]; + assert_field(pkg, "name", |v| v.is_string()); + assert_field(pkg, "total_size_bytes", |v| v.is_number()); + assert_field(pkg, "file_count", |v| v.is_number()); + + // Old field names must NOT appear + assert_no_field(pkg, "size"); + assert_no_field(pkg, "files"); +} diff --git a/tests/negative_cases.rs b/tests/negative_cases.rs new file mode 100644 index 0000000..3a28268 --- /dev/null +++ b/tests/negative_cases.rs @@ -0,0 +1,291 @@ +mod common; + +use chainsaw::error::Error; +use chainsaw::repl::Command; +use chainsaw::session::Session; + +// --- parse_flags edge cases (via Command::parse) --- + +#[test] +fn unknown_flag_produces_error_command() { + let cmd = Command::parse("trace --bogus"); + assert!(matches!(cmd, Command::Unknown(_))); +} + +#[test] +fn unknown_flag_in_chain_produces_error() { + let cmd = Command::parse("chain lodash --bogus"); + assert!(matches!(cmd, Command::Unknown(_))); +} + +#[test] +fn unknown_flag_in_imports_produces_error() { + let cmd = Command::parse("imports ./a.ts --bogus"); + assert!(matches!(cmd, Command::Unknown(_))); +} + +#[test] +fn unknown_flag_in_importers_produces_error() { + let cmd = Command::parse("importers ./a.ts --bogus"); + assert!(matches!(cmd, Command::Unknown(_))); +} + +#[test] +fn unknown_flag_in_cut_produces_error() { + let cmd = Command::parse("cut lodash --bogus"); + 
assert!(matches!(cmd, Command::Unknown(_))); +} + +#[test] +fn unknown_flag_in_packages_produces_error() { + let cmd = Command::parse("packages --bogus"); + assert!(matches!(cmd, Command::Unknown(_))); +} + +#[test] +fn unknown_flag_in_diff_produces_error() { + let cmd = Command::parse("diff src/other.ts --bogus"); + assert!(matches!(cmd, Command::Unknown(_))); +} + +// --- Command::parse edge cases --- + +#[test] +fn empty_input_is_help() { + assert!(matches!(Command::parse(""), Command::Help)); +} + +#[test] +fn whitespace_only_is_help() { + assert!(matches!(Command::parse(" "), Command::Help)); +} + +#[test] +fn unknown_command() { + let cmd = Command::parse("frobnicate"); + assert!(matches!(cmd, Command::Unknown(_))); +} + +#[test] +fn chain_without_target() { + let cmd = Command::parse("chain"); + assert!(matches!(cmd, Command::Unknown(_))); +} + +#[test] +fn cut_without_target() { + let cmd = Command::parse("cut"); + assert!(matches!(cmd, Command::Unknown(_))); +} + +#[test] +fn imports_without_target() { + let cmd = Command::parse("imports"); + assert!(matches!(cmd, Command::Unknown(_))); +} + +#[test] +fn importers_without_target() { + let cmd = Command::parse("importers"); + assert!(matches!(cmd, Command::Unknown(_))); +} + +#[test] +fn entry_without_path() { + let cmd = Command::parse("entry"); + assert!(matches!(cmd, Command::Unknown(_))); +} + +#[test] +fn diff_without_path() { + let cmd = Command::parse("diff"); + assert!(matches!(cmd, Command::Unknown(_))); +} + +#[test] +fn info_without_name() { + let cmd = Command::parse("info"); + assert!(matches!(cmd, Command::Unknown(_))); +} + +#[test] +fn set_without_option() { + let cmd = Command::parse("set"); + assert!(matches!(cmd, Command::Unknown(_))); +} + +#[test] +fn unset_without_option() { + let cmd = Command::parse("unset"); + assert!(matches!(cmd, Command::Unknown(_))); +} + +// --- Session query edge cases --- + +#[test] +fn chain_report_self_referential() { + let p = common::TestProject::new(); + 
let session = Session::open(&p.entry, true).unwrap(); + // Chain to the entry itself -- the BFS finds the trivial path [entry]. + // The REPL dispatch guards against this ("target is the entry point + // itself"), but chain_report itself just returns what BFS finds. + let entry_name = p.entry.file_name().unwrap().to_str().unwrap(); + let report = session.chain_report(entry_name, false); + assert!(report.found_in_graph); + // Trivial chain: just the entry node, so 0 hops + assert_eq!(report.hop_count, 0); +} + +#[test] +fn chain_report_nonexistent_target() { + let p = common::TestProject::new(); + let session = Session::open(&p.entry, true).unwrap(); + let report = session.chain_report("nonexistent-pkg", false); + assert!(!report.found_in_graph); + assert_eq!(report.chain_count, 0); + assert!(report.chains.is_empty()); +} + +#[test] +fn cut_report_nonexistent_target() { + let p = common::TestProject::new(); + let mut session = Session::open(&p.entry, true).unwrap(); + let report = session.cut_report("nonexistent-pkg", 10, false); + assert!(!report.found_in_graph); + assert_eq!(report.chain_count, 0); + assert!(report.cut_points.is_empty()); + assert!( + !report.direct_import, + "vacuous truth: no chains should not mean direct_import" + ); +} + +#[test] +fn imports_nonexistent_file() { + let p = common::TestProject::new(); + let session = Session::open(&p.entry, true).unwrap(); + let result = session.imports(&p.root().join("nonexistent.ts")); + assert!(result.is_err()); +} + +#[test] +fn importers_nonexistent_file() { + let p = common::TestProject::new(); + let session = Session::open(&p.entry, true).unwrap(); + let result = session.importers(&p.root().join("nonexistent.ts")); + assert!(result.is_err()); +} + +// --- Error Display and hints --- + +#[test] +fn all_error_variants_display_without_panic() { + use std::path::PathBuf; + + let errors: Vec = vec![ + Error::EntryNotFound( + PathBuf::from("/tmp/x.ts"), + std::io::Error::new(std::io::ErrorKind::NotFound, "not 
found"), + ), + Error::EntryIsDirectory(PathBuf::from("/tmp")), + Error::UnsupportedFileType(Some("rs".to_string())), + Error::UnsupportedFileType(None), + Error::EntryNotInGraph(PathBuf::from("index.ts")), + Error::SnapshotRead( + PathBuf::from("snap.json"), + std::io::Error::new(std::io::ErrorKind::NotFound, "not found"), + ), + Error::SnapshotParse( + PathBuf::from("x"), + serde_json::from_str::("invalid").unwrap_err(), + ), + Error::SnapshotWrite( + PathBuf::from("snap.json"), + std::io::Error::new(std::io::ErrorKind::PermissionDenied, "denied"), + ), + Error::MutuallyExclusiveFlags("--chain and --cut".to_string()), + Error::TargetIsEntryPoint("--chain".to_string()), + Error::EntryRequired, + Error::NotAGitRepo, + Error::NotSnapshotOrRef("xyzzy".to_string()), + Error::DiffFileNotFound("missing.json".to_string()), + Error::GitError("failed".to_string()), + Error::InvalidTopValue("--top", -5), + Error::Readline("init failed".to_string()), + Error::MaxWeightExceeded { + kind: "static", + weight: 5_000_000, + module_count: 100, + threshold: 1_000_000, + }, + ]; + for err in &errors { + let msg = err.to_string(); + assert!(!msg.is_empty(), "empty display for {err:?}"); + // hint() should not panic for any variant + let _ = err.hint(); + } +} + +#[test] +fn error_hints_are_present_where_expected() { + use std::path::PathBuf; + + // Variants that SHOULD have hints + assert!( + Error::UnsupportedFileType(Some("rs".to_string())) + .hint() + .is_some() + ); + assert!(Error::UnsupportedFileType(None).hint().is_some()); + assert!( + Error::EntryNotInGraph(PathBuf::from("x.ts")) + .hint() + .is_some() + ); + assert!( + Error::TargetIsEntryPoint("--chain".to_string()) + .hint() + .is_some() + ); + assert!( + Error::TargetIsEntryPoint("--cut".to_string()) + .hint() + .is_some() + ); + assert!(Error::EntryRequired.hint().is_some()); + assert!( + Error::EntryIsDirectory(PathBuf::from("/tmp")) + .hint() + .is_some() + ); + + // Variants that should NOT have hints + 
assert!(Error::NotAGitRepo.hint().is_none()); + assert!(Error::GitError("x".to_string()).hint().is_none()); + assert!(Error::Readline("x".to_string()).hint().is_none()); +} + +// --- Open session with bad entry --- + +#[test] +fn session_open_nonexistent_entry() { + let result = Session::open(std::path::Path::new("/nonexistent/entry.ts"), true); + assert!(result.is_err()); +} + +#[test] +fn session_open_directory_entry() { + let p = common::TestProject::new(); + let result = Session::open(p.root(), true); + assert!(result.is_err()); +} + +#[test] +fn session_open_unsupported_extension() { + let p = common::TestProject::new(); + let rs_file = p.root().join("main.rs"); + std::fs::write(&rs_file, "fn main() {}").unwrap(); + let result = Session::open(&rs_file, true); + assert!(result.is_err()); +} diff --git a/tests/property.rs b/tests/property.rs new file mode 100644 index 0000000..8cef883 --- /dev/null +++ b/tests/property.rs @@ -0,0 +1,37 @@ +use proptest::prelude::*; + +use chainsaw::repl::Command; +use chainsaw::report::format_size; + +proptest! { + /// Command::parse must never panic, regardless of input. + #[test] + fn command_parse_never_panics(input in "\\PC{0,200}") { + let _ = Command::parse(&input); + } + + /// Command::parse with structured REPL-like input. + #[test] + fn command_parse_structured( + cmd in "(trace|chain|cut|diff|packages|imports|importers|entry|set|unset|show|help|quit|info)", + arg in "[a-zA-Z0-9_./@-]{0,50}", + flag in "(--json|--include-dynamic|--top|--top-modules|--ignore|--no-include-dynamic|)", + flag_val in "[a-zA-Z0-9-]{0,10}", + ) { + let line = format!("{cmd} {arg} {flag} {flag_val}"); + let _ = Command::parse(&line); + } + + /// format_size produces non-empty output for any u64. + #[test] + fn format_size_never_empty(n: u64) { + let s = format_size(n); + prop_assert!(!s.is_empty()); + } + + /// format_size never panics for any u64. 
+ #[test] + fn format_size_no_panic(n in 0u64..u64::MAX) { + let _ = format_size(n); + } +} diff --git a/tests/report_battery.rs b/tests/report_battery.rs new file mode 100644 index 0000000..56bfa7c --- /dev/null +++ b/tests/report_battery.rs @@ -0,0 +1,94 @@ +mod common; + +use chainsaw::query::TraceOptions; +use chainsaw::session::Session; + +/// Build all report types from a single session for reuse. +struct Reports { + _project: common::TestProject, + trace_json: String, + trace_terminal: String, + chain_json: String, + chain_terminal: String, + cut_json: String, + cut_terminal: String, + diff_json: String, + diff_terminal: String, + packages_json: String, + packages_terminal: String, +} + +fn build_reports() -> Reports { + let p = common::TestProject::new(); + let entry = p.entry.clone(); + let b = p.root().join("b.ts"); + let mut session = Session::open(&entry, true).unwrap(); + + let trace = session.trace_report(&TraceOptions::default(), 20); + let chain = session.chain_report("lodash", false); + let cut = session.cut_report("lodash", 10, false); + let diff = session + .diff_report(&b, &TraceOptions::default(), 10) + .unwrap(); + let packages = session.packages_report(10); + + Reports { + _project: p, + trace_json: trace.to_json(), + trace_terminal: trace.to_terminal(false), + chain_json: chain.to_json(), + chain_terminal: chain.to_terminal(false), + cut_json: cut.to_json(), + cut_terminal: cut.to_terminal(false), + diff_json: diff.to_json(), + diff_terminal: diff.to_terminal(false), + packages_json: packages.to_json(), + packages_terminal: packages.to_terminal(false), + } +} + +macro_rules! 
report_battery { + ($name:ident, $json_field:ident, $terminal_field:ident) => { + mod $name { + use super::*; + + #[test] + fn json_is_valid() { + let r = build_reports(); + let v: serde_json::Value = serde_json::from_str(&r.$json_field).unwrap(); + assert!(v.is_object(), "top-level should be an object"); + } + + #[test] + fn json_has_no_null_values() { + let r = build_reports(); + let v: serde_json::Value = serde_json::from_str(&r.$json_field).unwrap(); + for (key, val) in v.as_object().unwrap() { + assert!(!val.is_null(), "field '{key}' is null"); + } + } + + #[test] + fn terminal_is_nonempty() { + let r = build_reports(); + assert!(!r.$terminal_field.is_empty()); + } + + #[test] + fn terminal_no_ansi_when_color_false() { + let r = build_reports(); + // ESC character (0x1B) starts ANSI escape sequences + assert!( + !r.$terminal_field.contains('\x1b'), + "terminal output with color=false should have no ANSI codes" + ); + } + } + }; +} + +report_battery!(trace, trace_json, trace_terminal); +report_battery!(chain, chain_json, chain_terminal); +report_battery!(cut, cut_json, cut_terminal); +report_battery!(diff, diff_json, diff_terminal); +report_battery!(packages, packages_json, packages_terminal); diff --git a/tests/session_reports.rs b/tests/session_reports.rs new file mode 100644 index 0000000..e9fa3b5 --- /dev/null +++ b/tests/session_reports.rs @@ -0,0 +1,303 @@ +mod common; + +use chainsaw::query::TraceOptions; +use chainsaw::repl::{CommandOptions, ReplSettings}; +use chainsaw::session::Session; + +fn open_session() -> (common::TestProject, Session) { + let p = common::TestProject::new(); + let entry = p.entry.clone(); + let session = Session::open(&entry, true).unwrap(); + (p, session) +} + +// --- trace_report --- + +#[test] +fn trace_report_returns_valid_json() { + let (_p, mut session) = open_session(); + let report = session.trace_report(&TraceOptions::default(), 20); + let json = report.to_json(); + let v: serde_json::Value = 
serde_json::from_str(&json).unwrap(); + assert!(v["static_weight_bytes"].is_number()); + assert!(v["static_module_count"].is_number()); + assert!(v["heavy_packages"].is_array()); + assert!(v["modules_by_cost"].is_array()); + assert!(v["total_modules_with_cost"].is_number()); + // Skipped fields must not appear in JSON + assert!(v.get("include_dynamic").is_none()); + assert!(v.get("top").is_none()); +} + +#[test] +fn trace_report_static_only_shows_dynamic_separately() { + let (_p, mut session) = open_session(); + // With include_dynamic: false (default), c.ts is reported as dynamic-only + let report = session.trace_report(&TraceOptions::default(), 20); + assert!( + report.dynamic_only_module_count > 0, + "c.ts should be reported as dynamic-only when include_dynamic is false" + ); + assert!(report.dynamic_only_weight_bytes > 0); +} + +#[test] +fn trace_report_include_dynamic_folds_into_static() { + let (_p, mut session) = open_session(); + let static_report = session.trace_report(&TraceOptions::default(), 20); + let static_count = static_report.static_module_count; + + let opts = TraceOptions { + include_dynamic: true, + ..TraceOptions::default() + }; + let dyn_report = session.trace_report(&opts, 20); + // When include_dynamic is true, dynamic modules fold into static count + assert_eq!(dyn_report.dynamic_only_module_count, 0); + assert_eq!(dyn_report.dynamic_only_weight_bytes, 0); + // Total module count should increase (c.ts is now included) + assert!( + dyn_report.static_module_count > static_count, + "include_dynamic should increase module count" + ); +} + +#[test] +fn trace_report_top_zero_hides_packages() { + let (_p, mut session) = open_session(); + let opts = TraceOptions { + top_n: 0, + ..TraceOptions::default() + }; + let report = session.trace_report(&opts, 20); + assert!(report.heavy_packages.is_empty()); +} + +#[test] +fn trace_report_top_modules_zero_hides_modules() { + let (_p, mut session) = open_session(); + let report = 
session.trace_report(&TraceOptions::default(), 0); + assert!(report.modules_by_cost.is_empty()); + assert!( + report.total_modules_with_cost > 0, + "count should still be set" + ); +} + +// --- chain_report --- + +#[test] +fn chain_report_finds_lodash() { + let (_p, session) = open_session(); + let report = session.chain_report("lodash", false); + assert!(report.found_in_graph); + assert!(report.chain_count > 0); + assert_eq!(report.chains.len(), report.chain_count); +} + +#[test] +fn chain_report_json_has_expected_fields() { + let (_p, session) = open_session(); + let report = session.chain_report("lodash", false); + let json = report.to_json(); + let v: serde_json::Value = serde_json::from_str(&json).unwrap(); + assert!(v["target"].is_string()); + assert!(v["found_in_graph"].is_boolean()); + assert!(v["chain_count"].is_number()); + assert!(v["hop_count"].is_number()); + assert!(v["chains"].is_array()); +} + +#[test] +fn chain_report_nonexistent_target() { + let (_p, session) = open_session(); + let report = session.chain_report("nonexistent-pkg", false); + assert!(!report.found_in_graph); + assert_eq!(report.chain_count, 0); + assert!(report.chains.is_empty()); +} + +// --- cut_report --- + +#[test] +fn cut_report_finds_cut_for_lodash() { + let (_p, mut session) = open_session(); + let report = session.cut_report("lodash", 10, false); + assert!(report.found_in_graph); + assert!(!report.cut_points.is_empty()); + // a.ts is the cut point -- all chains to lodash pass through it + let names: Vec<&str> = report + .cut_points + .iter() + .map(|c| c.module.as_str()) + .collect(); + assert!( + names.iter().any(|n| n.contains("a.ts")), + "a.ts should be a cut point, got: {names:?}" + ); +} + +#[test] +fn cut_report_nonexistent_target_not_direct() { + let (_p, mut session) = open_session(); + let report = session.cut_report("nonexistent-pkg", 10, false); + assert!(!report.found_in_graph); + assert!( + !report.direct_import, + "vacuous truth: no chains should not mean 
direct_import" + ); +} + +#[test] +fn cut_report_json_has_expected_fields() { + let (_p, mut session) = open_session(); + let report = session.cut_report("lodash", 10, false); + let json = report.to_json(); + let v: serde_json::Value = serde_json::from_str(&json).unwrap(); + assert!(v["target"].is_string()); + assert!(v["found_in_graph"].is_boolean()); + assert!(v["chain_count"].is_number()); + assert!(v["direct_import"].is_boolean()); + assert!(v["cut_points"].is_array()); +} + +// --- packages_report --- + +#[test] +fn packages_report_lists_lodash() { + let (_p, session) = open_session(); + let report = session.packages_report(10); + assert!(report.package_count > 0); + let names: Vec<&str> = report.packages.iter().map(|p| p.name.as_str()).collect(); + assert!(names.contains(&"lodash")); +} + +#[test] +fn packages_report_json_field_names() { + let (_p, session) = open_session(); + let report = session.packages_report(10); + let json = report.to_json(); + let v: serde_json::Value = serde_json::from_str(&json).unwrap(); + assert!(v["package_count"].is_number()); + let pkg = &v["packages"][0]; + assert!(pkg["name"].is_string()); + assert!(pkg["total_size_bytes"].is_number()); + assert!(pkg["file_count"].is_number()); + // Old field names must NOT appear + assert!( + pkg.get("size").is_none(), + "field should be total_size_bytes, not size" + ); + assert!( + pkg.get("files").is_none(), + "field should be file_count, not files" + ); +} + +#[test] +fn packages_report_top_zero() { + let (_p, session) = open_session(); + let report = session.packages_report(0); + assert!(report.packages.is_empty()); + assert!(report.package_count > 0, "count should still reflect total"); +} + +// --- CommandOptions::resolve wiring --- + +#[test] +fn command_options_resolve_defaults_to_settings() { + let opts = CommandOptions::default(); + let settings = ReplSettings::default(); + let (trace_opts, top_modules) = opts.resolve(&settings); + assert!(!trace_opts.include_dynamic); + 
assert_eq!(trace_opts.top_n, chainsaw::report::DEFAULT_TOP); + assert_eq!(top_modules, chainsaw::report::DEFAULT_TOP_MODULES); +} + +#[test] +fn command_options_resolve_overrides_settings() { + let mut opts = CommandOptions::default(); + opts.include_dynamic = Some(true); + opts.top = Some(5); + opts.top_modules = Some(3); + opts.json = true; + let settings = ReplSettings::default(); + let (trace_opts, top_modules) = opts.resolve(&settings); + assert!(trace_opts.include_dynamic); + assert_eq!(trace_opts.top_n, 5); + assert_eq!(top_modules, 3); +} + +// --- diff_report --- + +#[test] +fn diff_report_against_self_is_zero() { + let p = common::TestProject::new(); + let mut session = Session::open(&p.entry, true).unwrap(); + let report = session + .diff_report(&p.entry, &TraceOptions::default(), 10) + .unwrap(); + // Diffing entry against itself: delta should be 0 + assert_eq!(report.weight_delta, 0); +} + +#[test] +fn diff_report_different_entry_has_delta() { + let p = common::TestProject::new(); + let b_entry = p.root().join("b.ts"); + let mut session = Session::open(&p.entry, true).unwrap(); + let report = session + .diff_report(&b_entry, &TraceOptions::default(), 10) + .unwrap(); + // b.ts has fewer dependencies than index.ts -- delta should be nonzero + assert_ne!(report.weight_delta, 0); +} + +#[test] +fn diff_report_json_fields() { + let p = common::TestProject::new(); + let b_entry = p.root().join("b.ts"); + let mut session = Session::open(&p.entry, true).unwrap(); + let report = session + .diff_report(&b_entry, &TraceOptions::default(), 10) + .unwrap(); + let json = report.to_json(); + let v: serde_json::Value = serde_json::from_str(&json).unwrap(); + assert!(v["weight_delta"].is_number()); + assert!(v["weight_a"].is_number()); + assert!(v["weight_b"].is_number()); + assert!(v["only_in_a"].is_array()); + assert!(v["only_in_b"].is_array()); + // Skipped fields + assert!(v.get("limit").is_none()); +} + +// --- imports / importers --- + +#[test] +fn 
imports_lists_direct_dependencies() { + let p = common::TestProject::new(); + let session = Session::open(&p.entry, true).unwrap(); + let imports = session.imports(&p.entry).unwrap(); + // index.ts imports a.ts and b.ts statically + let names: Vec<String> = imports + .iter() + .map(|(p, _)| p.file_name().unwrap().to_string_lossy().to_string()) + .collect(); + assert!(names.contains(&"a.ts".to_string())); + assert!(names.contains(&"b.ts".to_string())); +} + +#[test] +fn importers_of_a_includes_index_and_b() { + let p = common::TestProject::new(); + let session = Session::open(&p.entry, true).unwrap(); + let a_path = p.root().join("a.ts"); + let importers = session.importers(&a_path).unwrap(); + let names: Vec<String> = importers + .iter() + .map(|(p, _)| p.file_name().unwrap().to_string_lossy().to_string()) + .collect(); + assert!(names.contains(&"index.ts".to_string())); + assert!(names.contains(&"b.ts".to_string())); +} diff --git a/xtask/src/hooks.rs b/xtask/src/hooks.rs index 1318249..2f30f58 100644 --- a/xtask/src/hooks.rs +++ b/xtask/src/hooks.rs @@ -51,7 +51,7 @@ pub fn pre_commit() -> i32 { // Pre-commit can't verify commit SHA (commit doesn't exist yet). // Just check that an attestation exists — pre-push does the full verification. 
- let attestation_path = root.join(".git/perf-attestation.json"); + let attestation_path = git_dir(&root).join("perf-attestation.json"); if !attestation_path.exists() { blocked( "Perf-sensitive files staged but no attestation found.", @@ -82,7 +82,7 @@ pub fn pre_push() -> i32 { return 0; } - let attestation_path = root.join(".git/perf-attestation.json"); + let attestation_path = git_dir(&root).join("perf-attestation.json"); if !attestation_path.exists() { blocked( "Perf-sensitive files changed but no attestation found.", @@ -94,7 +94,7 @@ pub fn pre_push() -> i32 { let json = match std::fs::read_to_string(&attestation_path) { Ok(s) => s, Err(e) => { - eprintln!("BLOCKED: failed to read .git/perf-attestation.json: {e}"); + eprintln!("BLOCKED: failed to read perf-attestation.json: {e}"); return 1; } }; @@ -115,7 +115,7 @@ pub fn pre_push() -> i32 { /// Install git hooks by writing thin shell stubs to .git/hooks/. pub fn install_hooks() -> i32 { let root = project_root(); - let hooks_dir = root.join(".git/hooks"); + let hooks_dir = git_dir(&root).join("hooks"); if !hooks_dir.exists() { eprintln!("Not a git repository: {}", root.display()); @@ -227,6 +227,24 @@ fn project_root() -> PathBuf { PathBuf::from(String::from_utf8(output.stdout).unwrap().trim()) } +/// Resolve the `.git` directory (works in both normal repos and worktrees). +/// +/// In a normal repo this returns `<root>/.git`. In a worktree it returns +/// the worktree-specific git dir (e.g. `<root>/.git/worktrees/<name>`). 
+pub fn git_dir(root: &Path) -> PathBuf { + let output = git_in(root, &["rev-parse", "--git-dir"]) + .output() + .unwrap_or_else(|e| panic!("failed to run git rev-parse --git-dir: {e}")); + assert!(output.status.success(), "git rev-parse --git-dir failed"); + let dir = String::from_utf8(output.stdout).unwrap().trim().to_string(); + let path = PathBuf::from(&dir); + if path.is_absolute() { + path + } else { + root.join(path) + } +} + fn current_branch(root: &Path) -> Option<String> { let output = git_in(root, &["symbolic-ref", "--short", "HEAD"]) .output() diff --git a/xtask/src/perf_validate.rs b/xtask/src/perf_validate.rs index 06eb0e7..490d293 100644 --- a/xtask/src/perf_validate.rs +++ b/xtask/src/perf_validate.rs @@ -118,8 +118,7 @@ pub fn run(baseline: Option<&str>, benchmark_args: &[String]) -> i32 { eprintln!("Failed to write attestation: {e}"); return 1; } - println!("Attestation written to .git/perf-attestation.json"); - println!("You can now push."); + println!("Attestation written. You can now push."); } 0 @@ -243,7 +242,7 @@ fn write_attestation(root: &Path, required_benchmarks: &BTreeSet) -> Res }; let json = serde_json::to_string_pretty(&attestation).map_err(|e| format!("json: {e}"))?; - let path = root.join(".git/perf-attestation.json"); + let path = crate::hooks::git_dir(root).join("perf-attestation.json"); std::fs::write(&path, json).map_err(|e| format!("write {}: {e}", path.display()))?; Ok(())