diff --git a/.cargo/config.toml.iis b/.cargo/config.toml.iis new file mode 100644 index 000000000..168231c97 --- /dev/null +++ b/.cargo/config.toml.iis @@ -0,0 +1,6 @@ +[target.x86_64-unknown-linux-gnu] +linker = "/usr/pack/gcc-14.2.0-af/bin/gcc" + +[env] +CC = "/usr/pack/gcc-14.2.0-af/bin/gcc" +CXX = "/usr/pack/gcc-14.2.0-af/bin/g++" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 84d2fa64e..4420a4f4e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -20,15 +20,17 @@ jobs: - 1.87.0 # minimum supported version continue-on-error: ${{ matrix.rust == 'nightly' }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v6 + with: + submodules: recursive - uses: dtolnay/rust-toolchain@stable with: toolchain: ${{ matrix.rust}} components: rustfmt - name: Build - run: cargo build + run: cargo build --all-features - name: Cargo Test - run: cargo test --all + run: cargo test --workspace --all-features - name: Format (fix with `cargo fmt`) run: cargo fmt -- --check - name: Run unit-tests @@ -38,14 +40,16 @@ jobs: test-windows: runs-on: windows-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v6 + with: + submodules: recursive - uses: dtolnay/rust-toolchain@stable with: toolchain: stable - name: Build - run: cargo build + run: cargo build --all-features - name: Cargo Test - run: cargo test --all + run: cargo test --workspace --all-features - name: Run unit-tests run: tests/run_all.sh shell: bash @@ -53,14 +57,16 @@ jobs: test-macos: runs-on: macos-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v6 + with: + submodules: recursive - uses: dtolnay/rust-toolchain@stable with: toolchain: stable - name: Build - run: cargo build + run: cargo build --all-features - name: Cargo Test - run: cargo test --all + run: cargo test --workspace --all-features - name: Run unit-tests run: tests/run_all.sh shell: bash @@ -69,7 +75,9 @@ jobs: name: Clippy runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v6 + with: + submodules: recursive - uses: dtolnay/rust-toolchain@stable with: toolchain: stable @@ -80,7 +88,7 @@ jobs: name: Unused Dependencies runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v6 - uses: dtolnay/rust-toolchain@stable with: toolchain: stable diff --git a/.github/workflows/cli_regression.yml b/.github/workflows/cli_regression.yml index a03e8f02e..bfdcf9bd7 100644 --- a/.github/workflows/cli_regression.yml +++ b/.github/workflows/cli_regression.yml @@ -8,35 +8,41 @@ jobs: test: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v6 + with: + submodules: recursive - uses: dtolnay/rust-toolchain@stable with: toolchain: stable - name: Run CLI Regression - run: cargo test --test cli_regression -- --ignored + run: cargo test --all-features --test cli_regression -- --ignored env: BENDER_TEST_GOLDEN_BRANCH: ${{ github.base_ref }} test-windows: runs-on: windows-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v6 + with: + submodules: recursive - uses: dtolnay/rust-toolchain@stable with: toolchain: stable - name: Run CLI Regression - run: cargo test --test cli_regression -- --ignored + run: cargo test --all-features --test cli_regression -- --ignored env: BENDER_TEST_GOLDEN_BRANCH: ${{ github.base_ref }} test-macos: runs-on: macos-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v6 + with: + submodules: recursive - uses: dtolnay/rust-toolchain@stable with: toolchain: 
stable - name: Run CLI Regression - run: cargo test --test cli_regression -- --ignored + run: cargo test --all-features --test cli_regression -- --ignored env: BENDER_TEST_GOLDEN_BRANCH: ${{ github.base_ref }} diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index f7d81632e..2df026bd0 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -76,7 +76,7 @@ jobs: -v "$GITHUB_WORKSPACE/target/$platform/$tgtname:/source/target" \ --platform $full_platform \ $tgtname-$platform \ - cargo build --release; + cargo build --release --all-features; shell: bash - name: OS Create Package run: | @@ -121,7 +121,7 @@ jobs: -v "$GITHUB_WORKSPACE/target/$platform/$tgtname:/source/target" \ --platform $full_platform \ $tgtname-$platform \ - cargo build --release; + cargo build --release --all-features; shell: bash - name: OS Create Package run: | @@ -170,7 +170,7 @@ jobs: -v "$GITHUB_WORKSPACE/target/amd64:/source/target" \ --platform linux/amd64 \ manylinux-amd64 \ - cargo build --release; + cargo build --release --all-features; - name: GNU Create Package run: .github/scripts/package.sh amd64 shell: bash @@ -215,7 +215,7 @@ jobs: -v "$GITHUB_WORKSPACE/target/arm64:/source/target" \ --platform linux/arm64 \ manylinux-arm64 \ - cargo build --release; + cargo build --release --all-features; - name: GNU Create Package run: .github/scripts/package.sh arm64 shell: bash @@ -240,7 +240,7 @@ jobs: rustup target add aarch64-apple-darwin cargo install universal2 - name: MacOS Build - run: cargo-universal2 --release + run: cargo-universal2 --release --all-features - name: Get Artifact Name run: | if [[ "$GITHUB_REF" =~ ^refs/tags/v.*$ ]]; then \ diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 000000000..cccf606d2 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "crates/bender-slang/vendor/slang"] + path = crates/bender-slang/vendor/slang + url = https://github.com/MikePopoloski/slang.git diff --git a/CHANGELOG.md b/CHANGELOG.md index df878c2d6..8a73c4436 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,9 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). ## Unreleased +### Added +- Add new `crates/bender-slang` crate that integrates the vendored Slang parser via a Rust/C++ bridge. +- Add new `pickle` command (behind feature `slang`) to parse and re-emit SystemVerilog sources. 
## 0.30.0 - 2026-02-12 ### Added diff --git a/Cargo.lock b/Cargo.lock index 1459bcd84..68d036af7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -114,6 +114,7 @@ version = "0.30.0" dependencies = [ "assert_cmd", "async-recursion", + "bender-slang", "blake2", "clap", "clap_complete", @@ -143,6 +144,16 @@ dependencies = [ "walkdir", ] +[[package]] +name = "bender-slang" +version = "0.1.0" +dependencies = [ + "cmake", + "cxx", + "cxx-build", + "thiserror", +] + [[package]] name = "bitflags" version = "2.10.0" @@ -288,6 +299,26 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3a822ea5bc7590f9d40f1ba12c0dc3c2760f3482c6984db1573ad11031420831" +[[package]] +name = "cmake" +version = "0.1.57" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75443c44cd6b379beb8c5b45d85d0773baf31cce901fe7bb252f4eff3008ef7d" +dependencies = [ + "cc", +] + +[[package]] +name = "codespan-reporting" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af491d569909a7e4dee0ad7db7f5341fef5c614d5b8ec8cf765732aba3cff681" +dependencies = [ + "serde", + "termcolor", + "unicode-width 0.2.2", +] + [[package]] name = "colorchoice" version = "1.0.4" @@ -357,6 +388,68 @@ dependencies = [ "typenum", ] +[[package]] +name = "cxx" +version = "1.0.194" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "747d8437319e3a2f43d93b341c137927ca70c0f5dabeea7a005a73665e247c7e" +dependencies = [ + "cc", + "cxx-build", + "cxxbridge-cmd", + "cxxbridge-flags", + "cxxbridge-macro", + "foldhash 0.2.0", + "link-cplusplus", +] + +[[package]] +name = "cxx-build" +version = "1.0.194" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0f4697d190a142477b16aef7da8a99bfdc41e7e8b1687583c0d23a79c7afc1e" +dependencies = [ + "cc", + "codespan-reporting", + "indexmap", + "proc-macro2", + "quote", + "scratch", + "syn", +] + +[[package]] +name = "cxxbridge-cmd" +version = "1.0.194" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0956799fa8678d4c50eed028f2de1c0552ae183c76e976cf7ca8c4e36a7c328" +dependencies = [ + "clap", + "codespan-reporting", + "indexmap", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "cxxbridge-flags" +version = "1.0.194" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23384a836ab4f0ad98ace7e3955ad2de39de42378ab487dc28d3990392cb283a" + +[[package]] +name = "cxxbridge-macro" +version = "1.0.194" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6acc6b5822b9526adfb4fc377b67128fdd60aac757cc4a741a6278603f763cf" +dependencies = [ + "indexmap", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "deunicode" version = "1.6.2" @@ -459,6 +552,12 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" +[[package]] +name = "foldhash" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77ce24cb58228fbb8aa041425bb1050850ac19177686ea6e0f41a70416f56fdb" + [[package]] name = "futures" version = "0.3.31" @@ -618,7 +717,7 @@ version = "0.15.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" dependencies = [ - "foldhash", + "foldhash 0.1.5", ] [[package]] @@ -801,6 +900,15 @@ dependencies = [ "libc", ] +[[package]] +name = 
"link-cplusplus" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f78c730aaa7d0b9336a299029ea49f9ee53b0ed06e9202e8cb7db9bae7b8c82" +dependencies = [ + "cc", +] + [[package]] name = "linux-raw-sys" version = "0.11.0" @@ -1240,6 +1348,12 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" +[[package]] +name = "scratch" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d68f2ec51b097e4c1a75b681a8bec621909b5e91f15bb7b840c4f2f7b01148b2" + [[package]] name = "semver" version = "1.0.27" @@ -1467,6 +1581,15 @@ dependencies = [ "unicode-segmentation", ] +[[package]] +name = "termcolor" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" +dependencies = [ + "winapi-util", +] + [[package]] name = "termtree" version = "0.5.1" diff --git a/Cargo.toml b/Cargo.toml index 91eb08861..eeca629ff 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -13,7 +13,12 @@ license = "Apache-2.0 OR MIT" edition = "2024" rust-version = "1.87.0" +[workspace] +members = ["crates/bender-slang"] + [dependencies] +bender-slang = { path = "crates/bender-slang", optional = true} + serde = { version = "1", features = ["derive"] } serde_yaml_ng = "0.10" serde_json = "1" @@ -49,3 +54,6 @@ dunce = "1.0.4" [dev-dependencies] assert_cmd = "2.1.1" pretty_assertions = "1.4" + +[features] +slang = ["dep:bender-slang"] diff --git a/README.md b/README.md index 052d831d1..f4ae5aecc 100644 --- a/README.md +++ b/README.md @@ -58,6 +58,12 @@ cargo install bender ``` If you need a specific version of Bender (e.g., `0.21.0`), append ` --version 0.21.0` to that command. +To enable optional features (including the Slang-backed `pickle` command), install with: +```sh +cargo install bender --all-features +``` +This may increase build time and additional build dependencies. + To install Bender system-wide, you can simply copy the binary you have obtained from one of the above methods to one of the system directories on your `PATH`. Even better, some Linux distributions have Bender in their repositories. We are currently aware of: ### [ArchLinux ![aur-shield](https://img.shields.io/aur/version/bender)][aur-bender] @@ -518,6 +524,30 @@ Supported formats: Furthermore, similar flags to the `sources` command exist. +### `pickle` --- Parse and rewrite SystemVerilog sources with Slang + +The `bender pickle` command parses SystemVerilog sources with Slang and prints the resulting source again. It supports optional renaming and trimming of unreachable files for specified top modules. + +This command is only available when Bender is built with Slang support (for example via `cargo install bender --all-features`). + +Useful options: +- `--top `: Trim output to files reachable from one or more top modules. +- `--prefix ` / `--suffix `: Add a prefix and/or suffix to renamed symbols. +- `--exclude-rename `: Exclude specific symbols from renaming. +- `--ast-json`: Emit AST JSON instead of source code. +- `--expand-macros`, `--strip-comments`, `--squash-newlines`: Control output formatting. +- `-I `, `-D `: Add extra include directories and preprocessor defines. + +Examples: + +```sh +# Keep only files reachable from top module `top`. +bender pickle --top my_top + +# Rename symbols, but keep selected names unchanged. 
+bender pickle --top my_top --prefix p_ --suffix _s --exclude-rename my_top +``` + ### `update` --- Re-resolve dependencies diff --git a/crates/bender-slang/Cargo.toml b/crates/bender-slang/Cargo.toml new file mode 100644 index 000000000..b660dcca0 --- /dev/null +++ b/crates/bender-slang/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "bender-slang" +version = "0.1.0" +edition = "2024" + +[dependencies] +cxx = "1.0.194" +thiserror = "2.0.12" + +[build-dependencies] +cmake = "0.1.57" +cxx-build = "1.0.194" diff --git a/crates/bender-slang/README.md b/crates/bender-slang/README.md new file mode 100644 index 000000000..a105eb6de --- /dev/null +++ b/crates/bender-slang/README.md @@ -0,0 +1,19 @@ +# bender-slang + +`bender-slang` provides the C++ bridge between `bender` and the vendored [Slang](https://github.com/MikePopoloski/slang) parser infrastructure. + +It is used by Bender's optional Slang-backed features, most notably the `pickle` command. + +## IIS Environment Setup + +In the IIS environment on Linux, a newer GCC toolchain is required to build `bender-slang`. Simply copy the provided Cargo configuration file to use the appropriate toolchain: + +```sh +cp .cargo/config.toml.iis .cargo/config.toml +``` + +Then, you can build or install bender with the usual Cargo command: + +```sh +cargo install --path . --features slang +``` diff --git a/crates/bender-slang/build.rs b/crates/bender-slang/build.rs new file mode 100644 index 000000000..7db0396f7 --- /dev/null +++ b/crates/bender-slang/build.rs @@ -0,0 +1,137 @@ +// Copyright (c) 2025 ETH Zurich +// Tim Fischer + +fn main() { + let target_os = std::env::var("CARGO_CFG_TARGET_OS").unwrap(); + let target_env = std::env::var("CARGO_CFG_TARGET_ENV").unwrap(); + let build_profile = std::env::var("PROFILE").unwrap(); + let cmake_profile = match (target_env.as_str(), build_profile.as_str()) { + // Rust MSVC links against the release CRT; + // using C++ Debug CRT (/MDd) causes LNK2038 mismatches. + ("msvc", _) => "RelWithDebInfo", + (_, "debug") => "Debug", + _ => "Release", + }; + + // Create the configuration builder + let mut slang_lib = cmake::Config::new("vendor/slang"); + + // Common defines to give to both Slang and the Bridge + // Note: It is very important to provide the same defines and flags + // to both the Slang library build and the C++ bridge build to avoid + // ABI incompatibilities. Otherwise, this will cause segfaults at runtime. + let mut common_cxx_defines = vec![ + ("SLANG_USE_MIMALLOC", "1"), + ("SLANG_USE_THREADS", "1"), + ("SLANG_BOOST_SINGLE_HEADER", "1"), + ]; + + // Add debug define if in debug build + if build_profile == "debug" && (target_env != "msvc") { + common_cxx_defines.push(("SLANG_DEBUG", "1")); + common_cxx_defines.push(("SLANG_ASSERT_ENABLED", "1")); + }; + + // Common compiler flags + let common_cxx_flags = if target_env == "msvc" { + vec!["/std:c++20", "/EHsc", "/utf-8"] + } else { + vec!["-std=c++20"] + }; + + // Apply cmake configuration for Slang library + slang_lib + .define("SLANG_INCLUDE_TESTS", "OFF") + .define("SLANG_INCLUDE_TOOLS", "OFF") + // Forces installation into 'lib' instead of 'lib64' on some systems. + .define("CMAKE_INSTALL_LIBDIR", "lib") + // Disable finding system-installed packages, we want to fetch and build them from source. 
+ .define("CMAKE_DISABLE_FIND_PACKAGE_fmt", "ON") + .define("CMAKE_DISABLE_FIND_PACKAGE_mimalloc", "ON") + .define("CMAKE_DISABLE_FIND_PACKAGE_Boost", "ON") + .profile(cmake_profile); + + // Apply common defines and flags + for (def, value) in common_cxx_defines.iter() { + slang_lib.define(def, *value); + slang_lib.cxxflag(format!("-D{}={}", def, value)); + } + for flag in common_cxx_flags.iter() { + slang_lib.cxxflag(flag); + } + + // Build the slang library + let dst = slang_lib.build(); + let lib_dir = dst.join("lib"); + + // Configure Linker to find Slang static library + println!("cargo:rustc-link-search=native={}", lib_dir.display()); + println!("cargo:rustc-link-lib=static=svlang"); + + // Link the additional libraries based on build profile. + let (fmt_lib, mimalloc_lib) = match (target_env.as_str(), build_profile.as_str()) { + ("msvc", _) => ("fmt", "mimalloc"), + (_, "debug") => ("fmtd", "mimalloc-debug"), + _ => ("fmt", "mimalloc"), + }; + + println!("cargo:rustc-link-lib=static={fmt_lib}"); + println!("cargo:rustc-link-lib=static={mimalloc_lib}"); + + if target_os == "windows" { + println!("cargo:rustc-link-lib=advapi32"); + } + + // Compile the C++ Bridge + let mut bridge_build = cxx_build::bridge("src/lib.rs"); + bridge_build + .file("cpp/slang_bridge.cpp") + .flag_if_supported("-std=c++20") + .include("vendor/slang/include") + .include("vendor/slang/external") + .include(dst.join("include")); + + // Linux: we try static linking of libstdc++ to avoid issues on older distros. + if target_os == "linux" { + // Determine the C++ compiler to use. Respect the CXX environment variable if set. + let compiler = std::env::var("CXX").unwrap_or_else(|_| "g++".to_string()); + // We search for the static libstdc++ file using g++ + let output = std::process::Command::new(&compiler) + .args(&["-print-file-name=libstdc++.a"]) + .output() + .expect("Failed to run g++"); + + if output.status.success() { + let path_str = std::str::from_utf8(&output.stdout).unwrap().trim(); + let path = std::path::Path::new(path_str); + + if path.is_absolute() && path.exists() { + if let Some(parent) = path.parent() { + // Add the directory containing libstdc++.a to the link search path + println!("cargo:rustc-link-search=native={}", parent.display()); + } + + bridge_build.cpp_set_stdlib(None); + println!("cargo:rustc-link-lib=static=stdc++"); + } else { + println!( + "cargo:warning=Could not find static libstdc++.a, falling back to dynamic linking" + ); + } + } + } + + // Apply common defines and flags to the bridge build as well + for (def, value) in common_cxx_defines.iter() { + bridge_build.define(def, *value); + } + for flag in common_cxx_flags.iter() { + bridge_build.flag(flag); + } + + bridge_build.compile("slang-bridge"); + + println!("cargo:rerun-if-changed=src/lib.rs"); + println!("cargo:rerun-if-changed=cpp/slang_bridge.cpp"); + println!("cargo:rerun-if-changed=cpp/slang_bridge.h"); +} diff --git a/crates/bender-slang/cpp/slang_bridge.cpp b/crates/bender-slang/cpp/slang_bridge.cpp new file mode 100644 index 000000000..029548552 --- /dev/null +++ b/crates/bender-slang/cpp/slang_bridge.cpp @@ -0,0 +1,355 @@ +// Copyright (c) 2025 ETH Zurich +// Tim Fischer + +#include "slang_bridge.h" + +#include "bender-slang/src/lib.rs.h" +#include "slang/diagnostics/DiagnosticEngine.h" +#include "slang/diagnostics/TextDiagnosticClient.h" +#include "slang/syntax/CSTSerializer.h" +#include "slang/syntax/SyntaxPrinter.h" +#include "slang/syntax/SyntaxVisitor.h" +#include "slang/text/Json.h" + +#include +#include 
+#include +#include + +using namespace slang; +using namespace slang::driver; +using namespace slang::syntax; +using namespace slang::parsing; + +using std::memcpy; +using std::shared_ptr; +using std::string; +using std::string_view; +using std::vector; + +// Create a new SlangContext instance +std::unique_ptr new_slang_context() { return std::make_unique(); } + +SlangContext::SlangContext() : diagEngine(sourceManager), diagClient(std::make_shared()) { + diagEngine.addClient(diagClient); +} + +// Set the include paths for the preprocessor +void SlangContext::set_includes(const rust::Vec& incs) { + ppOptions.additionalIncludePaths.clear(); + for (const auto& inc : incs) { + ppOptions.additionalIncludePaths.emplace_back(std::string(inc)); + } +} + +// Sets the preprocessor defines +void SlangContext::set_defines(const rust::Vec& defs) { + ppOptions.predefines.clear(); + for (const auto& def : defs) { + ppOptions.predefines.emplace_back(std::string(def)); + } +} + +// Parses the given file and returns a syntax tree, if successful +std::shared_ptr SlangContext::parse_file(rust::Str path) { + string_view pathView(path.data(), path.size()); + Bag options; + options.set(ppOptions); + + auto result = SyntaxTree::fromFile(pathView, sourceManager, options); + + if (!result) { + auto& err = result.error(); + std::string msg = "System Error loading '" + std::string(err.second) + "': " + err.first.message(); + throw std::runtime_error(msg); + } + + auto tree = *result; + diagClient->clear(); + diagEngine.clearIncludeStack(); + + bool hasErrors = false; + for (const auto& diag : tree->diagnostics()) { + hasErrors |= diag.isError(); + diagEngine.issue(diag); + } + + if (hasErrors) { + std::string rendered = diagClient->getString(); + if (rendered.empty()) { + rendered = "Failed to parse '" + std::string(pathView) + "'."; + } + throw std::runtime_error(rendered); + } + + return tree; +} + +std::unique_ptr SlangContext::parse_files(const rust::Vec& paths) { + auto out = std::make_unique(); + out->trees.reserve(paths.size()); + for (const auto& path : paths) { + out->trees.push_back(parse_file(path)); + } + return out; +} + +// Rewriter that adds prefix/suffix to module and instantiated hierarchy names +class SuffixPrefixRewriter : public SyntaxRewriter { + public: + SuffixPrefixRewriter(string_view prefix, string_view suffix, const std::unordered_set& excludes) + : prefix(prefix), suffix(suffix), excludes(excludes) {} + + // Helper to allocate and build renamed string with prefix/suffix + string_view rename(string_view name) { + if (excludes.count(std::string(name))) { + return name; + } + size_t len = prefix.size() + name.size() + suffix.size(); + char* mem = (char*)alloc.allocate(len, 1); + memcpy(mem, prefix.data(), prefix.size()); + memcpy(mem + prefix.size(), name.data(), name.size()); + memcpy(mem + prefix.size() + name.size(), suffix.data(), suffix.size()); + return string_view(mem, len); + } + + // Renames "module foo;" -> "module foo;" + // Note: Handles packages and interfaces too. 
+ void handle(const ModuleDeclarationSyntax& node) { + if (node.header->name.isMissing()) + return; + + // Create a new name token + auto newName = rename(node.header->name.valueText()); + auto newNameToken = makeId(newName, node.header->name.trivia()); + + // Clone the header and update the name + ModuleHeaderSyntax* newHeader = deepClone(*node.header, alloc); + newHeader->name = newNameToken; + + // Replace the old header with the new one + replace(*node.header, *newHeader); + + // Continue visiting child nodes + visitDefault(node); + } + + // Renames "foo i_foo();" -> "foo i_foo();" + // Note: Handles modules and interfaces. + void handle(const HierarchyInstantiationSyntax& node) { + // Check to make sure we are dealing with an identifier + // and not a built-in type e.g. `initial foo();` + if (node.type.kind == parsing::TokenKind::Identifier) { + + // Create a new name token + auto newName = rename(node.type.valueText()); + auto newNameToken = makeId(newName); + + // Clone the node and update the type token + HierarchyInstantiationSyntax* newNode = deepClone(node, alloc); + newNode->type = newNameToken; + + // Replace the old node with the new one + replace(node, *newNode, true); + } + + // Continue visiting child nodes + visitDefault(node); + } + + // Renames "import foo;" -> "import foo;" + void handle(const PackageImportItemSyntax& node) { + if (node.package.isMissing()) + return; + + auto newName = rename(node.package.valueText()); + auto newNameToken = makeId(newName, node.package.trivia()); + + PackageImportItemSyntax* newNode = deepClone(node, alloc); + newNode->package = newNameToken; + + replace(node, *newNode); + visitDefault(node); + } + + // Renames "virtual MyIntf foo;" -> "virtual MyIntf foo;" + void handle(const VirtualInterfaceTypeSyntax& node) { + if (node.name.isMissing()) + return; + + auto newName = rename(node.name.valueText()); + auto newNameToken = makeId(newName, node.name.trivia()); + + VirtualInterfaceTypeSyntax* newNode = deepClone(node, alloc); + newNode->name = newNameToken; + + replace(node, *newNode); + visitDefault(node); + } + + // Renames "foo::bar" -> "foo::bar" + void handle(const ScopedNameSyntax& node) { + // Only rename if the left side is a simple identifier (e.g., a package name) + // We ignore nested calls or parameterized classes for now. 
+ if (node.left->kind == SyntaxKind::IdentifierName) { + auto& leftNode = node.left->as(); + auto name = leftNode.identifier.valueText(); + + // Skip built-in keywords that look like scopes + if (name != "$unit" && name != "local" && name != "super" && name != "this") { + auto newName = rename(name); + auto newNameToken = makeId(newName, leftNode.identifier.trivia()); + + // Clone the left node and update identifier + IdentifierNameSyntax* newLeft = deepClone(leftNode, alloc); + newLeft->identifier = newNameToken; + + // Clone the scoped node and attach new left + ScopedNameSyntax* newNode = deepClone(node, alloc); + newNode->left = newLeft; + + replace(node, *newNode); + } + } + + // Visit children to handle recursive scopes + // e.g., OuterPkg::InnerPkg::Item + visitDefault(node); + } + + private: + string_view prefix; + string_view suffix; + const std::unordered_set& excludes; +}; + +// Transform the given syntax tree by renaming modules and instantiated hierarchy names with the specified prefix/suffix +std::shared_ptr rename(std::shared_ptr tree, rust::Str prefix, rust::Str suffix, + const rust::Vec& excludes) { + std::string_view p(prefix.data(), prefix.size()); + std::string_view s(suffix.data(), suffix.size()); + + std::unordered_set excludeSet; + for (const auto& e : excludes) { + excludeSet.insert(std::string(e)); + } + + // SuffixPrefixRewriter is defined in the .cpp file as before + SuffixPrefixRewriter rewriter(p, s, excludeSet); + return rewriter.transform(tree); +} + +// Print the given syntax tree with specified options +rust::String print_tree(const shared_ptr tree, SlangPrintOpts options) { + + // Set up the printer with options + SyntaxPrinter printer(tree->sourceManager()); + + printer.setIncludeDirectives(true); + printer.setExpandIncludes(true); + printer.setExpandMacros(options.expand_macros); + printer.setSquashNewlines(options.squash_newlines); + printer.setIncludeComments(options.include_comments); + + // Print the tree root and return as rust::String + printer.print(tree->root()); + return rust::String(printer.str()); +} + +// Dumps the AST/CST to a JSON string +rust::String dump_tree_json(std::shared_ptr tree) { + JsonWriter writer; + writer.setPrettyPrint(true); + + // CSTSerializer is the class Slang uses to convert AST -> JSON + CSTSerializer serializer(writer); + + // Serialize the specific tree root + serializer.serialize(*tree); + + // Convert string_view to rust::String + return rust::String(std::string(writer.view())); +} + +std::unique_ptr new_syntax_trees() { return std::make_unique(); } + +void append_trees(SyntaxTrees& dst, const SyntaxTrees& src) { + dst.trees.reserve(dst.trees.size() + src.trees.size()); + for (const auto& tree : src.trees) { + dst.trees.push_back(tree); + } +} + +rust::Vec reachable_tree_indices(const SyntaxTrees& trees, const rust::Vec& tops) { + const auto& treeVec = trees.trees; + + // Build a mapping from declared symbol names to the index of the tree that declares them + std::unordered_map nameToTreeIndex; + for (size_t i = 0; i < treeVec.size(); ++i) { + const auto& metadata = treeVec[i]->getMetadata(); + for (auto name : metadata.getDeclaredSymbols()) { + nameToTreeIndex.emplace(name, i); + } + } + + // Build a dependency graph where each tree points to the trees that declare symbols it references + std::vector> deps(treeVec.size()); + for (size_t i = 0; i < treeVec.size(); ++i) { + const auto& metadata = treeVec[i]->getMetadata(); + std::unordered_set seen; + for (auto ref : metadata.getReferencedSymbols()) { + auto it = 
nameToTreeIndex.find(ref); + // Avoid duplicate dependencies in case of multiple references to the same symbol + if (it != nameToTreeIndex.end() && seen.insert(it->second).second) { + deps[i].push_back(it->second); + } + } + } + + // Map the top module names to their corresponding tree indices + std::vector startIndices; + startIndices.reserve(tops.size()); + for (const auto& top : tops) { + std::string_view name(top.data(), top.size()); + auto it = nameToTreeIndex.find(name); + if (it == nameToTreeIndex.end()) { + throw std::runtime_error("Top module not found in any parsed source file: " + std::string(name)); + } else { + startIndices.push_back(it->second); + } + } + + // Perform a DFS from the top modules to find all reachable trees + std::vector reachable(treeVec.size(), false); + std::function dfs = [&](size_t index) { + if (reachable[index]) { + return; + } + reachable[index] = true; + for (auto dep : deps[index]) { + dfs(dep); + } + }; + + for (auto start : startIndices) { + dfs(start); + } + + // Collect the indices of reachable trees and return as rust::Vec + rust::Vec result; + for (size_t i = 0; i < reachable.size(); ++i) { + if (reachable[i]) { + result.push_back(static_cast(i)); + } + } + return result; +} + +std::size_t tree_count(const SyntaxTrees& trees) { return trees.trees.size(); } + +std::shared_ptr tree_at(const SyntaxTrees& trees, std::size_t index) { + if (index >= trees.trees.size()) { + throw std::runtime_error("Tree index out of bounds."); + } + return trees.trees[index]; +} diff --git a/crates/bender-slang/cpp/slang_bridge.h b/crates/bender-slang/cpp/slang_bridge.h new file mode 100644 index 000000000..9e2ff59d9 --- /dev/null +++ b/crates/bender-slang/cpp/slang_bridge.h @@ -0,0 +1,58 @@ +// Copyright (c) 2025 ETH Zurich +// Tim Fischer + +#ifndef BENDER_SLANG_BRIDGE_H +#define BENDER_SLANG_BRIDGE_H + +#include "rust/cxx.h" +#include "slang/diagnostics/DiagnosticEngine.h" +#include "slang/diagnostics/TextDiagnosticClient.h" +#include "slang/driver/Driver.h" +#include "slang/syntax/SyntaxTree.h" + +#include +#include +#include +#include +#include + +struct SlangPrintOpts; +struct SyntaxTrees; + +class SlangContext { + public: + SlangContext(); + + void set_includes(const rust::Vec& includes); + void set_defines(const rust::Vec& defines); + + std::shared_ptr parse_file(rust::Str path); + std::unique_ptr parse_files(const rust::Vec& paths); + + private: + slang::SourceManager sourceManager; + slang::parsing::PreprocessorOptions ppOptions; + slang::DiagnosticEngine diagEngine; + std::shared_ptr diagClient; +}; + +std::unique_ptr new_slang_context(); + +std::shared_ptr rename(std::shared_ptr tree, rust::Str prefix, + rust::Str suffix, const rust::Vec& excludes); + +rust::String print_tree(std::shared_ptr tree, SlangPrintOpts options); + +rust::String dump_tree_json(std::shared_ptr tree); + +struct SyntaxTrees { + std::vector> trees; +}; + +std::unique_ptr new_syntax_trees(); +void append_trees(SyntaxTrees& dst, const SyntaxTrees& src); +rust::Vec reachable_tree_indices(const SyntaxTrees& trees, const rust::Vec& tops); +std::size_t tree_count(const SyntaxTrees& trees); +std::shared_ptr tree_at(const SyntaxTrees& trees, std::size_t index); + +#endif // BENDER_SLANG_BRIDGE_H diff --git a/crates/bender-slang/src/lib.rs b/crates/bender-slang/src/lib.rs new file mode 100644 index 000000000..e5e070be4 --- /dev/null +++ b/crates/bender-slang/src/lib.rs @@ -0,0 +1,261 @@ +// Copyright (c) 2025 ETH Zurich +// Tim Fischer + +use cxx::{SharedPtr, UniquePtr}; +use thiserror::Error; 
+ +pub use ffi::SlangPrintOpts; + +pub type Result = std::result::Result; + +#[derive(Debug, Error)] +pub enum SlangError { + #[error("Failed to parse file: {message}")] + Parse { message: String }, + #[error("Failed to parse files: {message}")] + ParseFiles { message: String }, + #[error("Failed to trim files by top modules: {message}")] + TrimByTop { message: String }, + #[error("Failed to access parsed syntax tree: {message}")] + TreeAccess { message: String }, +} + +#[cxx::bridge] +mod ffi { + /// Options for the syntax printer + #[derive(Clone, Copy)] + struct SlangPrintOpts { + expand_macros: bool, + include_comments: bool, + squash_newlines: bool, + } + + unsafe extern "C++" { + include!("bender-slang/cpp/slang_bridge.h"); + // Include Slang header to define SyntaxTree type for CXX + include!("slang/syntax/SyntaxTree.h"); + + /// Opaque type for the Slang Context + type SlangContext; + + /// Opaque type for the Slang SyntaxTree + #[namespace = "slang::syntax"] + type SyntaxTree; + /// Opaque type for a batch of parsed syntax trees. + type SyntaxTrees; + + /// Create a new persistent context + fn new_slang_context() -> UniquePtr; + + /// Set the include directories + fn set_includes(self: Pin<&mut SlangContext>, includes: &Vec); + /// Set the preprocessor defines + fn set_defines(self: Pin<&mut SlangContext>, defines: &Vec); + + /// Parse all added sources. Returns a syntax tree on success, or an error message on failure. + fn parse_file(self: Pin<&mut SlangContext>, path: &str) -> Result>; + /// Parse multiple source files and return a batch of syntax trees. + fn parse_files( + self: Pin<&mut SlangContext>, + paths: &Vec, + ) -> Result>; + /// Create an empty syntax-tree batch. + fn new_syntax_trees() -> UniquePtr; + /// Appends trees from src into dst. + fn append_trees(dst: Pin<&mut SyntaxTrees>, src: &SyntaxTrees); + /// Computes reachable tree indices from the provided top names. + fn reachable_tree_indices(trees: &SyntaxTrees, tops: &Vec) -> Result>; + /// Returns the number of trees in the batch. + fn tree_count(trees: &SyntaxTrees) -> usize; + /// Returns tree at index from the batch. + fn tree_at(trees: &SyntaxTrees, index: usize) -> Result>; + + /// Rename names in the syntax tree with a given prefix and suffix + fn rename( + tree: SharedPtr, + prefix: &str, + suffix: &str, + excludes: &Vec, + ) -> SharedPtr; + + /// Print a specific tree + fn print_tree(tree: SharedPtr, options: SlangPrintOpts) -> String; + + /// Dump the syntax tree as JSON for debugging purposes + fn dump_tree_json(tree: SharedPtr) -> String; + } +} + +/// Wrapper around an opaque Slang syntax tree. 
+pub struct SyntaxTree { + inner: SharedPtr, +} + +impl Clone for SyntaxTree { + fn clone(&self) -> Self { + Self { + inner: self.inner.clone(), + } + } +} + +impl SyntaxTree { + /// Renames all names in the syntax tree with the given prefix and suffix + pub fn rename( + &self, + prefix: Option<&str>, + suffix: Option<&str>, + excludes: &Vec, + ) -> Self { + if prefix.is_none() && suffix.is_none() { + return self.clone(); + } + Self { + inner: ffi::rename( + self.inner.clone(), + prefix.unwrap_or(""), + suffix.unwrap_or(""), + excludes, + ), + } + } + + /// Displays the syntax tree as a string with the given options + pub fn display(&self, options: SlangPrintOpts) -> String { + ffi::print_tree(self.inner.clone(), options) + } + + pub fn as_debug(&self) -> String { + ffi::dump_tree_json(self.inner.clone()) + } +} + +impl std::fmt::Display for SyntaxTree { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let options = SlangPrintOpts { + expand_macros: false, + include_comments: true, + squash_newlines: false, + }; + f.write_str(&self.display(options)) + } +} + +impl std::fmt::Debug for SyntaxTree { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str(&self.as_debug()) + } +} + +/// Wrapper around an opaque Slang context. +pub struct SlangContext { + inner: UniquePtr, +} + +/// Wrapper around an opaque batch of syntax trees. +pub struct SyntaxTrees { + inner: UniquePtr, +} + +impl SyntaxTrees { + /// Creates an empty syntax-tree batch. + pub fn new() -> Self { + Self { + inner: ffi::new_syntax_trees(), + } + } + + /// Appends all trees from src into self. + pub fn append_trees(&mut self, src: &SyntaxTrees) { + ffi::append_trees(self.inner.pin_mut(), src.inner.as_ref().unwrap()); + } + + /// Returns tree count in this batch. + pub fn len(&self) -> usize { + ffi::tree_count(self.inner.as_ref().unwrap()) + } + + /// Returns true if the batch contains no trees. + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Returns indices reachable from top names. + pub fn reachable_indices(&self, tops: &Vec) -> Result> { + let indices = + ffi::reachable_tree_indices(self.inner.as_ref().unwrap(), tops).map_err(|cause| { + SlangError::TrimByTop { + message: cause.to_string(), + } + })?; + Ok(indices.into_iter().map(|i| i as usize).collect()) + } + + /// Returns a tree at the provided index. + pub fn tree_at(&self, index: usize) -> Result { + Ok(SyntaxTree { + inner: ffi::tree_at(self.inner.as_ref().unwrap(), index).map_err(|cause| { + SlangError::TreeAccess { + message: cause.to_string(), + } + })?, + }) + } +} + +impl Default for SyntaxTrees { + fn default() -> Self { + Self::new() + } +} + +impl SlangContext { + /// Creates a new Slang session. + pub fn new() -> Self { + Self { + inner: ffi::new_slang_context(), + } + } + + /// Sets the include directories. + pub fn set_includes(&mut self, includes: &Vec) -> &mut Self { + self.inner.pin_mut().set_includes(includes); + self + } + + /// Sets the preprocessor defines. + pub fn set_defines(&mut self, defines: &Vec) -> &mut Self { + self.inner.pin_mut().set_defines(defines); + self + } + + /// Parses a source file and returns the syntax tree. + pub fn parse(&mut self, path: &str) -> Result { + Ok(SyntaxTree { + inner: self + .inner + .pin_mut() + .parse_file(path) + .map_err(|cause| SlangError::Parse { + message: cause.to_string(), + })?, + }) + } + + /// Parses multiple source files and returns a batch of syntax trees. 
+ pub fn parse_files(&mut self, paths: &Vec) -> Result { + Ok(SyntaxTrees { + inner: self.inner.pin_mut().parse_files(paths).map_err(|cause| { + SlangError::ParseFiles { + message: cause.to_string(), + } + })?, + }) + } +} + +impl Default for SlangContext { + fn default() -> Self { + Self::new() + } +} diff --git a/crates/bender-slang/vendor/slang b/crates/bender-slang/vendor/slang new file mode 160000 index 000000000..ace09c5d7 --- /dev/null +++ b/crates/bender-slang/vendor/slang @@ -0,0 +1 @@ +Subproject commit ace09c5d7c9f4e28eed654d2f353c6dc792ebf67 diff --git a/src/cli.rs b/src/cli.rs index 6f8ee5938..f14eda4bd 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -106,6 +106,8 @@ enum Commands { Init, Snapshot(cmd::snapshot::SnapshotArgs), Audit(cmd::audit::AuditArgs), + #[cfg(feature = "slang")] + Pickle(cmd::pickle::PickleArgs), #[command(external_subcommand)] Plugin(Vec), } @@ -329,6 +331,8 @@ pub fn main() -> Result<()> { Commands::Fusesoc(args) => cmd::fusesoc::run(&sess, &args), Commands::Snapshot(args) => cmd::snapshot::run(&sess, &args), Commands::Audit(args) => cmd::audit::run(&sess, &args), + #[cfg(feature = "slang")] + Commands::Pickle(args) => cmd::pickle::run(&sess, args), Commands::Plugin(args) => { let (plugin_name, plugin_args) = args .split_first() diff --git a/src/cmd.rs b/src/cmd.rs index 8399f03b6..bbae6227d 100644 --- a/src/cmd.rs +++ b/src/cmd.rs @@ -19,6 +19,8 @@ pub mod init; pub mod packages; pub mod parents; pub mod path; +#[cfg(feature = "slang")] +pub mod pickle; pub mod script; pub mod snapshot; pub mod sources; diff --git a/src/cmd/pickle.rs b/src/cmd/pickle.rs new file mode 100644 index 000000000..0bdd5890e --- /dev/null +++ b/src/cmd/pickle.rs @@ -0,0 +1,263 @@ +// Copyright (c) 2025 ETH Zurich +// Tim Fischer + +//! The `pickle` subcommand. + +use std::fs::File; +use std::io::{BufWriter, Write}; +use std::path::Path; + +use clap::Args; +use indexmap::{IndexMap, IndexSet}; +use tokio::runtime::Runtime; + +use crate::cmd::sources::get_passed_targets; +use crate::config::{Validate, ValidationContext}; +use crate::diagnostic::Warnings; +use crate::error::*; +use crate::sess::{Session, SessionIo}; +use crate::src::{SourceFile, SourceGroup, SourceType}; +use crate::target::TargetSet; + +use bender_slang::{SlangContext, SlangPrintOpts, SyntaxTrees}; + +/// Pickle files +#[derive(Args, Debug)] +pub struct PickleArgs { + /// Additional source files to pickle, which are not part of the manifest. + files: Vec, + + /// The output file (defaults to stdout) + #[arg(short, long)] + output: Option, + + /// Only include sources that match the given target + #[arg(short, long)] + pub target: Vec, + + /// Specify package to show sources for + #[arg(short, long)] + pub package: Vec, + + /// Specify package to exclude from sources + #[arg(long)] + pub exclude: Vec, + + /// Exclude all dependencies, i.e. only top level or specified package(s) + #[arg(long)] + pub no_deps: bool, + + /// Additional include directory, which are not part of the manifest. + #[arg(short = 'I')] + include_dir: Vec, + + /// Additional preprocessor definition, which are not part of the manifest. + #[arg(short = 'D')] + define: Vec, + + /// One or more top-level modules used to trim unreachable parsed files. 
+ #[arg(long, help_heading = "Slang Options")] + top: Vec, + + /// A prefix to add to all names (modules, packages, interfaces) + #[arg(long, help_heading = "Slang Options")] + prefix: Option, + + /// A suffix to add to all names (modules, packages, interfaces) + #[arg(long, help_heading = "Slang Options")] + suffix: Option, + + /// Names to exclude from renaming (modules, packages, interfaces) + #[arg(long, help_heading = "Slang Options")] + exclude_rename: Vec, + + /// Expand macros in the output + #[arg(long, help_heading = "Slang Options")] + expand_macros: bool, + + /// Strip comments from the output + #[arg(long, help_heading = "Slang Options")] + strip_comments: bool, + + /// Squash newlines in the output + #[arg(long, help_heading = "Slang Options")] + squash_newlines: bool, + + /// Dump the syntax trees as JSON instead of the source code + #[arg(long, help_heading = "Slang Options")] + ast_json: bool, +} + +/// Execute the `pickle` subcommand. +pub fn run(sess: &Session, args: PickleArgs) -> Result<()> { + // Load the source files + let rt = Runtime::new()?; + let io = SessionIo::new(sess); + let srcs = rt.block_on(io.sources(false, &[]))?; + + // Filter the sources by target. + let targets = TargetSet::new(args.target.iter().map(|s| s.as_str())); + + // Convert vector to sets for packages and excluded packages. + let package_set = IndexSet::from_iter(args.package); + let exclude_set = IndexSet::from_iter(args.exclude); + + // Filter the sources by specified packages. + let packages = &srcs.get_package_list( + sess.manifest.package.name.to_string(), + &package_set, + &exclude_set, + args.no_deps, + ); + + let (targets, packages) = get_passed_targets(sess, &rt, &io, &targets, packages, &package_set)?; + + // Filter the sources by target and package. + let srcs = srcs + .filter_targets(&targets) + .unwrap_or_default() + .filter_packages(&packages) + .unwrap_or_default(); + + // Flatten and validate the sources. + let mut srcs = srcs + .flatten() + .into_iter() + .map(|f| f.validate(&ValidationContext::default())) + .collect::>>()?; + + if !args.files.is_empty() { + let include_dirs = args + .include_dir + .iter() + .map(|d| sess.intern_path(Path::new(d))) + .collect::>(); + let defines = args + .define + .iter() + .map(|d| { + let mut parts = d.splitn(2, '='); + let name = parts.next().unwrap_or_default().trim().to_string(); + let value = parts + .next() + .map(|v| sess.intern_string(v.trim().to_string())); + (name, value) + }) + .collect::>(); + let files = args + .files + .iter() + .map(|f| SourceFile::File(sess.intern_path(Path::new(f)), Some(SourceType::Verilog))) + .collect::>(); + + srcs.push(SourceGroup { + include_dirs, + defines, + files, + ..SourceGroup::default() + }); + } + + let print_opts = SlangPrintOpts { + expand_macros: args.expand_macros, + include_comments: !args.strip_comments, + squash_newlines: args.squash_newlines, + }; + + // Setup Output Writer, either to file or stdout + let raw_writer: Box = match &args.output { + Some(path) => Box::new( + File::create(path) + .map_err(|e| Error::new(format!("Cannot create output file: {}", e)))?, + ), + None => Box::new(std::io::stdout()), + }; + let mut writer = BufWriter::new(raw_writer); + + // Start JSON Array if needed + if args.ast_json { + write!(writer, "[")?; + } + + let mut parsed_trees = SyntaxTrees::new(); + let mut slang = SlangContext::new(); + for src_group in srcs { + // Collect include directories and defines from the source group and command line arguments. 
+ let include_dirs: Vec = src_group + .include_dirs + .iter() + .chain(src_group.export_incdirs.values().flatten()) + .map(|path| path.to_string_lossy().into_owned()) + .chain(args.include_dir.iter().cloned()) + .collect(); + + // Collect defines from the source group and command line arguments. + let defines: Vec = src_group + .defines + .iter() + .map(|(def, value)| match value { + Some(v) => format!("{def}={v}"), + None => def.to_string(), + }) + .chain(args.define.iter().cloned()) + .collect(); + + // Set the include directories and defines in the Slang session. + slang.set_includes(&include_dirs).set_defines(&defines); + + // Collect file paths from the source group. + let file_paths: Vec = src_group + .files + .iter() + .filter_map(|source| match source { + SourceFile::File(path, Some(SourceType::Verilog)) => { + Some(path.to_string_lossy().into_owned()) + } + // Vhdl or unknown file types are not supported by Slang, so we emit a warning and skip them. + SourceFile::File(path, _) => { + Warnings::PickleNonVerilogFile(path.to_path_buf()).emit(); + None + } + // Groups should not exist at this point, + // as we have already flattened the sources. + _ => None, + }) + .collect(); + + let group_trees = slang.parse_files(&file_paths)?; + parsed_trees.append_trees(&group_trees); + } + + let reachable = if args.top.is_empty() { + (0..parsed_trees.len()).collect::>() + } else { + parsed_trees.reachable_indices(&args.top)? + }; + + let mut first_item = true; + for idx in reachable { + let tree = parsed_trees.tree_at(idx)?; + let renamed_tree = tree.rename( + args.prefix.as_deref(), + args.suffix.as_deref(), + &args.exclude_rename, + ); + if args.ast_json { + // JSON Array Logic: Prepend comma if not the first item + if !first_item { + write!(writer, ",")?; + } + write!(writer, "{:?}", renamed_tree)?; + first_item = false; + } else { + write!(writer, "{}", renamed_tree.display(print_opts))?; + } + } + + // Close JSON Array + if args.ast_json { + writeln!(writer, "]")?; + } + + Ok(()) +} diff --git a/src/cmd/script.rs b/src/cmd/script.rs index 5c6e9cee8..457dbeed8 100644 --- a/src/cmd/script.rs +++ b/src/cmd/script.rs @@ -496,15 +496,7 @@ fn emit_template( separate_files_in_group( src, |f| match f { - SourceFile::File(p, fmt) => match fmt { - Some(SourceType::Verilog) => Some(SourceType::Verilog), - Some(SourceType::Vhdl) => Some(SourceType::Vhdl), - _ => match p.extension().and_then(std::ffi::OsStr::to_str) { - Some("sv") | Some("v") | Some("vp") => Some(SourceType::Verilog), - Some("vhd") | Some("vhdl") => Some(SourceType::Vhdl), - _ => Some(SourceType::Unknown), - }, - }, + SourceFile::File(_, fmt) => *fmt, _ => None, }, |src, ty, files| { @@ -542,21 +534,17 @@ fn emit_template( SourceFile::Group(_) => unreachable!(), }) .collect(), - file_type: match ty { - SourceType::Verilog => "verilog".to_string(), - SourceType::Vhdl => "vhdl".to_string(), - SourceType::Unknown => "".to_string(), - }, + file_type: Some(ty), }); }, ); } for src in &split_srcs { - match src.file_type.as_str() { - "verilog" => { + match src.file_type { + Some(SourceType::Verilog) => { all_verilog.append(&mut src.files.clone().into_iter().collect()); } - "vhdl" => { + Some(SourceType::Vhdl) => { all_vhdl.append(&mut src.files.clone().into_iter().collect()); } _ => { @@ -608,5 +596,5 @@ struct TplSrcStruct { defines: IndexSet<(String, Option)>, incdirs: IndexSet, files: IndexSet, - file_type: String, + file_type: Option, } diff --git a/src/diagnostic.rs b/src/diagnostic.rs index 3c4fcce65..cc136791a 100644 --- 
a/src/diagnostic.rs +++ b/src/diagnostic.rs @@ -390,6 +390,10 @@ pub enum Warnings { #[error("Path {} for dependency {} does not exist.", fmt_path!(path.display()), fmt_pkg!(pkg))] #[diagnostic(code(W32))] DepPathMissing { pkg: String, path: PathBuf }, + + #[error("File {} is not a Verilog file and will be ignored in the pickle output.", fmt_path!(.0.display()))] + #[diagnostic(code(W33))] + PickleNonVerilogFile(PathBuf), } #[cfg(test)] diff --git a/src/error.rs b/src/error.rs index 02a1b9cc9..0b0f42580 100644 --- a/src/error.rs +++ b/src/error.rs @@ -145,3 +145,10 @@ impl From for Error { Error::chain("Cannot startup runtime.".to_string(), err) } } + +#[cfg(feature = "slang")] +impl From for Error { + fn from(err: bender_slang::SlangError) -> Error { + Error::chain("Slang error:", err) + } +} diff --git a/src/sess.rs b/src/sess.rs index 403fc82b2..a4837b7c6 100644 --- a/src/sess.rs +++ b/src/sess.rs @@ -414,17 +414,26 @@ impl<'ctx> Session<'ctx> { .files .iter() .map(|file| match *file { - config::SourceFile::File(ref path) => (path as &Path).into(), + config::SourceFile::File(ref path) => { + let ty = match path.extension().and_then(std::ffi::OsStr::to_str) { + Some("sv") | Some("v") | Some("vp") | Some("svh") => { + Some(crate::src::SourceType::Verilog) + } + Some("vhd") | Some("vhdl") => Some(crate::src::SourceType::Vhdl), + _ => None, + }; + crate::src::SourceFile::File(path as &Path, ty) + } config::SourceFile::SvFile(ref path) => crate::src::SourceFile::File( path as &Path, - &Some(crate::src::SourceType::Verilog), + Some(crate::src::SourceType::Verilog), ), config::SourceFile::VerilogFile(ref path) => crate::src::SourceFile::File( path as &Path, - &Some(crate::src::SourceType::Verilog), + Some(crate::src::SourceType::Verilog), ), config::SourceFile::VhdlFile(ref path) => { - crate::src::SourceFile::File(path as &Path, &Some(crate::src::SourceType::Vhdl)) + crate::src::SourceFile::File(path as &Path, Some(crate::src::SourceType::Vhdl)) } config::SourceFile::Group(ref group) => self .load_sources( diff --git a/src/src.rs b/src/src.rs index 4cdf2a0a6..c0a62c7ec 100644 --- a/src/src.rs +++ b/src/src.rs @@ -338,16 +338,13 @@ impl<'ctx> SourceGroup<'ctx> { } /// File types for a source file. -#[derive(Debug, Copy, Clone, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize)] +#[serde(rename_all = "lowercase")] pub enum SourceType { /// A Verilog file. Verilog, - // /// A SystemVerilog file. - // SystemVerilog, /// A VHDL file. Vhdl, - /// Unknown file type - Unknown, } /// A source file. @@ -356,7 +353,7 @@ pub enum SourceType { #[derive(Clone)] pub enum SourceFile<'ctx> { /// A file. - File(&'ctx Path, &'ctx Option), + File(&'ctx Path, Option), /// A group of files. Group(Box>), } @@ -383,12 +380,6 @@ impl<'ctx> From> for SourceFile<'ctx> { } } -impl<'ctx> From<&'ctx Path> for SourceFile<'ctx> { - fn from(path: &'ctx Path) -> SourceFile<'ctx> { - SourceFile::File(path, &None) - } -} - impl<'ctx> Validate for SourceFile<'ctx> { type Output = SourceFile<'ctx>; type Error = Error;
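
For reference, a minimal sketch of how the new `bender-slang` wrappers are driven, mirroring the per-source-group loop in `src/cmd/pickle.rs` above; the file paths, include directory, define, and top-module name are placeholder values:

```rust
use bender_slang::{SlangContext, SlangPrintOpts, SyntaxTrees};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Configure a Slang session with include directories and defines (placeholders).
    let mut slang = SlangContext::new();
    slang
        .set_includes(&vec!["rtl/include".to_string()])
        .set_defines(&vec!["SYNTHESIS".to_string()]);

    // Parse a batch of SystemVerilog files into one SyntaxTrees collection.
    let mut trees = SyntaxTrees::new();
    let files = vec!["rtl/my_top.sv".to_string(), "rtl/my_sub.sv".to_string()];
    trees.append_trees(&slang.parse_files(&files)?);

    // Keep only the trees reachable from the chosen top module.
    let tops = vec!["my_top".to_string()];
    let opts = SlangPrintOpts {
        expand_macros: false,
        include_comments: true,
        squash_newlines: false,
    };
    for idx in trees.reachable_indices(&tops)? {
        // Rename declarations with a prefix/suffix, keeping the top module's name unchanged.
        let tree = trees.tree_at(idx)?.rename(Some("p_"), Some("_s"), &tops);
        print!("{}", tree.display(opts));
    }
    Ok(())
}
```

The real command additionally merges the manifest's include directories and defines with the `-I`/`-D` flags before parsing each source group, and switches to `dump_tree_json` output when `--ast-json` is given.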