diff --git a/.cargo/audit.toml b/.cargo/audit.toml
new file mode 100644
index 0000000..83a6292
--- /dev/null
+++ b/.cargo/audit.toml
@@ -0,0 +1,6 @@
[advisories]
# RUSTSEC-2023-0071: rsa crate Marvin Attack (timing side-channel)
# Pulled in transitively by sqlx-mysql (MySQL backend). No fix available upstream.
# This is only exploitable in network-observable timing scenarios; the
# authentication server does not expose RSA operations through the MySQL
# connection path.
ignore = ["RUSTSEC-2023-0071"]
diff --git a/.github/scripts/common.sh b/.github/scripts/common.sh
new file mode 100755
index 0000000..8f2b1be
--- /dev/null
+++ b/.github/scripts/common.sh
@@ -0,0 +1,134 @@
#!/usr/bin/env bash
# Minimal shared helpers for authentication server CI scripts
# Source this file from other scripts: source "$(dirname "$0")/../scripts/common.sh"

set -euo pipefail

# ── macOS SDK helpers ────────────────────────────────────────────────────────

# Ensure macOS SDK path is available for the linker.
# Resolves DEVELOPER_DIR/SDKROOT and rewrites C/C++ compile flags to point at
# the SDK, clearing any Nix-injected include/link environment first.
ensure_macos_sdk_env() {
  if [ "$(uname -s)" != "Darwin" ]; then
    return 0
  fi

  : "${DEVELOPER_DIR:=/Library/Developer/CommandLineTools}"
  export DEVELOPER_DIR
  if [ -d "${DEVELOPER_DIR}/usr/bin" ]; then
    # Prepend DEVELOPER_DIR/usr/bin only if it is not already on PATH.
    case ":${PATH}:" in
      *":${DEVELOPER_DIR}/usr/bin:"*) : ;;
      *) export PATH="${DEVELOPER_DIR}/usr/bin:${PATH}" ;;
    esac
  fi

  if [ -n "${SDKROOT:-}" ] && [ -d "${SDKROOT}" ]; then
    : # SDKROOT already set and valid — keep it.
  else
    # Preferred: ask xcrun for the active macOS SDK path.
    if command -v xcrun >/dev/null 2>&1; then
      local sdk
      sdk="$(xcrun --sdk macosx --show-sdk-path 2>/dev/null || true)"
      if [ -n "$sdk" ] && [ -d "$sdk" ]; then
        export SDKROOT="$sdk"
      fi
    fi

    # Fallback: Command Line Tools default SDK location.
    if [ -z "${SDKROOT:-}" ]; then
      local clt_sdk="/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk"
      if [ -d "$clt_sdk" ]; then
        export SDKROOT="$clt_sdk"
      fi
    fi
  fi

  if [ -n "${SDKROOT:-}" ] && [ -d "${SDKROOT}" ]; then
    # Clear include/link environment that may point at non-SDK toolchains
    # (e.g. a Nix shell), so the SDK headers/frameworks win.
    unset CPATH C_INCLUDE_PATH CPLUS_INCLUDE_PATH OBJC_INCLUDE_PATH
    unset NIX_CFLAGS_COMPILE NIX_CFLAGS_LINK NIX_LDFLAGS

    local sysroot_flag="-isysroot ${SDKROOT}"
    local framework_dir="${SDKROOT}/System/Library/Frameworks"
    local framework_flags=""
    if [ -d "${framework_dir}" ]; then
      framework_flags="-F${framework_dir} -iframework ${framework_dir}"
    fi

    export CFLAGS="${sysroot_flag} ${framework_flags} ${CFLAGS:-}"
    export CPPFLAGS="${sysroot_flag} ${framework_flags} ${CPPFLAGS:-}"
    export CXXFLAGS="${sysroot_flag} ${framework_flags} ${CXXFLAGS:-}"
  fi
}

# Add the SDK frameworks directory to LDFLAGS/RUSTFLAGS so the linker can
# resolve macOS system frameworks. No-op off macOS or when no SDK is found.
ensure_macos_frameworks_ldflags() {
  if [ "$(uname -s)" != "Darwin" ]; then
    return 0
  fi

  if [ -z "${SDKROOT:-}" ] || [ ! -d "${SDKROOT}" ]; then
    ensure_macos_sdk_env || true
  fi

  if [ -z "${SDKROOT:-}" ] || [ ! -d "${SDKROOT}" ]; then
    return 0
  fi

  local frameworks_dir="${SDKROOT}/System/Library/Frameworks"
  if [ ! -d "${frameworks_dir}" ]; then
    return 0
  fi

  local fw_ldflags="-F${frameworks_dir} -Wl,-F,${frameworks_dir}"
  export LDFLAGS="${fw_ldflags} ${LDFLAGS:-}"
  export RUSTFLAGS="-C link-arg=-F${frameworks_dir} -C link-arg=-Wl,-F,${frameworks_dir} ${RUSTFLAGS:-}"
}

# ── Pinned nixpkgs ───────────────────────────────────────────────────────────

# Single source of truth for the pinned nixpkgs URL.
# IMPORTANT: Use an immutable commit tarball for deterministic builds.
export PIN_URL="https://package.cosmian.com/nixpkgs/8b27c1239e5c421a2bbc2c65d52e4a6fbf2ff296.tar.gz"
export PINNED_NIXPKGS_URL="$PIN_URL"

# ── Build environment ────────────────────────────────────────────────────────

# Initialize build/test configuration from CLI args.
# Exports: LINK (static|dynamic).
# FIX: an earlier comment also promised BUILD_PROFILE, but this function never
# sets it — BUILD_PROFILE is exported by nix.sh's parse_global_options instead.
init_build_env() {
  local link="static"
  local link_set=0

  # Scan the positional args for "--link <value>" using indirect expansion
  # (${!i}) so we can walk the list by index without consuming "$@".
  local i=1
  while [ $i -le $# ]; do
    case "${!i}" in
      --link)
        link_set=1
        i=$((i + 1))
        link="${!i:-}"
        ;;
    esac
    i=$((i + 1))
  done

  # Fall back to a pre-set LINK env var when no CLI flag was given.
  if [ $link_set -eq 0 ] && [ -n "${LINK:-}" ]; then
    case "${LINK}" in
      static | dynamic) link="${LINK}" ;;
    esac
  fi

  case "$link" in
    static | dynamic) : ;;
    *)
      echo "Error: --link must be 'static' or 'dynamic'" >&2
      exit 1
      ;;
  esac

  export LINK="$link"
}

# Ensure a modern Rust toolchain is available on PATH.
# Only acts when rustup is installed but has no default toolchain selected.
ensure_modern_rust() {
  if command -v rustup >/dev/null 2>&1; then
    if ! rustup which cargo >/dev/null 2>&1; then
      rustup default stable
    fi
  fi
}
diff --git a/.github/scripts/nix.sh b/.github/scripts/nix.sh
new file mode 100755
index 0000000..a4f4ecf
--- /dev/null
+++ b/.github/scripts/nix.sh
@@ -0,0 +1,448 @@
#!/usr/bin/env bash
# Unified entrypoint for authentication server CI: test and packaging workflows.
set -euo pipefail

SCRIPT_DIR=$(cd "$(dirname "$0")" && pwd)
# shellcheck source=.github/scripts/common.sh
source "$SCRIPT_DIR/common.sh"

# ── Usage ────────────────────────────────────────────────────────────────────

# NOTE(review): the heredoc below was corrupted in the source this was
# recovered from (everything between "cat <" and "OpenSSL linkage" was lost);
# the Usage/Commands text is reconstructed from the dispatch table — confirm
# against the committed file.
usage() {
  cat <<EOF
Usage: $0 [--link static|dynamic] <command> [args]

Commands:
  test [all|sqlite|psql]       run the test suites
  package [deb|rpm|dmg]        build distribution packages
  docker [--load] [--test]     build (and optionally load/test) the Docker image
  update-hashes                refresh the expected binary hashes

Options:
  -l, --link <static|dynamic>  OpenSSL linkage (default: static)
      static: statically link OpenSSL (vendored in Rust crate)
      dynamic: dynamically link system OpenSSL

Examples:
  $0 test              # all tests
  $0 test sqlite
  $0 test psql
  $0 --link static package
  $0 --link static package deb
  $0 --link dynamic package rpm
  $0 --link static package dmg   # macOS only
  $0 docker --load
  $0 docker --load --test
  $0 update-hashes
EOF
  exit 1
}

# ── Helpers ──────────────────────────────────────────────────────────────────

# Print the SHA-256 of a file, using whichever tool the platform provides.
compute_sha256() {
  local file="$1"
  if command -v sha256sum >/dev/null 2>&1; then
    sha256sum "$file" | awk '{print $1}'
  else
    shasum -a 256 "$file" | awk '{print $1}'
  fi
}

# Resolve the pinned nixpkgs tarball to its /nix/store path (if realized).
# Tries `nix eval` first, then the older `nix-instantiate` interface.
resolve_pinned_nixpkgs_store() {
  local path
  if path=$(nix eval --raw "(builtins.fetchTarball \"${PINNED_NIXPKGS_URL}\")" 2>/dev/null); then
    :
  else
    path=$(nix-instantiate --eval -E "builtins.fetchTarball { url = \"${PINNED_NIXPKGS_URL}\"; }" | sed -e 's/\"//g') || path=""
  fi
  if [ -n "$path" ] && [ -e "$path" ]; then
    echo "$path"
    return 0
  fi
  return 1
}

# Fetch the pinned nixpkgs into the store ahead of time, and on Linux also
# pre-build the packaging tools (dpkg/rpm/cpio) so later steps hit the cache.
prewarm_nixpkgs_and_tools() {
  if [ -n "${NO_PREWARM:-}" ]; then
    echo "Skipping prewarm (NO_PREWARM set)"
    return 0
  fi
  echo "Prewarming pinned nixpkgs into the store…"
  if ! resolve_pinned_nixpkgs_store >/dev/null; then
    nix-instantiate --eval -E "builtins.fetchTarball { url = \"${PINNED_NIXPKGS_URL}\"; }" >/dev/null
  fi
  local NIXPKGS_STORE
  NIXPKGS_STORE=$(resolve_pinned_nixpkgs_store || true)
  if [ -n "$NIXPKGS_STORE" ]; then
    export NIXPKGS_STORE
    echo "Pinned nixpkgs realized at: $NIXPKGS_STORE"
    if [ "$(uname)" = "Linux" ]; then
      # FIX: "<nixpkgs>" was eaten by the same corruption as the heredoc above;
      # restored here — these expressions are meaningless without it.
      nix-build -I "nixpkgs=${NIXPKGS_STORE}" -E 'with import <nixpkgs> {}; dpkg' --no-out-link >/dev/null 2>/dev/null || true
      nix-build -I "nixpkgs=${NIXPKGS_STORE}" -E 'with import <nixpkgs> {}; rpm' --no-out-link >/dev/null 2>/dev/null || true
      nix-build -I "nixpkgs=${NIXPKGS_STORE}" -E 'with import <nixpkgs> {}; cpio' --no-out-link >/dev/null 2>/dev/null || true
    fi
  fi
}

# Resolve the repository root (two levels above this script) and cd into it.
set_repo_root() {
  REPO_ROOT=$(cd "$(dirname "$0")/../.." && pwd)
  cd "$REPO_ROOT"
}

# Point NIX_PATH at the pinned nixpkgs unless the caller already set one.
ensure_nix_path() {
  PINNED_NIXPKGS_URL="$PIN_URL"
  if [ -z "${NIX_PATH:-}" ]; then
    export NIX_PATH="nixpkgs=${PINNED_NIXPKGS_URL}"
  fi
}

# ── Argument parsing ─────────────────────────────────────────────────────────

# Parse leading global options, stopping at the first command word.
# Exports: LINK, RELEASE_FLAG, BUILD_PROFILE; sets COMMAND and REMAINING_ARGS.
parse_global_options() {
  LINK="static"
  COMMAND=""

  while [ $# -gt 0 ]; do
    case "$1" in
      -l | --link)
        LINK="${2:-}"
        shift 2 || true
        ;;
      docker | test | package | update-hashes)
        COMMAND="$1"
        shift
        break
        ;;
      -h | --help)
        usage
        ;;
      *)
        if [ -n "${COMMAND:-}" ]; then
          break
        fi
        echo "Unknown option: $1" >&2
        usage
        ;;
    esac
  done

  [ -z "${COMMAND:-}" ] && usage

  # Packages ship release binaries; tests run debug builds.
  if [ "$COMMAND" = "package" ]; then
    RELEASE_FLAG="--release"
    BUILD_PROFILE="release"
  else
    RELEASE_FLAG=""
    BUILD_PROFILE="debug"
  fi

  export LINK RELEASE_FLAG BUILD_PROFILE
  REMAINING_ARGS=("$@")
}

# Split the per-command arguments: first positional becomes TEST_TYPE /
# PACKAGE_TYPE for the relevant commands; the rest lands in COMMAND_ARGS.
resolve_command_args() {
  local -a args=()
  args=("$@")
  COMMAND_ARGS=()

  TEST_TYPE=""
  if [ "$COMMAND" = "test" ]; then
    if [ ${#args[@]} -eq 0 ]; then
      TEST_TYPE="all"
    else
      TEST_TYPE="${args[0]}"
      args=("${args[@]:1}")
    fi
  fi

  PACKAGE_TYPE=""
  if [ "$COMMAND" = "package" ]; then
    if [ ${#args[@]} -ge 1 ]; then
      PACKAGE_TYPE="${args[0]}"
      args=("${args[@]:1}")
    fi
  fi

  if [ "$COMMAND" = "test" ]; then
    export WITH_WGET=1
  fi

  # ${args[@]+...} keeps `set -u` happy when the array is empty.
  COMMAND_ARGS=("${args[@]+"${args[@]}"}")
}

# Route to the per-command function after global parsing.
dispatch_command() {
  parse_global_options "$@"
  resolve_command_args ${REMAINING_ARGS[@]+"${REMAINING_ARGS[@]}"}

  case "$COMMAND" in
    docker)
      docker_command ${COMMAND_ARGS[@]+"${COMMAND_ARGS[@]}"}
      ;;
    test)
      test_command ${COMMAND_ARGS[@]+"${COMMAND_ARGS[@]}"}
      ;;
    package)
      package_command ${COMMAND_ARGS[@]+"${COMMAND_ARGS[@]}"}
      ;;
    update-hashes)
      update_hashes_command ${COMMAND_ARGS[@]+"${COMMAND_ARGS[@]}"}
      ;;
    *)
      echo "Error: Unknown command '$COMMAND'" >&2
      usage
      ;;
  esac
}

# ── Docker command ───────────────────────────────────────────────────────────

# Build the Docker image via Nix; optionally load it into Docker and smoke-test.
docker_command() {
  DOCKER_LOAD=false
  DOCKER_TEST=false
  DOCKER_FORCE=false
  while [ $# -gt 0 ]; do
    case "$1" in
      --force)
        DOCKER_FORCE=true
        shift
        ;;
      --load)
        DOCKER_LOAD=true
        shift
        ;;
      --test)
        DOCKER_TEST=true
        DOCKER_LOAD=true # testing implies loading
        shift
        ;;
      --)
        shift
        break
        ;;
      *) break ;;
    esac
  done

  if [ "$(uname)" = "Darwin" ]; then
    echo "Error: Docker image builds require a Linux builder." >&2
    exit 1
  fi

  ATTR="docker-image"
  VERSION=$(bash "$REPO_ROOT/.github/scripts/release/get_version.sh")
  OUT_LINK="$REPO_ROOT/result-docker-static"

  if [ -n "${FORCE_REBUILD:-}" ]; then
    DOCKER_FORCE=true
  fi

  # Reuse a previously built tarball when the out-link still resolves.
  if [ "$DOCKER_FORCE" != true ] && [ -L "$OUT_LINK" ] && REAL_OUT=$(readlink -f "$OUT_LINK" || true) && [ -f "$REAL_OUT" ]; then
    echo "Reusing existing Docker image tarball at: $REAL_OUT (use --force to rebuild)"
  else
    echo "Building Docker image: attr=$ATTR -> $OUT_LINK"
    nix-build -I "nixpkgs=${PIN_URL}" -A "$ATTR" -o "$OUT_LINK"
    REAL_OUT=$(readlink -f "$OUT_LINK" || echo "$OUT_LINK")
    echo "Built Docker image tarball: $REAL_OUT"
  fi

  if [ "$DOCKER_LOAD" = true ]; then
    if command -v docker >/dev/null 2>&1; then
      echo "Loading image into Docker (from $REAL_OUT)…"
      docker load <"$REAL_OUT"

      if [ "$DOCKER_TEST" = true ]; then
        echo "Running Docker image tests..."
        DOCKER_IMAGE_NAME="cosmian-auth-server:${VERSION}"
        export DOCKER_IMAGE_NAME
        bash "$REPO_ROOT/.github/scripts/test/test_docker_image.sh"
      fi
    else
      echo "Warning: docker CLI not found; skipping --load" >&2
    fi
  fi

  exit 0
}

# ── Test command ─────────────────────────────────────────────────────────────

# Run the selected test suite inside nix-shell with a minimal kept environment.
test_command() {
  case "$TEST_TYPE" in
    all)
      SCRIPT="$REPO_ROOT/.github/scripts/test/test_all.sh"
      ;;
    sqlite)
      SCRIPT="$REPO_ROOT/.github/scripts/test/test_sqlite.sh"
      ;;
    psql | postgres)
      SCRIPT="$REPO_ROOT/.github/scripts/test/test_psql.sh"
      ;;
    *)
      echo "Error: Unknown test type '$TEST_TYPE'" >&2
      echo "Valid types: all, sqlite, psql" >&2
      usage
      ;;
  esac

  export WITH_CURL=1

  # nix-shell runs with a purified environment; --keep whitelists what the
  # test scripts actually read.
  KEEP_VARS=" \
    --keep POSTGRES_HOST --keep POSTGRES_PORT \
    --keep AUTH_DATABASE_URL \
    --keep WITH_WGET \
    --keep WITH_CURL \
    --keep LINK \
    --keep RELEASE_FLAG \
    --keep BUILD_PROFILE"

  # Run inside nix-shell
  # shellcheck disable=SC2086
  nix-shell -I "nixpkgs=${PIN_URL}" $KEEP_VARS "$REPO_ROOT/shell.nix" \
    --run "bash '$SCRIPT'"
}

# ── Package command ──────────────────────────────────────────────────────────

# Build one or more package formats (deb/rpm on Linux, dmg on macOS), each
# followed by its smoke test and a .sha256 checksum file.
package_command() {
  case "$PACKAGE_TYPE" in
    "" | deb | rpm | dmg) : ;;
    *)
      echo "Error: Unknown package type '$PACKAGE_TYPE'" >&2
      usage
      ;;
  esac

  # macOS: DMG only via nix-shell (needs system tools: hdiutil, osascript)
  if [ "$(uname)" = "Darwin" ]; then
    local pkg_type="${PACKAGE_TYPE:-dmg}"
    if [ "$pkg_type" = "dmg" ]; then
      SCRIPT="$REPO_ROOT/.github/scripts/package/package_dmg.sh"
      nix-shell -I "nixpkgs=${PIN_URL}" --argstr variant "default" "$REPO_ROOT/shell.nix" \
        --run "bash '$SCRIPT' --link '$LINK'"
      OUT_DIR="$REPO_ROOT/result-dmg-$LINK"
      dmg_file=$(find "$OUT_DIR" -maxdepth 1 -type f -name '*.dmg' 2>/dev/null | head -n1 || true)
      if [ -n "${dmg_file:-}" ] && [ -f "$dmg_file" ]; then
        sum=$(compute_sha256 "$dmg_file")
        echo "$sum $(basename "$dmg_file")" >"$dmg_file.sha256"
        echo "Wrote checksum: $dmg_file.sha256 ($sum)"
      fi
      exit 0
    fi
  fi

  ensure_nix_path
  prewarm_nixpkgs_and_tools || true

  # Prefer the realized store path (offline-friendly) over the URL.
  NIXPKGS_STORE="${NIXPKGS_STORE:-}"
  NIXPKGS_ARG="$PINNED_NIXPKGS_URL"
  if [ -n "$NIXPKGS_STORE" ] && [ -e "$NIXPKGS_STORE" ]; then
    NIXPKGS_ARG="$NIXPKGS_STORE"
  fi

  # Determine which package types to build
  if [ -z "$PACKAGE_TYPE" ]; then
    TYPES="deb rpm"
  else
    TYPES="$PACKAGE_TYPE"
  fi

  for TYPE in $TYPES; do
    case "$TYPE" in
      deb)
        if [ "$(uname)" = "Linux" ]; then
          SCRIPT_LINUX="$REPO_ROOT/.github/scripts/package/package_deb.sh"
          nix-shell -I "nixpkgs=${NIXPKGS_ARG}" -p curl --run "bash '$SCRIPT_LINUX' --link '$LINK'"
          REAL_OUT="$REPO_ROOT/result-deb-$LINK"
          echo "Built deb ($LINK): $REAL_OUT"

          # Smoke test
          SMOKE_TEST="$REPO_ROOT/.github/scripts/package/smoke_test_deb.sh"
          DEB_FILE=$(find "$REAL_OUT" -maxdepth 1 -type f -name '*.deb' 2>/dev/null | head -n1 || true)
          if [ -n "${DEB_FILE:-}" ] && [ -f "$DEB_FILE" ] && [ -f "$SMOKE_TEST" ]; then
            nix-shell -I "nixpkgs=${NIXPKGS_ARG}" -p binutils file coreutils --run "bash '$SMOKE_TEST' '$DEB_FILE'" || {
              echo "ERROR: Smoke test failed for $DEB_FILE" >&2
              exit 1
            }
          fi
        else
          echo "DEB packaging is only supported on Linux." >&2
          exit 1
        fi
        ;;
      rpm)
        if [ "$(uname)" = "Linux" ]; then
          SCRIPT_LINUX="$REPO_ROOT/.github/scripts/package/package_rpm.sh"
          nix-shell -I "nixpkgs=${NIXPKGS_ARG}" -p curl --run "bash '$SCRIPT_LINUX' --link '$LINK'"
          REAL_OUT="$REPO_ROOT/result-rpm-$LINK"
          echo "Built rpm ($LINK): $REAL_OUT"

          SMOKE_TEST="$REPO_ROOT/.github/scripts/package/smoke_test_rpm.sh"
          RPM_FILE=$(find "$REAL_OUT" -maxdepth 1 -type f -name '*.rpm' 2>/dev/null | head -n1 || true)
          if [ -n "${RPM_FILE:-}" ] && [ -f "$RPM_FILE" ] && [ -f "$SMOKE_TEST" ]; then
            nix-shell -I "nixpkgs=${NIXPKGS_ARG}" -p binutils file coreutils rpm cpio --run "bash '$SMOKE_TEST' '$RPM_FILE'" || {
              echo "ERROR: Smoke test failed for $RPM_FILE" >&2
              exit 1
            }
          fi
        else
          echo "RPM packaging is only supported on Linux." >&2
          exit 1
        fi
        ;;
      dmg)
        if [ "$(uname)" = "Darwin" ]; then
          SCRIPT_DARWIN="$REPO_ROOT/.github/scripts/package/package_dmg.sh"
          nix-shell -I "nixpkgs=${NIXPKGS_ARG}" --argstr variant "default" "$REPO_ROOT/shell.nix" \
            --run "bash '$SCRIPT_DARWIN' --link '$LINK'"
          echo "Built dmg ($LINK): $REPO_ROOT/result-dmg-$LINK"
        else
          echo "DMG packaging is only supported on macOS." >&2
          exit 1
        fi
        ;;
    esac
  done
}

# ── Update-hashes command ────────────────────────────────────────────────────

# Refresh expected binary hashes, delegating to update_hashes.sh when present.
update_hashes_command() {
  SCRIPT="$REPO_ROOT/.github/scripts/release/update_hashes.sh"
  if [ -f "$SCRIPT" ]; then
    bash "$SCRIPT" "$@"
  else
    echo "Building auth-server (static) to capture hashes..."
    ATTR="auth-server-static"
    OUT_LINK="$REPO_ROOT/result-server-static"
    nix-build -I "nixpkgs=${PIN_URL}" "$REPO_ROOT/default.nix" -A "$ATTR" -o "$OUT_LINK"
    REAL_OUT=$(readlink -f "$OUT_LINK")

    # Copy generated hash file from derivation output
    HASHES_DIR="$REPO_ROOT/nix/expected-hashes"
    mkdir -p "$HASHES_DIR"
    find "$REAL_OUT/bin" -name 'auth-server.*.sha256' 2>/dev/null | while IFS= read -r src; do
      fname=$(basename "$src")
      cp -f "$src" "$HASHES_DIR/$fname"
      echo "Updated: nix/expected-hashes/$fname"
    done
    echo "Done. Run the same for --link dynamic if needed."
  fi
  exit 0
}

# ── Main ─────────────────────────────────────────────────────────────────────

set_repo_root
ensure_nix_path
dispatch_command "$@"
diff --git a/.github/scripts/package/package_common.sh b/.github/scripts/package/package_common.sh
new file mode 100755
index 0000000..c00d5d8
--- /dev/null
+++ b/.github/scripts/package/package_common.sh
@@ -0,0 +1,228 @@
#!/usr/bin/env bash
# Common packaging logic for Cosmian Authentication Server
# Builds auth_server via Nix and packages it with cargo-deb / cargo-generate-rpm.
set -euo pipefail

SCRIPT_DIR=$(cd "$(dirname "$0")" && pwd)
REPO_ROOT=$(cd "$SCRIPT_DIR/../../.." && pwd)
source "$REPO_ROOT/.github/scripts/common.sh"
cd "$REPO_ROOT"

FORMAT=""
LINK="static"

usage() {
  echo "Usage: $0 --format deb|rpm [--link static|dynamic]" >&2
  exit 2
}

while [ $# -gt 0 ]; do
  case "$1" in
    -f | --format)
      FORMAT="${2:-}"
      shift 2 || true
      ;;
    -l | --link)
      LINK="${2:-}"
      shift 2 || true
      ;;
    -h | --help) usage ;;
    *) shift ;;
  esac
done

case "$FORMAT" in
deb | rpm) : ;;
*)
  echo "Error: --format must be 'deb' or 'rpm'" >&2
  usage
  ;;
esac
case "$LINK" in
static | dynamic) : ;;
*)
  echo "Error: --link must be 'static' or 'dynamic'" >&2
  exit 1
  ;;
esac

# Persistent Cargo cache for offline runs
OFFLINE_CARGO_HOME="$REPO_ROOT/target/cargo-offline-home"

# ── Pre-warm Cargo registry ──────────────────────────────────────────────────

# Fetch the crate index/sources into a repo-local CARGO_HOME so later steps
# can run offline. Best-effort: failures are tolerated.
prewarm_cargo_registry() {
  if [ -n "${NO_PREWARM:-}" ]; then return; fi
  ensure_modern_rust
  mkdir -p "$OFFLINE_CARGO_HOME"
  export CARGO_HOME="$OFFLINE_CARGO_HOME"
  echo "Prewarming Cargo registry…"
  cargo fetch --locked || true
}

# ── Build server via Nix ─────────────────────────────────────────────────────

# Build (or reuse via the out-link) the auth_server binary for the selected
# OpenSSL linkage. Sets BIN_OUT to the built binary's path.
build_or_reuse_server() {
  local attr
  if [ "$LINK" = "dynamic" ]; then
    attr="auth-server-dynamic-openssl"
  else
    attr="auth-server-static-openssl"
  fi

  OUT_LINK="$REPO_ROOT/result-server-${LINK}"

  nix-build -I "nixpkgs=${PIN_URL}" "$REPO_ROOT/default.nix" -A "$attr" -o "$OUT_LINK"
  REAL_SERVER=$(readlink -f "$OUT_LINK" || echo "$OUT_LINK")
  BIN_OUT="$REAL_SERVER/bin/auth_server"

  if [ ! -f "$BIN_OUT" ]; then
    echo "ERROR: auth_server binary not found at $BIN_OUT" >&2
    exit 1
  fi
  echo "Server binary: $BIN_OUT"
}

# ── Stage binary where cargo-deb/rpm expect it ───────────────────────────────

# cargo-deb/cargo-generate-rpm look under target/{,<triple>/}release; copy the
# Nix-built binary into every location either tool might probe.
stage_binary() {
  local host_triple
  host_triple=$(rustc -vV 2>/dev/null | awk '/host:/ {print $2}' || echo "")
  mkdir -p "server/target/release" "target/release"
  if [ -n "$host_triple" ]; then
    mkdir -p "server/target/${host_triple}/release" "target/${host_triple}/release"
    cp -f "$BIN_OUT" "server/target/${host_triple}/release/auth_server"
    cp -f "$BIN_OUT" "target/${host_triple}/release/auth_server"
  fi
  cp -f "$BIN_OUT" "server/target/release/auth_server"
  cp -f "$BIN_OUT" "target/release/auth_server"
}

# ── GPG signing ──────────────────────────────────────────────────────────────

# Detach-sign a file with the CI signing key. No-op when no key/gpg available.
gpg_sign_file() {
  local file="$1"
  if [ -z "${GPG_SIGNING_KEY:-}" ] || ! command -v gpg >/dev/null 2>&1; then
    return 0
  fi
  echo "GPG-signing: $file"
  # (Re-)import the signing key in the current $HOME. This is necessary because
  # build_deb / build_rpm reset HOME to $TMPDIR for Cargo, so GPG's default
  # keyring (~/.gnupg) is a fresh empty directory — different from the one where
  # crazy-max/ghaction-import-gpg originally imported the key.
  if ! echo "$GPG_SIGNING_KEY" | gpg --batch --import 2>/dev/null; then
    # Key may be stored base64-encoded; try decoding before importing.
    echo "$GPG_SIGNING_KEY" | base64 --decode | gpg --batch --import
  fi
  if [ -n "${GPG_SIGNING_KEY_PASSPHRASE:-}" ]; then
    echo "$GPG_SIGNING_KEY_PASSPHRASE" | gpg --batch --yes \
      --passphrase-fd 0 --pinentry-mode loopback \
      --detach-sign --armor "$file"
  else
    gpg --batch --yes --detach-sign --armor "$file"
  fi
}

# ── DEB packaging ────────────────────────────────────────────────────────────

# Package the staged binary as a .deb (no rebuild), checksum and sign it.
build_deb() {
  # Cargo needs a writable HOME; CI runners may have a read-only/shared one.
  export HOME="${TMPDIR:-/tmp}"
  export CARGO_HOME="$HOME/cargo-home"
  mkdir -p "$CARGO_HOME"

  if command -v cargo-deb >/dev/null 2>&1; then
    CARGO_DEB="cargo-deb"
  else
    ensure_modern_rust
    cargo install cargo-deb --locked || true
    # FIX: `cargo install` drops the binary into $CARGO_HOME/bin, which is not
    # guaranteed to be on PATH after HOME was reset above — without this,
    # `cargo deb` may fail to resolve the freshly installed subcommand.
    export PATH="$CARGO_HOME/bin:$PATH"
    CARGO_DEB="cargo deb"
  fi

  VERSION_STR=$(bash "$REPO_ROOT/.github/scripts/release/get_version.sh")
  OUT_DIR="$REPO_ROOT/result-deb-${LINK}"
  mkdir -p "$OUT_DIR"

  echo "Building DEB for auth_server v${VERSION_STR} (link=$LINK)…"
  pushd "$REPO_ROOT/server" >/dev/null

  # shellcheck disable=SC2086
  $CARGO_DEB \
    --no-build \
    --target "$(rustc -vV 2>/dev/null | awk '/host:/ {print $2}')" \
    -p auth_server \
    --output "$OUT_DIR/"

  popd >/dev/null

  DEB_FILE=$(find "$OUT_DIR" -maxdepth 1 -name '*.deb' | head -1)
  if [ -z "$DEB_FILE" ]; then
    echo "ERROR: DEB file not found in $OUT_DIR" >&2
    exit 1
  fi

  # Compute SHA256
  sum=$(sha256sum "$DEB_FILE" | awk '{print $1}')
  echo "$sum $(basename "$DEB_FILE")" >"$DEB_FILE.sha256"
  echo "Built DEB: $DEB_FILE (sha256: $sum)"

  gpg_sign_file "$DEB_FILE"
}

# ── RPM packaging ────────────────────────────────────────────────────────────

# Package the staged binary as an .rpm, checksum and sign it. Resolves
# cargo-generate-rpm from PATH, then a Nix derivation, then cargo install.
build_rpm() {
  export HOME="${TMPDIR:-/tmp}"
  export CARGO_HOME="$HOME/cargo-home"
  mkdir -p "$CARGO_HOME"

  VERSION_STR=$(bash "$REPO_ROOT/.github/scripts/release/get_version.sh")
  OUT_DIR="$REPO_ROOT/result-rpm-${LINK}"
  mkdir -p "$OUT_DIR"

  echo "Building RPM for auth_server v${VERSION_STR} (link=$LINK)…"
  pushd "$REPO_ROOT" >/dev/null

  # Use cargo-generate-rpm from Nix derivation
  CARGO_GENERATE_RPM_BIN=""
  if command -v cargo-generate-rpm >/dev/null 2>&1; then
    CARGO_GENERATE_RPM_BIN="cargo-generate-rpm"
  else
    RPM_FROM_NIX=$(nix-build -I "nixpkgs=${PIN_URL}" --option substituters "" "$REPO_ROOT/default.nix" -A cargoGenerateRpmTool --no-out-link 2>/dev/null || true)
    if [ -n "$RPM_FROM_NIX" ] && [ -x "$RPM_FROM_NIX/bin/cargo-generate-rpm" ]; then
      CARGO_GENERATE_RPM_BIN="$RPM_FROM_NIX/bin/cargo-generate-rpm"
    else
      ensure_modern_rust
      cargo install cargo-generate-rpm --version "0.16.0" --locked || true
      CARGO_GENERATE_RPM_BIN="$CARGO_HOME/bin/cargo-generate-rpm"
    fi
  fi

  "$CARGO_GENERATE_RPM_BIN" \
    -p server \
    -o "$OUT_DIR/"

  popd >/dev/null

  RPM_FILE=$(find "$OUT_DIR" -maxdepth 1 -name '*.rpm' | head -1)
  if [ -z "$RPM_FILE" ]; then
    echo "ERROR: RPM file not found in $OUT_DIR" >&2
    exit 1
  fi

  sum=$(sha256sum "$RPM_FILE" | awk '{print $1}')
  echo "$sum $(basename "$RPM_FILE")" >"$RPM_FILE.sha256"
  echo "Built RPM: $RPM_FILE (sha256: $sum)"

  gpg_sign_file "$RPM_FILE"
}

# ── Main ─────────────────────────────────────────────────────────────────────

prewarm_cargo_registry || true
build_or_reuse_server
stage_binary

if [ "$FORMAT" = "deb" ]; then
  build_deb
elif [ "$FORMAT" = "rpm" ]; then
  build_rpm
fi
diff --git a/.github/scripts/package/package_deb.sh b/.github/scripts/package/package_deb.sh
new file mode 100755
index 0000000..919d217
--- /dev/null
+++ b/.github/scripts/package/package_deb.sh
@@ -0,0 +1,5 @@
#!/usr/bin/env bash
# Build Debian package for Cosmian Authentication Server
set -euo pipefail
SCRIPT_DIR=$(cd "$(dirname "$0")" && pwd)
"$SCRIPT_DIR/package_common.sh" --format deb "$@"
diff --git a/.github/scripts/package/package_dmg.sh b/.github/scripts/package/package_dmg.sh
new file mode 100755
index 0000000..2edb018
--- /dev/null
+++ b/.github/scripts/package/package_dmg.sh
@@ -0,0 +1,95 @@
#!/usr/bin/env bash
# Build macOS DMG for Cosmian Authentication Server via cargo-packager inside nix-shell.
set -euo pipefail

SCRIPT_DIR=$(cd "$(dirname "$0")" && pwd)
REPO_ROOT=$(cd "$SCRIPT_DIR/../../.." && pwd)
cd "$REPO_ROOT"
source "$REPO_ROOT/.github/scripts/common.sh"

LINK="static"
while [ $# -gt 0 ]; do
  case "$1" in
    -l | --link)
      LINK="${2:-}"
      shift 2 || true
      ;;
    *) shift ;;
  esac
done

# Only supported on macOS
if [ "$(uname)" != "Darwin" ]; then
  echo "Error: DMG packaging is only supported on macOS." >&2
  exit 1
fi

ensure_macos_sdk_env
ensure_macos_frameworks_ldflags

VERSION_STR=$(bash "$REPO_ROOT/.github/scripts/release/get_version.sh")

# Build or reuse server binary via Nix
if [ "$LINK" = "dynamic" ]; then
  ATTR="auth-server-dynamic-openssl"
else
  ATTR="auth-server-static-openssl"
fi
OUT_LINK="$REPO_ROOT/result-server-${LINK}"
nix-build -I "nixpkgs=${PIN_URL}" -A "$ATTR" -o "$OUT_LINK"
REAL_OUT=$(readlink -f "$OUT_LINK" || echo "$OUT_LINK")
BIN_OUT="$REAL_OUT/bin/auth_server"

# Stage binary where cargo-packager expects cargo-built output.
HOST_TRIPLE=$(rustc -vV 2>/dev/null | awk '/host:/ {print $2}' || echo "")
mkdir -p "server/target/release" "target/release"
[ -n "$HOST_TRIPLE" ] && mkdir -p "server/target/$HOST_TRIPLE/release" && cp -f "$BIN_OUT" "server/target/$HOST_TRIPLE/release/auth_server"
cp -f "$BIN_OUT" "server/target/release/auth_server"
cp -f "$BIN_OUT" "target/release/auth_server"

# Fresh HOME for Cargo; system paths first so hdiutil/osascript are found.
export HOME="${TMPDIR:-/tmp}"
export CARGO_HOME="$HOME/cargo-home"
mkdir -p "$CARGO_HOME"
export PATH="/usr/bin:/bin:/usr/sbin:/sbin:$PATH"

echo "Building DMG for auth_server v${VERSION_STR} (link=${LINK})…"

# Use cargo-packager if available
if command -v cargo-packager >/dev/null 2>&1; then
  PACKAGER="cargo-packager"
else
  echo "cargo-packager not found; install it first or add it to nix shell" >&2
  exit 1
fi

$PACKAGER \
  --manifest-path server/Cargo.toml \
  --release \
  --formats dmg

OUT_DIR="$REPO_ROOT/result-dmg-${LINK}"
mkdir -p "$OUT_DIR"
# NOTE(review): discovering the built DMG via "-newer Cargo.toml" assumes the
# workspace manifest predates this build — confirm this heuristic holds when
# Cargo.toml is touched during CI checkout.
find "$REPO_ROOT" -maxdepth 4 -name '*.dmg' -newer "$REPO_ROOT/Cargo.toml" 2>/dev/null | while IFS= read -r dmg; do
  cp -f "$dmg" "$OUT_DIR/"
  sum=$(shasum -a 256 "$dmg" | awk '{print $1}')
  echo "$sum $(basename "$dmg")" >"$OUT_DIR/$(basename "$dmg").sha256"
  echo "Built DMG: $dmg (sha256: $sum)"
done

# GPG sign
if [ -n "${GPG_SIGNING_KEY:-}" ] && command -v gpg >/dev/null 2>&1; then
  # (Re-)import the signing key in the current $HOME (export HOME="${TMPDIR}" above
  # means the default keyring is a fresh empty directory).
  if ! echo "$GPG_SIGNING_KEY" | gpg --batch --import 2>/dev/null; then
    echo "$GPG_SIGNING_KEY" | base64 --decode | gpg --batch --import
  fi
  find "$OUT_DIR" -name '*.dmg' | while IFS= read -r dmg; do
    if [ -n "${GPG_SIGNING_KEY_PASSPHRASE:-}" ]; then
      echo "$GPG_SIGNING_KEY_PASSPHRASE" | gpg --batch --yes \
        --passphrase-fd 0 --pinentry-mode loopback \
        --detach-sign --armor "$dmg"
    else
      gpg --batch --yes --detach-sign --armor "$dmg"
    fi
  done
fi
diff --git a/.github/scripts/package/package_rpm.sh b/.github/scripts/package/package_rpm.sh
new file mode 100755
index 0000000..4bb9f36
--- /dev/null
+++ b/.github/scripts/package/package_rpm.sh
@@ -0,0 +1,5 @@
#!/usr/bin/env bash
# Build RPM package for Cosmian Authentication Server
set -euo pipefail
SCRIPT_DIR=$(cd "$(dirname "$0")" && pwd)
"$SCRIPT_DIR/package_common.sh" --format rpm "$@"
diff --git a/.github/scripts/package/smoke_test_deb.sh b/.github/scripts/package/smoke_test_deb.sh
new file mode 100755
index 0000000..5c6b265
--- /dev/null
+++ b/.github/scripts/package/smoke_test_deb.sh
@@ -0,0 +1,53 @@
#!/usr/bin/env bash
# Smoke-test for the auth_server Debian package
# Verifies binary presence, ELF properties, and GLIBC version.
set -euo pipefail

DEB_FILE="${1:-}"
if [ -z "$DEB_FILE" ] || [ ! -f "$DEB_FILE" ]; then
  echo "Usage: $0 <path-to-deb>" >&2
  exit 1
fi

TMPDIR_EXTRACT=$(mktemp -d)
trap 'rm -rf "$TMPDIR_EXTRACT"' EXIT

echo "==========================================="
echo "Smoke-testing DEB: $DEB_FILE"
echo "==========================================="

# Extract package.
# NOTE(review): the `ar -x` fallback only unpacks the outer archive members
# (control.tar.*, data.tar.*) — the binary search below would then miss
# auth_server; and `|| true` masks a double failure. Confirm dpkg-deb is
# always available in the nix-shell that runs this.
dpkg-deb --extract "$DEB_FILE" "$TMPDIR_EXTRACT" || ar -x "$DEB_FILE" --output="$TMPDIR_EXTRACT" || true
# Find binary
BIN=$(find "$TMPDIR_EXTRACT" -type f -name 'auth_server' | head -1)
if [ -z "$BIN" ]; then
  echo "ERROR: auth_server binary not found inside $DEB_FILE" >&2
  exit 1
fi
echo "Binary: $BIN"

file "$BIN"
readelf -h "$BIN" | head -20 || true

# GLIBC version check (max GLIBC <= 2.34 for Rocky Linux 9 compatibility)
MAX_GLIBC=$(readelf -sW "$BIN" | grep -o 'GLIBC_[0-9][0-9.]*' | sed 's/^GLIBC_//' | sort -V | tail -n1 || true)
if [ -n "$MAX_GLIBC" ]; then
  echo "Max GLIBC: GLIBC_$MAX_GLIBC"
  if [ "$(printf '%s\n' "$MAX_GLIBC" "2.34" | sort -V | tail -n1)" != "2.34" ]; then
    echo "ERROR: GLIBC $MAX_GLIBC > 2.34 — not compatible with Rocky Linux 9" >&2
    exit 1
  fi
  echo "GLIBC version check PASSED ($MAX_GLIBC <= 2.34)"
fi

# ELF interpreter check: must NOT be in /nix/store
INTERP=$(readelf -l "$BIN" | sed -n 's/^.*interpreter: \(.*\)]$/\1/p' || true)
if echo "$INTERP" | grep -q "/nix/store/"; then
  echo "ERROR: ELF interpreter is in Nix store: $INTERP" >&2
  exit 1
fi
echo "ELF interpreter: ${INTERP:-}"

echo "==========================================="
echo "Smoke test PASSED for $DEB_FILE"
echo "==========================================="
diff --git a/.github/scripts/package/smoke_test_dmg.sh b/.github/scripts/package/smoke_test_dmg.sh
new file mode 100755
index 0000000..2a2fe9a
--- /dev/null
+++ b/.github/scripts/package/smoke_test_dmg.sh
@@ -0,0 +1,32 @@
#!/usr/bin/env bash
# Smoke-test for the macOS DMG package
set -euo pipefail

DMG_FILE="${1:-}"
if [ -z "$DMG_FILE" ] || [ ! -f "$DMG_FILE" ]; then
  echo "Usage: $0 <path-to-dmg>" >&2
  exit 1
fi

echo "==========================================="
echo "Smoke-testing DMG: $DMG_FILE"
echo "==========================================="

MOUNT_POINT=$(mktemp -d)
# Always detach the image before removing the mount point.
trap 'hdiutil detach "$MOUNT_POINT" 2>/dev/null || true; rm -rf "$MOUNT_POINT"' EXIT

hdiutil attach "$DMG_FILE" -mountpoint "$MOUNT_POINT" -nobrowse -quiet

BIN=$(find "$MOUNT_POINT" -type f -name 'auth_server' | head -1)
if [ -z "$BIN" ]; then
  echo "ERROR: auth_server binary not found in DMG $DMG_FILE" >&2
  exit 1
fi
echo "Binary: $BIN"

file "$BIN"
otool -L "$BIN" || true

echo "==========================================="
echo "Smoke test PASSED for $DMG_FILE"
echo "==========================================="
diff --git a/.github/scripts/package/smoke_test_rpm.sh b/.github/scripts/package/smoke_test_rpm.sh
new file mode 100755
index 0000000..7095bf4
--- /dev/null
+++ b/.github/scripts/package/smoke_test_rpm.sh
@@ -0,0 +1,49 @@
#!/usr/bin/env bash
# Smoke-test for the auth_server RPM package
set -euo pipefail

RPM_FILE="${1:-}"
if [ -z "$RPM_FILE" ] || [ ! -f "$RPM_FILE" ]; then
  echo "Usage: $0 <path-to-rpm>" >&2
  exit 1
fi

TMPDIR_EXTRACT=$(mktemp -d)
trap 'rm -rf "$TMPDIR_EXTRACT"' EXIT

echo "==========================================="
echo "Smoke-testing RPM: $RPM_FILE"
echo "==========================================="

# Extract RPM (cpio -D requires GNU cpio >= 2.12, provided by the nix-shell).
rpm2cpio "$RPM_FILE" | cpio -idmv -D "$TMPDIR_EXTRACT" 2>/dev/null || true
BIN=$(find "$TMPDIR_EXTRACT" -type f -name 'auth_server' | head -1)
if [ -z "$BIN" ]; then
  echo "ERROR: auth_server binary not found inside $RPM_FILE" >&2
  exit 1
fi
echo "Binary: $BIN"

file "$BIN"
readelf -h "$BIN" | head -20 || true

MAX_GLIBC=$(readelf -sW "$BIN" | grep -o 'GLIBC_[0-9][0-9.]*' | sed 's/^GLIBC_//' | sort -V | tail -n1 || true)
if [ -n "$MAX_GLIBC" ]; then
  echo "Max GLIBC: GLIBC_$MAX_GLIBC"
  if [ "$(printf '%s\n' "$MAX_GLIBC" "2.34" | sort -V | tail -n1)" != "2.34" ]; then
    echo "ERROR: GLIBC $MAX_GLIBC > 2.34 — not compatible with Rocky Linux 9" >&2
    exit 1
  fi
  echo "GLIBC version check PASSED ($MAX_GLIBC <= 2.34)"
fi

INTERP=$(readelf -l "$BIN" | sed -n 's/^.*interpreter: \(.*\)]$/\1/p' || true)
if echo "$INTERP" | grep -q "/nix/store/"; then
  echo "ERROR: ELF interpreter is in Nix store: $INTERP" >&2
  exit 1
fi
echo "ELF interpreter: ${INTERP:-}"

echo "==========================================="
echo "Smoke test PASSED for $RPM_FILE"
echo "==========================================="
diff --git a/.github/scripts/release/get_version.sh b/.github/scripts/release/get_version.sh
new file mode 100755
index 0000000..71e83e4
--- /dev/null
+++ b/.github/scripts/release/get_version.sh
@@ -0,0 +1,21 @@
#!/usr/bin/env bash
# Extract version from workspace Cargo.toml
set -euo pipefail

REPO_ROOT=$(cd "$(dirname "$0")/../../.." && pwd)
CARGO_TOML="$REPO_ROOT/Cargo.toml"

if [ ! -f "$CARGO_TOML" ]; then
  echo "Error: Cargo.toml not found at $CARGO_TOML" >&2
  exit 1
fi

# Extract version from the [workspace.package] section only.
# FIX: the previous `grep -A 20` approach silently broke when the section grew
# past 20 lines, or could pick up a `version` key from a neighbouring section.
# This awk parse is scoped strictly to [workspace.package].
VERSION=$(awk -F'"' '
  /^\[/ { in_pkg = ($0 ~ /^\[workspace\.package\]/) }
  in_pkg && /^version[[:space:]]*=/ { print $2; exit }
' "$CARGO_TOML")

if [ -z "$VERSION" ]; then
  echo "Error: Could not extract version from $CARGO_TOML" >&2
  exit 1
fi

echo "$VERSION"
diff --git a/.github/scripts/test/test_all.sh b/.github/scripts/test/test_all.sh
new file mode 100755
index 0000000..9d3a7e6
--- /dev/null
+++ b/.github/scripts/test/test_all.sh
@@ -0,0 +1,15 @@
#!/usr/bin/env bash
# Run all authentication server tests
set -euo pipefail

SCRIPT_DIR=$(cd "$(dirname "$0")" && pwd)
REPO_ROOT=$(cd "$SCRIPT_DIR/../../.." && pwd)
source "$REPO_ROOT/.github/scripts/common.sh"
cd "$REPO_ROOT"

echo "Running all authentication server tests…"

# SQLite tests
bash "$SCRIPT_DIR/test_sqlite.sh"

echo "All tests completed successfully."
diff --git a/.github/scripts/test/test_docker_image.sh b/.github/scripts/test/test_docker_image.sh
new file mode 100755
index 0000000..9c5d41b
--- /dev/null
+++ b/.github/scripts/test/test_docker_image.sh
@@ -0,0 +1,38 @@
#!/usr/bin/env bash
# Smoke-test the Docker image for auth_server
set -euo pipefail

SCRIPT_DIR=$(cd "$(dirname "$0")" && pwd)
REPO_ROOT=$(cd "$SCRIPT_DIR/../../.." && pwd)

IMAGE_NAME="${DOCKER_IMAGE_NAME:-cosmian-auth-server:latest}"
PORT="${AUTH_SERVER_PORT:-9005}"

echo "=========================================="
echo "Testing Docker image: $IMAGE_NAME"
echo "=========================================="

# Start container in background (--rm: container is removed once stopped).
CID=$(docker run -d --rm -p "${PORT}:${PORT}" "$IMAGE_NAME" 2>/dev/null)
echo "Container ID: $CID"
trap 'docker stop "$CID" 2>/dev/null || true' EXIT

# Wait for readiness (up to 30s); the health probe below decides pass/warn.
echo "Waiting for server to start…"
for i in $(seq 1 30); do
  if curl -sf "http://127.0.0.1:${PORT}/health" >/dev/null 2>&1; then
    echo "Server is ready (attempt $i)"
    break
  fi
  sleep 1
done

# Basic health check
HTTP_CODE=$(curl -s -o /dev/null -w '%{http_code}' "http://127.0.0.1:${PORT}/health" || echo "000")
if [ "$HTTP_CODE" = "200" ]; then
  echo "Health check PASSED (HTTP $HTTP_CODE)"
else
  echo "WARNING: Health check returned HTTP $HTTP_CODE (server may still be functional)"
fi

echo "Docker smoke test completed."
diff --git a/.github/scripts/test/test_psql.sh b/.github/scripts/test/test_psql.sh
new file mode 100755
index 0000000..9d06352
--- /dev/null
+++ b/.github/scripts/test/test_psql.sh
@@ -0,0 +1,22 @@
#!/usr/bin/env bash
# Run authentication server tests with PostgreSQL backend
set -euo pipefail

SCRIPT_DIR=$(cd "$(dirname "$0")" && pwd)
REPO_ROOT=$(cd "$SCRIPT_DIR/../../.." && pwd)
source "$REPO_ROOT/.github/scripts/common.sh"
cd "$REPO_ROOT"

POSTGRES_HOST="${POSTGRES_HOST:-127.0.0.1}"
POSTGRES_PORT="${POSTGRES_PORT:-5432}"

echo "=========================================="
echo "Running PostgreSQL backend tests"
echo " Host: $POSTGRES_HOST:$POSTGRES_PORT"
echo "=========================================="

export TEST_POSTGRES_URL="postgresql://auth:auth@${POSTGRES_HOST}:${POSTGRES_PORT}/auth"

cargo test --workspace --lib -- --nocapture

echo "PostgreSQL tests completed."
diff --git a/.github/scripts/test/test_sqlite.sh b/.github/scripts/test/test_sqlite.sh new file mode 100755 index 0000000..c1a3a91 --- /dev/null +++ b/.github/scripts/test/test_sqlite.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash +# Run authentication server tests with SQLite backend +set -euo pipefail + +SCRIPT_DIR=$(cd "$(dirname "$0")" && pwd) +REPO_ROOT=$(cd "$SCRIPT_DIR/../../.." && pwd) +source "$REPO_ROOT/.github/scripts/common.sh" +cd "$REPO_ROOT" + +echo "==========================================" +echo "Running SQLite backend tests" +echo "==========================================" + +cargo test --workspace --lib -- --nocapture + +echo "SQLite tests completed." diff --git a/.github/workflows/cargo-publish.yml b/.github/workflows/cargo-publish.yml new file mode 100644 index 0000000..91a5536 --- /dev/null +++ b/.github/workflows/cargo-publish.yml @@ -0,0 +1,63 @@ +--- +name: Cargo Publish Workspace + +on: + workflow_call: + inputs: + toolchain: + required: true + type: string + secrets: + token: + required: true + +jobs: + publish: + runs-on: ubuntu-latest + steps: + - name: Free Disk Space (Ubuntu) + uses: jlumbroso/free-disk-space@main + with: + tool-cache: false + android: true + dotnet: true + haskell: true + large-packages: true + docker-images: true + swap-storage: true + + - name: Manual cleanup for extra space + run: | + sudo rm -rf /usr/share/dotnet + sudo rm -rf /usr/local/lib/android + sudo rm -rf /opt/ghc + sudo rm -rf /opt/hostedtoolcache/CodeQL + sudo docker image prune --all --force + df -h + + - uses: actions/checkout@v6 + with: + submodules: recursive + + - uses: dtolnay/rust-toolchain@master + with: + toolchain: ${{ inputs.toolchain }} + components: rustfmt, clippy + + - name: Publishing - dry run + if: startsWith(github.ref, 'refs/tags/') != true + shell: bash + run: | + cargo publish --workspace --dry-run + + - name: Publishing + if: startsWith(github.ref, 'refs/tags/') + shell: bash + env: + CARGO_REGISTRY_TOKEN: ${{ secrets.token }} + 
run: | + cargo publish --workspace + + - name: Check disk space after build + if: always() + run: df -h diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml new file mode 100644 index 0000000..7b13b6d --- /dev/null +++ b/.github/workflows/cla.yml @@ -0,0 +1,25 @@ +--- +name: CLA Assistant +on: + workflow_call: + +permissions: + actions: write + contents: write + pull-requests: write + statuses: write + +jobs: + cla-assistant: + runs-on: ubuntu-latest + steps: + - name: CLA Assistant + uses: contributor-assistant/github-action@v2.6.1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }} + with: + path-to-signatures: signatures/version1/cla.json + path-to-document: https://github.com/Cosmian/authentication/blob/main/CLA.md + branch: cla-signatures + allowlist: user1,bot* diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml new file mode 100644 index 0000000..6f85a87 --- /dev/null +++ b/.github/workflows/main.yml @@ -0,0 +1,17 @@ +--- +name: CI + +on: + push: + branches: + - main + - develop + pull_request: + workflow_dispatch: + +jobs: + ci: + uses: ./.github/workflows/main_base.yml + secrets: inherit + with: + toolchain: stable diff --git a/.github/workflows/main_base.yml b/.github/workflows/main_base.yml new file mode 100644 index 0000000..dfdd099 --- /dev/null +++ b/.github/workflows/main_base.yml @@ -0,0 +1,78 @@ +--- +name: CI checks + +on: + workflow_call: + inputs: + toolchain: + required: true + type: string + +jobs: + cla-assistant: + if: | + (github.event.pull_request.head.repo.full_name != github.repository) + && ((github.event.comment.body == 'recheck' || github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA') + || github.event_name == 'pull_request_target') + uses: ./.github/workflows/cla.yml + secrets: inherit + + cargo-deny: + uses: Cosmian/reusable_workflows/.github/workflows/cargo-audit.yml@develop + name: Security Audit + with: + 
toolchain: ${{ inputs.toolchain }} + + cargo-machete: + uses: Cosmian/reusable_workflows/.github/workflows/cargo-machete.yml@develop + with: + toolchain: ${{ inputs.toolchain }} + + cargo-clippy: + name: Cargo clippy - ${{ matrix.runner }} + runs-on: ${{ matrix.runner }} + strategy: + fail-fast: false + matrix: + runner: [ubuntu-24.04, ubuntu-24.04-arm, macos-15] + + steps: + - uses: dtolnay/rust-toolchain@master + with: + toolchain: ${{ inputs.toolchain }} + components: rustfmt, clippy + + - uses: actions/checkout@v6 + with: + submodules: recursive + + - name: Check code format + run: cargo fmt --all -- --check --color always + + - name: Static analysis + run: cargo clippy --workspace --all-targets -- -D warnings + + # - name: Static analysis all features + # run: cargo clippy --workspace --all-targets --no-default-features --features database,rustls -- -D warnings + + cargo-test: + name: Cargo test - ${{ matrix.runner }} + runs-on: ${{ matrix.runner }} + strategy: + fail-fast: false + matrix: + runner: [ubuntu-24.04, ubuntu-24.04-arm, macos-15] + + steps: + - uses: dtolnay/rust-toolchain@master + with: + toolchain: ${{ inputs.toolchain }} + components: rustfmt, clippy + + - uses: actions/checkout@v6 + with: + submodules: recursive + + - name: Run unit tests + run: | + cargo test --workspace --lib -- --nocapture diff --git a/.github/workflows/packaging-docker.yml b/.github/workflows/packaging-docker.yml new file mode 100644 index 0000000..cff7679 --- /dev/null +++ b/.github/workflows/packaging-docker.yml @@ -0,0 +1,236 @@ +--- +name: Packaging - Docker + +on: + workflow_call: + inputs: + toolchain: + required: true + type: string + default: stable + workflow_dispatch: + inputs: + toolchain: + description: Rust toolchain to use (e.g., stable, 1.86.0) + required: true + type: string + default: stable + +jobs: + nix-docker-image: + name: docker-${{ matrix.runner }} + runs-on: ${{ matrix.runner }} + permissions: + contents: read + packages: write + id-token: write #
Required for cosign keyless signing + strategy: + fail-fast: false + matrix: + runner: [ubuntu-24.04, ubuntu-24.04-arm] + + steps: + - name: Derive architecture from runner + id: arch + run: | + set -euo pipefail + if [[ "${{ matrix.runner }}" == *arm* ]]; then + echo "arch=arm64" >> "$GITHUB_OUTPUT" + echo "platform=linux/arm64" >> "$GITHUB_OUTPUT" + echo "suffix=-arm64" >> "$GITHUB_OUTPUT" + else + echo "arch=amd64" >> "$GITHUB_OUTPUT" + echo "platform=linux/amd64" >> "$GITHUB_OUTPUT" + echo "suffix=-amd64" >> "$GITHUB_OUTPUT" + fi + + - name: Nix installation + uses: cachix/install-nix-action@v31 + with: + extra_nix_config: | + connect-timeout = 15 + stalled-download-timeout = 15 + narinfo-cache-negative-ttl = 0 + + - uses: actions/checkout@v6 + with: + submodules: recursive + + - name: Warm Nix store + run: | + sed -n 's/.*url *= *"\(https:\/\/[^"]*\.tar\.gz\)".*/\1/p' default.nix | sort -u | while read -r url; do + dest="/tmp/$(basename "$url")" + fallback=$(echo "$url" | sed 's|https://package.cosmian.com/nixpkgs/|https://github.com/NixOS/nixpkgs/archive/|') + echo "Downloading $(basename "$url") …" + if ! 
curl --retry 3 --retry-delay 5 --retry-all-errors -fsSL -o "$dest" "$url"; then + echo "Mirror failed, falling back to GitHub …" + curl --retry 5 --retry-delay 10 --retry-max-time 300 --retry-all-errors -fsSL -o "$dest" "$fallback" + fi + nix-prefetch-url --unpack --name source "file://$dest" + rm -f "$dest" + done + + - name: Login to GHCR + uses: docker/login-action@v4 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Install Cosign + uses: sigstore/cosign-installer@v4.1.1 + + - name: Extract base Docker metadata + id: meta + uses: docker/metadata-action@v6 + with: + images: ghcr.io/cosmian/auth-server + tags: | + type=ref,event=branch + type=ref,event=pr + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + + - name: Append arch suffix to tags + id: arch_tags + run: | + set -euo pipefail + echo "tags_with_arch<<EOF" >> $GITHUB_OUTPUT + while IFS= read -r t; do + [ -n "$t" ] && echo "${t}${{ steps.arch.outputs.suffix }}" >> $GITHUB_OUTPUT + done <<< "${{ steps.meta.outputs.tags }}" + echo "EOF" >> $GITHUB_OUTPUT + + - name: Build and load Docker image + id: build + run: | + bash .github/scripts/nix.sh docker --load + + - name: Get image name and digest + id: image_info + run: | + set -euo pipefail + IMAGE_INFO=$(docker images --format "{{.Repository}}:{{.Tag}} {{.ID}}" | grep "cosmian-auth-server" | head -n1) + IMAGE_NAME=$(echo "$IMAGE_INFO" | awk '{print $1}') + IMAGE_ID=$(echo "$IMAGE_INFO" | awk '{print $2}') + echo "Original image: $IMAGE_NAME (ID: $IMAGE_ID)" + echo "image_name=$IMAGE_NAME" >> $GITHUB_OUTPUT + echo "image_id=$IMAGE_ID" >> $GITHUB_OUTPUT + + - name: Tag and push images + id: push + env: + TAGS: ${{ steps.arch_tags.outputs.tags_with_arch }} + IMAGE_NAME: ${{ steps.image_info.outputs.image_name }} + run: | + set -euo pipefail + DIGEST="" + for tag in ${TAGS}; do + echo "Tagging and pushing: $tag" + docker tag "$IMAGE_NAME" "$tag" + docker push "$tag" + # Capture
digest from the first push + if [ -z "$DIGEST" ]; then + DIGEST=$(docker inspect --format='{{index .RepoDigests 0}}' "$tag" | cut -d'@' -f2) + echo "digest=$DIGEST" >> $GITHUB_OUTPUT + fi + done + + - name: Sign Docker images with Cosign + env: + DIGEST: ${{ steps.push.outputs.digest }} + TAGS: ${{ steps.arch_tags.outputs.tags_with_arch }} + run: | + images="" + for tag in ${TAGS}; do + images+="${tag}@${DIGEST} " + done + cosign sign --yes ${images} + + - name: Select test tag + id: pick_test_tag + run: | + first=$(echo "${{ steps.arch_tags.outputs.tags_with_arch }}" | head -n1) + echo "test_tag=$first" >> $GITHUB_OUTPUT + # Make the GHCR-tagged name available locally for the test script + docker tag "${{ steps.image_info.outputs.image_name }}" "$first" + + - name: Test Docker image + env: + DOCKER_IMAGE_NAME: ${{ steps.pick_test_tag.outputs.test_tag }} + run: | + bash .github/scripts/test/test_docker_image.sh + + - name: Select tag for output + id: pick + run: | + first=$(echo "${{ steps.arch_tags.outputs.tags_with_arch }}" | head -n1) + echo "first_tag=$first" >> $GITHUB_OUTPUT + + outputs: + arch-tag: ${{ steps.pick.outputs.first_tag }} + arch: ${{ steps.arch.outputs.arch }} + + nix-docker-manifest: + name: docker-manifest + needs: nix-docker-image + runs-on: ubuntu-24.04 + permissions: + contents: read + packages: write + id-token: write # Required for cosign keyless signing + env: + REGISTRY_IMAGE: ghcr.io/cosmian/auth-server + + steps: + - name: Login to GHCR + uses: docker/login-action@v4 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Install Cosign + uses: sigstore/cosign-installer@v4.1.1 + + - name: Set up Buildx + uses: docker/setup-buildx-action@v4 + + - name: Compute tags + id: meta + uses: docker/metadata-action@v6 + with: + images: ${{ env.REGISTRY_IMAGE }} + tags: | + type=ref,event=branch + type=ref,event=pr + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + 
+ - name: Create and push manifest list + id: manifest + run: | + set -euo pipefail + echo "Tags to manifest:"; echo "${{ steps.meta.outputs.tags }}" + while IFS= read -r tag; do + [ -z "$tag" ] && continue + echo "Creating manifest for $tag" + docker buildx imagetools create \ + --tag "$tag" \ + "${tag}-amd64" \ + "${tag}-arm64" + # Get the manifest digest + DIGEST=$(docker buildx imagetools inspect "$tag" --format '{{json .Manifest}}' | jq -r '.digest') + echo "manifest_digest_${tag##*:}=$DIGEST" >> $GITHUB_OUTPUT + docker buildx imagetools inspect "$tag" + done <<< "${{ steps.meta.outputs.tags }}" + + - name: Sign multi-arch manifest with Cosign + env: + TAGS: ${{ steps.meta.outputs.tags }} + run: | + images="" + for tag in ${TAGS}; do + images+="${tag} " + done + cosign sign --yes ${images} diff --git a/.github/workflows/packaging-tests.yml b/.github/workflows/packaging-tests.yml new file mode 100644 index 0000000..120d44e --- /dev/null +++ b/.github/workflows/packaging-tests.yml @@ -0,0 +1,173 @@ +--- +name: Packaging - Tests + +on: + workflow_call: + inputs: + toolchain: + required: true + type: string + default: stable + workflow_dispatch: + inputs: + toolchain: + description: Rust toolchain to use (e.g., stable, 1.86.0) + required: true + type: string + default: stable + +jobs: + packages-test: + name: In Docker ${{ matrix.container }}-${{ matrix.link }}${{ matrix.runner == 'ubuntu-24.04-arm' && '-ARM' || '-AMD64' }} + runs-on: ${{ matrix.runner }} + container: ${{ matrix.container }} + strategy: + fail-fast: false + matrix: + container: + # Ubuntu LTS releases + - ubuntu:25.04 + - ubuntu:24.04 + - ubuntu:22.04 + # Debian stable + - debian:trixie-slim + - debian:bookworm-slim + # Rocky Linux releases + - rockylinux/rockylinux:10 + - rockylinux/rockylinux:9 + link: [static, dynamic] + runner: [ubuntu-24.04, ubuntu-24.04-arm] + + steps: + - name: Download package artifacts + uses: actions/download-artifact@v8 + with: + name: ${{ matrix.link }}_${{ 
matrix.runner }}-release + + - name: List downloaded artifacts + run: find . + + - name: Install package + shell: bash + run: | + set -ex + if [[ "${{ matrix.container }}" == rockylinux* ]]; then + for pkg in ./result-rpm-${{ matrix.link }}/*.rpm; do + rpm -qpl "$pkg" + done + rpm -i ./result-rpm-${{ matrix.link }}/*.rpm --nodeps + else + for pkg in ./result-deb-${{ matrix.link }}/*.deb; do + dpkg --contents "$pkg" + done + if [ "${{ matrix.link }}" = "dynamic" ]; then + dpkg -i ./result-deb-${{ matrix.link }}/*.deb || true + apt-get update -qq && apt-get install -f -y + else + dpkg -i ./result-deb-${{ matrix.link }}/*.deb + fi + fi + + - name: Check server binary + shell: bash + run: | + set -ex + BIN="/usr/sbin/auth_server" + if [ ! -x "$BIN" ]; then + BIN=$(find /usr -name 'auth_server' -type f 2>/dev/null | head -1) + fi + [ -x "$BIN" ] || { echo "ERROR: auth_server not found"; exit 1; } + ldd "$BIN" || true + "$BIN" --version || "$BIN" --help || true + + - name: Verify GLIBC compatibility + shell: bash + run: | + set -ex + BIN=$(find /usr -name 'auth_server' -type f 2>/dev/null | head -1) + if command -v readelf >/dev/null 2>&1 && [ -n "$BIN" ]; then + MAX_GLIBC=$(readelf -sW "$BIN" | grep -o 'GLIBC_[0-9][0-9.]*' | sed 's/^GLIBC_//' | sort -V | tail -n1 || true) + echo "Max GLIBC: ${MAX_GLIBC:-none}" + if [ -n "$MAX_GLIBC" ]; then + if [ "$(printf '%s\n' "$MAX_GLIBC" "2.34" | sort -V | tail -n1)" != "2.34" ]; then + echo "ERROR: GLIBC $MAX_GLIBC > 2.34 — not Rocky Linux 9 compatible" && exit 1 + fi + echo "GLIBC check PASSED ($MAX_GLIBC <= 2.34)" + fi + fi + + - name: Run executable file + shell: bash + run: | + set -ex + BIN="/usr/sbin/auth_server" + if [ ! -x "$BIN" ]; then + BIN=$(find /usr -name 'auth_server' -type f 2>/dev/null | head -1) + fi + # auth_server requires a config file; running without one must exit + # with a meaningful error — not a linker/runtime crash. 
+ output=$("$BIN" 2>&1 || true) + echo "$output" + echo "$output" | grep -qiE 'failed to read|auth_server\.toml|configuration' \ + || { echo "ERROR: unexpected output from auth_server"; exit 1; } + + systemd-packages-test: + name: Use GH runners ${{ matrix.link }}${{ matrix.runner == 'ubuntu-24.04-arm' && '-ARM' || '-AMD64' }} + runs-on: ${{ matrix.runner }} + strategy: + fail-fast: false + matrix: + link: [static, dynamic] + runner: [ubuntu-24.04, ubuntu-24.04-arm] + + steps: + - uses: actions/checkout@v6 + with: + submodules: recursive + + - name: Download package artifacts + uses: actions/download-artifact@v8 + with: + name: ${{ matrix.link }}_${{ matrix.runner }}-release + + - name: List downloaded artifacts + run: find . + + - name: Install package + shell: bash + run: | + set -ex + # DEB-based (Ubuntu/Debian); systemd tests run on Ubuntu GH runners only + for pkg in ./result-deb-${{ matrix.link }}/*.deb; do + dpkg --contents "$pkg" + done + if [ "${{ matrix.link }}" = "dynamic" ]; then + # Dynamic builds depend on system libs; fix with apt after attempt + sudo dpkg -i ./result-deb-${{ matrix.link }}/*.deb || true + sudo apt-get update -qq && sudo apt-get install -f -y + else + sudo dpkg -i ./result-deb-${{ matrix.link }}/*.deb + fi + + - name: Pre-check server binary + shell: bash + run: | + set -ex + BIN="/usr/sbin/auth_server" + if [ ! 
-f "$BIN" ]; then + BIN=$(find /usr/sbin /usr/bin /usr/local/bin -name 'auth_server' -type f 2>/dev/null | head -1) + fi + [ -f "$BIN" ] || { echo "ERROR: auth_server not found"; exit 1; } + sudo ldd "$BIN" || true + # Verify the binary executes and exits with a config error (not a crash) + # Binary is installed root-only (-r-x------); run via sudo + output=$(sudo "$BIN" 2>&1 || true) + echo "$output" + echo "$output" | grep -qiE 'failed to read|auth_server\.toml|configuration' \ + || { echo "ERROR: unexpected output from auth_server"; exit 1; } + + - name: systemd-analyze security auth_server.service + shell: bash + run: | + set -ex + systemd-analyze security auth_server.service diff --git a/.github/workflows/packaging.yml b/.github/workflows/packaging.yml new file mode 100644 index 0000000..a2b606f --- /dev/null +++ b/.github/workflows/packaging.yml @@ -0,0 +1,188 @@ +--- +name: Packaging + +on: + workflow_call: + inputs: + toolchain: + required: true + type: string + default: stable + workflow_dispatch: + inputs: + toolchain: + description: Rust toolchain to use (e.g., stable, 1.86.0) + required: true + type: string + default: stable + +jobs: + docker: + uses: ./.github/workflows/packaging-docker.yml + with: + toolchain: ${{ inputs.toolchain }} + permissions: + contents: read + packages: write + id-token: write + if: | + (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository && github.actor != 'dependabot[bot]') + || startsWith(github.ref, 'refs/tags/') + || github.event_name == 'workflow_dispatch' + + packages: + name: ${{ matrix.runner }}-${{ matrix.link }} + runs-on: ${{ matrix.runner }} + strategy: + fail-fast: false + matrix: + link: [static, dynamic] + runner: [ubuntu-24.04, ubuntu-24.04-arm, macos-15] + + steps: + - name: Nix installation + uses: cachix/install-nix-action@v31 + with: + extra_nix_config: | + connect-timeout = 15 + stalled-download-timeout = 15 + narinfo-cache-negative-ttl = 0 + + - uses: 
actions/checkout@v6 + with: + submodules: recursive + + - name: Warm Nix store + run: | + sed -n 's/.*url *= *"\(https:\/\/[^"]*\.tar\.gz\)".*/\1/p' default.nix | sort -u | while read -r url; do + dest="/tmp/$(basename "$url")" + fallback=$(echo "$url" | sed 's|https://package.cosmian.com/nixpkgs/|https://github.com/NixOS/nixpkgs/archive/|') + echo "Downloading $(basename "$url") …" + if ! curl --retry 3 --retry-delay 5 --retry-all-errors -fsSL -o "$dest" "$url"; then + echo "Mirror failed, falling back to GitHub …" + curl --retry 5 --retry-delay 10 --retry-max-time 300 --retry-all-errors -fsSL -o "$dest" "$fallback" + fi + nix-prefetch-url --unpack --name source "file://$dest" + rm -f "$dest" + done + + - name: Set up GPG + uses: crazy-max/ghaction-import-gpg@v7 + with: + gpg_private_key: ${{ secrets.GPG_SIGNING_KEY }} + passphrase: ${{ secrets.GPG_SIGNING_KEY_PASSPHRASE }} + + - name: List GPG keys + run: gpg -K + + - name: Install cargo-packager (macOS DMG builds) + if: runner.os == 'macOS' + run: cargo install --locked cargo-packager + + - name: Package with GPG signature + run: | + bash .github/scripts/nix.sh --link ${{ matrix.link }} package + env: + GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }} + GPG_SIGNING_KEY_PASSPHRASE: ${{ secrets.GPG_SIGNING_KEY_PASSPHRASE }} + + - name: Upload package + uses: actions/upload-artifact@v4 + with: + name: ${{ matrix.link }}_${{ matrix.runner }}-release + path: result-*-${{ matrix.link }}/* + retention-days: 1 + if-no-files-found: error + + publish-release: + if: | + (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository && github.actor != 'dependabot[bot]') + || startsWith(github.ref, 'refs/tags/') + || github.event_name == 'workflow_dispatch' + + name: Publish ${{ matrix.runner }}-${{ matrix.link }} + needs: packages + concurrency: + group: publish-release + cancel-in-progress: false + runs-on: [self-hosted, not-sgx] + container: + image: cosmian/docker_doc_ci + 
volumes: + - /home/cosmian/.ssh/id_rsa:/root/.ssh/id_rsa + + strategy: + fail-fast: false + max-parallel: 1 + matrix: + link: [static, dynamic] + runner: [ubuntu-24.04, ubuntu-24.04-arm, macos-15] + package-type: [deb, rpm, dmg] + cpu-arch: [amd64, arm64] + exclude: + - runner: ubuntu-24.04 + package-type: dmg + - runner: ubuntu-24.04-arm + package-type: dmg + - runner: macos-15 + package-type: deb + - runner: macos-15 + package-type: rpm + - runner: ubuntu-24.04 + cpu-arch: arm64 + - runner: ubuntu-24.04-arm + cpu-arch: amd64 + - runner: macos-15 + cpu-arch: arm64 + + steps: + - name: Download artifacts + uses: actions/download-artifact@v8 + with: + name: ${{ matrix.link }}_${{ matrix.runner }}-release + + - name: Display structure + run: find . -type f | head -40 + + - name: Push packages + shell: bash + run: | + set -ex + if [[ "${GITHUB_REF}" =~ 'refs/tags/' ]]; then + VERSION="${GITHUB_REF_NAME}" + else + VERSION="last_build/${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" + fi + DESTINATION_DIR="/mnt/package/authentication/${VERSION}" + ssh -o 'StrictHostKeyChecking no' -i /root/.ssh/id_rsa \ + cosmian@package.cosmian.com mkdir -p "$DESTINATION_DIR" + for ext in deb rpm dmg; do + for f in $(find . 
-name "*.${ext}" 2>/dev/null); do + scp -o 'StrictHostKeyChecking no' -i /root/.ssh/id_rsa \ + "$f" "cosmian@package.cosmian.com:${DESTINATION_DIR}/" + [ -f "${f}.sha256" ] && scp -o 'StrictHostKeyChecking no' -i /root/.ssh/id_rsa \ + "${f}.sha256" "cosmian@package.cosmian.com:${DESTINATION_DIR}/" || true + [ -f "${f}.asc" ] && scp -o 'StrictHostKeyChecking no' -i /root/.ssh/id_rsa \ + "${f}.asc" "cosmian@package.cosmian.com:${DESTINATION_DIR}/" || true + done + done + + - name: Publish GitHub Release assets + if: startsWith(github.ref, 'refs/tags/') && matrix.link == 'static' + uses: softprops/action-gh-release@v3 + with: + files: | + result-${{ matrix.package-type }}-${{ matrix.link }}/* + + cargo-publish: + uses: ./.github/workflows/cargo-publish.yml + with: + toolchain: ${{ inputs.toolchain }} + secrets: + token: ${{ secrets.CRATES_IO }} + + tests: + needs: packages + uses: ./.github/workflows/packaging-tests.yml + with: + toolchain: 1.90.0 diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml new file mode 100644 index 0000000..125cc55 --- /dev/null +++ b/.github/workflows/pr.yml @@ -0,0 +1,19 @@ +--- +name: Packaging + +on: + push: + tags: + - '**' + pull_request: + types: [opened, synchronize, reopened, ready_for_review] + workflow_call: + workflow_dispatch: + +jobs: + packaging: + if: github.event_name != 'pull_request' || github.event.pull_request.draft == false + uses: ./.github/workflows/packaging.yml + secrets: inherit + with: + toolchain: stable diff --git a/.gitignore b/.gitignore index 9f97022..691080b 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,3 @@ -target/ \ No newline at end of file +target/ +result* +.cargo_check diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..d0cf233 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,99 @@ +--- +# Pre-requisites: +# pip install pre-commit conventional-pre-commit +# pre-commit install +# pre-commit install --install-hooks -t commit-msg +# 
pre-commit autoupdate + +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +fail_fast: true +exclude: .pre-commit-config.yaml|target/ +repos: + - repo: https://github.com/compilerla/conventional-pre-commit + rev: v4.0.0 + hooks: + - id: conventional-pre-commit + stages: [commit-msg] + args: [] + + - repo: https://github.com/igorshubovych/markdownlint-cli + rev: v0.44.0 + hooks: + - id: markdownlint-fix + args: + [ + --disable=MD013, + --disable=MD033, + --disable=MD041, + --disable=MD046, + --fix, + ] + + - repo: https://github.com/jumanjihouse/pre-commit-hook-yamlfmt + rev: 0.2.3 + hooks: + - id: yamlfmt + args: [--mapping, "2", --sequence, "4", --offset, "2"] + + - repo: https://github.com/crate-ci/typos + rev: v1.31.1 + hooks: + - id: typos + exclude: nix/signing-keys/|nix/expected-hashes/ + + - repo: https://github.com/Lucas-C/pre-commit-hooks + rev: v1.5.5 + hooks: + - id: remove-crlf + - id: forbid-crlf + - id: remove-tabs + exclude: ^.git/ + - id: forbid-tabs + exclude: ^.git/ + + - repo: https://github.com/sirosen/texthooks + rev: 0.6.8 + hooks: + - id: fix-smartquotes + - id: fix-ligatures + - id: fix-spaces + - id: forbid-bidi-controls + - id: macro-expand + + - repo: https://github.com/jumanjihouse/pre-commit-hooks + rev: 3.0.0 + hooks: + - id: shellcheck + + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v5.0.0 + hooks: + - id: check-added-large-files + - id: check-ast + - id: check-byte-order-marker + - id: check-builtin-literals + - id: check-case-conflict + - id: check-docstring-first + - id: check-json + - id: check-merge-conflict + - id: check-symlinks + - id: check-toml + - id: check-vcs-permalinks + - id: check-xml + - id: check-yaml + - id: debug-statements + - id: destroyed-symlinks + - id: detect-private-key + exclude: server/tests/|nix/signing-keys/ + + - id: double-quote-string-fixer + - id: end-of-file-fixer + exclude: nix/expected-hashes/ + - id: mixed-line-ending + 
args: [--fix=lf] + exclude: nix/expected-hashes/ + - id: no-commit-to-branch + args: [--branch, main] + - id: trailing-whitespace + exclude: nix/expected-hashes/ diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 0000000..84bc5ac --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,249 @@ +# Cosmian Authentication Server — AI Agent Instructions + +> **Purpose of this file**: Single source of truth for any AI agent +> (Copilot, Cursor, Cline, Claude Code, etc.) working on the Cosmian Authentication +> Server codebase. It explains project structure, build commands, CI workflows, +> coding conventions, and troubleshooting steps. + +Cosmian Authentication Server is a high-performance authentication and session +management server written in **Rust**. It supports multiple database backends +(SQLite, PostgreSQL, MySQL), TOTP two-factor authentication, JWT-based sessions, +and a Redis session store. + +--- + +## 1. Build & test cheatsheet + +```bash +# ── Build ──────────────────────────────────────────────────────────────── +cargo build # default features (OpenSSL, database) +cargo build --features rustls # use rustls instead of OpenSSL + +# ── Test ───────────────────────────────────────────────────────────────── +cargo test --workspace --lib # run all library tests +cargo test -p auth_server # single crate + +# ── Lint ───────────────────────────────────────────────────────────────── +cargo clippy --workspace --all-targets -- -D warnings +cargo fmt --all -- --check + +# ── Run locally ────────────────────────────────────────────────────────── +cargo run --bin auth_server -- auth_server.toml + +# ── Smoke-test (expect 200 or 404, not 500) ───────────────────────────── +curl -s http://localhost:9005/health +``` + +### Pre-commit hooks + +Always install and never bypass pre-commit hooks: + +```sh +pip install pre-commit conventional-pre-commit +pre-commit install +pre-commit install --install-hooks -t commit-msg +``` + +Never use `git commit --no-verify` or `SKIP=...` to 
bypass hooks. Fix the +underlying issues instead. + +--- + +## 2. Workspace layout + +```text +client/ auth_client — authentication client library +server/ auth_server — server binary + lib + src/ + main.rs — binary entry point + lib.rs — library root + database/ — database trait and backends (SQLite, PostgreSQL, MySQL) + middleware/ — auth/JWT/session middleware + response/ — HTTP response types + server/ — server startup and config + session/ — session management + tests/ — integration tests + tls/ — TLS helpers + +nix/ Nix build expressions and expected vendor hashes + auth-server.nix — Nix derivation for auth_server binary + docker.nix — Docker image derivation + expected-hashes/ — expected sha256 hashes for reproducible builds + signing-keys/ — GPG public keys for package verification + +.github/ + scripts/ — CI and packaging scripts + common.sh — shared bash helpers + nix.sh — unified entrypoint for CI commands + release/ — version extraction and hash update scripts + package/ — DEB / RPM / DMG packaging scripts + test/ — test scripts (sqlite, psql, docker) + workflows/ — GitHub Actions workflows + reusable_scripts/ — git submodule: shared scripts with Cosmian/reusable_scripts + +default.nix — top-level Nix derivation (pins nixpkgs, builds auth-server) +shell.nix — Nix development shell +Cargo.toml — workspace manifest +``` + +--- + +## 3. Crate features + +| Feature | Default | Effect | +| ------------------ | ------- | ----------------------------------------------------------- | +| `openssl` | **on** | Use OpenSSL (vendored) for TLS; required for most deploys | +| `rustls` | off | Use rustls instead of OpenSSL | +| `database` | **on** | Compile all database backends (SQLite, PostgreSQL, MySQL) | +| `no_jwt_validation`| off | Skip JWT expiry/issuer checks — **dev/test only** | + +--- + +## 4. 
Key file map + +| Intent | File(s) | +| ------------------------------- | -------------------------------------------- | +| Server startup | `server/src/main.rs`, `server/src/lib.rs` | +| Server config struct | `server/src/server/` | +| HTTP routes & handlers | `server/src/server/` | +| Auth middleware (JWT, session) | `server/src/middleware/` | +| Database trait & backends | `server/src/database/` | +| Session management | `server/src/session/` | +| TOTP support | `server/src/totp.rs` | +| Nix derivation | `nix/auth-server.nix` | +| Nix top-level | `default.nix` | +| CI/packaging entrypoint | `.github/scripts/nix.sh` | +| Packaging scripts (DEB/RPM/DMG) | `.github/scripts/package/` | +| Test scripts | `.github/scripts/test/` | + +--- + +## 5. Nix derivation + +`nix/auth-server.nix` builds the `auth_server` binary targeting glibc 2.34 +(Rocky Linux 9 compatibility) on Linux. It uses: + +- **Pinned nixpkgs** `8b27c1239e5c421a2bbc2c65d52e4a6fbf2ff296` (matches KMS repo) +- **nixpkgs 22.05** (glibc 2.34) as the Linux stdenv +- **Rust 1.86.0** via rust-overlay +- **Vendored OpenSSL** (compiled during cargo build, no external OpenSSL needed) +- `cmake` and `perl` as native build inputs (required by `aws-lc-sys` and openssl crate) + +No FIPS / non-FIPS variants — the auth server has a single build variant. + +```bash +# Build static binary +nix-build -A auth-server-static + +# Build dynamic binary +nix-build -A auth-server-dynamic + +# Build Docker image (Linux only) +nix-build -A docker-image +``` + +--- + +## 6. 
Packaging + +```bash +# Full packaging via nix.sh: +bash .github/scripts/nix.sh --link static package # DEB + RPM on Linux, DMG on macOS +bash .github/scripts/nix.sh --link static package deb # DEB only +bash .github/scripts/nix.sh --link static package rpm # RPM only +bash .github/scripts/nix.sh --link static package dmg # DMG only (macOS) + +# Docker (Linux only): +bash .github/scripts/nix.sh docker --load +``` + +### Expected hashes (`nix/expected-hashes/`) + +| File | Purpose | +| ------------------------------------------- | ------------------------------------------ | +| `server.vendor.static.sha256` | Cargo vendor hash for static builds | +| `server.vendor.dynamic.sha256` | Cargo vendor hash for dynamic builds | +| `auth-server....sha256` | Expected binary hash for determinism check | + +When `Cargo.lock` changes, the vendor hashes become stale. Regenerate: + +```bash +# Put fake hash, run build, read "got:" error, paste correct hash +echo "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=" > nix/expected-hashes/server.vendor.static.sha256 +nix-build -A auth-server-static 2>&1 | grep "got:" +``` + +--- + +## 7. 
CI overview + +All CI runs go through `.github/scripts/nix.sh`: + +```bash +bash .github/scripts/nix.sh [--link static|dynamic] COMMAND [args] +``` + +| Command | Description | +| -------------------- | ----------------------------------------------- | +| `test` | Run all tests in nix-shell | +| `test sqlite` | SQLite backend tests only | +| `test psql` | PostgreSQL backend tests (requires server) | +| `package` | Build all packages for this platform | +| `package deb` | Build Debian package | +| `package rpm` | Build RPM package | +| `package dmg` | Build macOS DMG (macOS only) | +| `docker [opts]` | Build Docker image tarball (Linux only) | +| `update-hashes` | Regenerate expected binary hashes | + +### Workflow files + +| Workflow | Purpose | +| ------------------------- | ---------------------------------------------------- | +| `main.yml` | Push/PR trigger; calls `main_base.yml` | +| `main_base.yml` | clippy, cargo-deny, cargo-test, packaging | +| `packaging.yml` | Multi-platform packaging (Linux/ARM/macOS) + Docker | +| `packaging-tests.yml` | Install packages in Docker containers and verify | + +### Database test environment + +For PostgreSQL tests: + +| Variable | Value | +| ---------------- | ------------------------------------------ | +| `POSTGRES_HOST` | `127.0.0.1` | +| `POSTGRES_PORT` | `5432` | + +--- + +## 8. GitHub CLI — reading issues, PRs, and CI failures + +**Always use `GH_PAGER=cat`** to prevent interactive pager. The repository is +`Cosmian/authentication`. + +```bash +GH_PAGER=cat gh issue view --repo Cosmian/authentication +GH_PAGER=cat gh pr view --repo Cosmian/authentication +GH_PAGER=cat gh pr checks --repo Cosmian/authentication +GH_PAGER=cat gh run view --repo Cosmian/authentication --log-failed +``` + +--- + +## 9. Coding rules + +- **Function length**: keep functions under 100 lines; extract helpers for longer ones. +- **Imports**: Rust `use` statements go at the top of each file, never inline. 
+- **Error handling**: never ignore or skip errors in tests or builds — investigate and fix. +- **Commit scope**: minimal, focused changes. Do not refactor surrounding code alongside a bug fix. + +--- + +## 10. Common issues + +| Symptom | Cause | Fix | +| -------------------------------------------------------- | -------------------------------------------------- | ---------------------------------------------------- | +| `aws-lc-sys` / `cmake` build failure | Missing cmake/go in build env | Add `cmake` and `go` to nativeBuildInputs / shell.nix | +| Stale Nix vendor hashes after `Cargo.lock` change | Expected hash is outdated | Regenerate with fake-hash trick (see §6) | +| `tokenExpired` / JWT validation error | Feature `no_jwt_validation` disabled in prod | Check configuration; check token TTL | +| `gh` command hangs | Interactive pager opened | Use `GH_PAGER=cat gh ...` | +| Rocky Linux GLIBC compatibility error | Binary compiled against glibc > 2.34 | Ensure pkgs234 (glibc 2.34) stdenv is used in Nix | diff --git a/CHANGELOG/ci_add_packaging.md b/CHANGELOG/ci_add_packaging.md new file mode 100644 index 0000000..7066958 --- /dev/null +++ b/CHANGELOG/ci_add_packaging.md @@ -0,0 +1,21 @@ +## Bug Fixes + +- Remove unused `tokio` dev-dependency from `auth_client` and unused `base32` dependency from `auth_server` (cargo-machete) +- Add `.cargo/audit.toml` ignoring RUSTSEC-2023-0071 (`rsa` Marvin Attack — transitive via `sqlx-mysql`, no upstream fix available) +- Fix `packaging.yml` job `if` conditions: remove `github.event_name == 'workflow_call'` guards (inside a reusable workflow `github.event_name` reflects the caller's original event, not `workflow_call`); `publish-release` now only runs on tag pushes +- Fix GPG signing failure in all packaging jobs: `build_deb`/`build_rpm`/`package_dmg.sh` set `export HOME="${TMPDIR}"` for Cargo, causing GPG to use a fresh empty keyring; fix by re-importing the key from `$GPG_SIGNING_KEY` with passphrase in `gpg_sign_file()` and 
the DMG inline signing block +- Fix `aws-lc-sys v0.39.1` build failure on aarch64 Linux: `pkgs234` (nixpkgs 22.05) defaults to gcc-9.3.0 on aarch64 which is rejected due to a memcmp bug ([GCC PR#95189](https://gcc.gnu.org/bugzilla/show_bug.cgi?id=95189)); use `platform.gcc11` instead in `nix/auth-server.nix` buildPhase CC/CXX exports on aarch64 + +## CI + +- Add Nix-based CI/CD infrastructure for Cosmian Authentication Server: `default.nix`, `shell.nix`, `nix/auth-server.nix`, `nix/docker.nix`, pinned nixpkgs (glibc 2.34, Rocky Linux 9 compatibility) and rust-overlay (Rust 1.94.1) +- Add packaging scripts: `nix.sh` (main CI entrypoint), `package_common.sh`, `package_deb.sh`, `package_rpm.sh`, `package_dmg.sh` with GPG signing support +- Add smoke test scripts for DEB, RPM, DMG packages; add Docker image test script +- Add packaging workflows: `.github/workflows/packaging.yml` (DEB/RPM/DMG/Docker, Linux AMD64/ARM, macOS) and `.github/workflows/packaging-tests.yml` (install tests across Ubuntu/Debian/Rocky Linux containers) +- Add `.github/workflows/main_base.yml` reusable workflow: clippy, cargo-deny, cargo-machete, cargo test matrix, packaging +- Add `[package.metadata.deb]`, `[package.metadata.generate-rpm]`, and `[package.metadata.packager]` to `server/Cargo.toml` for DEB, RPM, and DMG packaging +- Add `pkg/auth_server.service` systemd unit and `pkg/deb/postinst` install script +- Add `.pre-commit-config.yaml` (mirrors KMS repo) and install pre-commit hooks on `ci/add_packaging` branch +- Add `AGENTS.md` following the KMS repository structure +- Pin rust-overlay to commit `a313afc` (Rust 1.94.1); use `cargoLock.lockFile` with `outputHashes` for `cosmian_logger` git dependency; set `auditable = false` to bypass cargo-auditable Rust 2024 edition limitation +- Record darwin arm64 static binary hash in `nix/expected-hashes/auth-server.static.arm64.darwin.sha256` diff --git a/Cargo.lock b/Cargo.lock index 7163f9e..37b5334 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ 
-293,7 +293,6 @@ dependencies = [ "serde", "serde_json", "thiserror 2.0.18", - "tokio", "url", ] @@ -307,7 +306,6 @@ dependencies = [ "argon2", "async-trait", "auth_client", - "base32", "base64", "chrono", "cosmian_logger", @@ -608,7 +606,8 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "cosmian_logger" version = "0.7.0" -source = "git+https://github.com/Cosmian/http_client_server.git?branch=develop#72d420e8ec2ad732bebc28e16af3f98ba3cec2da" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c4ea19c5444482c327d94474b9c7d81d5c6a9137f49b3e5e5ff35493198ccb" dependencies = [ "opentelemetry", "opentelemetry-otlp", @@ -2574,7 +2573,7 @@ dependencies = [ "security-framework", "security-framework-sys", "webpki-root-certs", - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] @@ -2585,9 +2584,9 @@ checksum = "f87165f0995f63a9fbeea62b64d10b4d9d8e78ec6d7d51fb2125fda7bb36788f" [[package]] name = "rustls-webpki" -version = "0.103.11" +version = "0.103.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20a6af516fea4b20eccceaf166e8aa666ac996208e8a644ce3ef5aa783bc7cd4" +checksum = "8279bb85272c9f10811ae6a6c547ff594d6a7f3c6c6b02ee9726d1d0dcfcdd06" dependencies = [ "aws-lc-rs", "ring", diff --git a/Cargo.toml b/Cargo.toml index 42766f9..2776c10 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,78 +14,35 @@ resolver = "2" [workspace.dependencies] actix-cors = "0.7.1" -actix-service = "2.0" actix-tls = "3.4" actix-web = { version = "4.11", default-features = false, features = [ "macros", "openssl", ] } -alcoholic_jwt = "4091" argon2 = "0.4.1" -assert_matches = "1.5.0" async-trait = "0.1" -backon = "1.6.0" -base32 = "0.5" base64 = "0.22" -bb8-redis = "0.24.0" -bitflags = "2.10" chrono = { version = "0.4", features = ["serde"] } -clap = { version = "4.5", default-features = false } -cmpv2 = { version = "0.2" } -cms = { version = "0.2.1" } -color-eyre = "0.6.2" 
cookie_store = "0.22.1" -# TODO: fix the "rev" to ensure reproducible build of docker images, for both "cosmian_crypto_core" and "cosmian_logger" -cosmian_logger = { git = "https://github.com/Cosmian/http_client_server.git", branch = "develop", package = "cosmian_logger" } -der = "0.7.6" -derive_builder = "0.20.2" -derive_more = "2.0" -directories = "6.0" -enum_dispatch = "0.3.13" -eyre = "0.6.12" -flate2 = "1" +cosmian_logger = "0.7.0" futures = "0.3.31" -hei-lib = { path = "crate/hei/hei-lib" } -hei-cli = { path = "crate/hei/hei-cli" } hex = { version = "0.4.3", features = ["serde"] } -humantime = "2.1.0" -humantime-serde = "1.1.1" -iso8601 = { version = "0.6", features = ["serde"] } -itertools = "0.14.0" jsonwebtoken = "10.3.0" -num_cpus = "1.17.0" openssl = { version = "0.10" } -p256 = "0.13" -rand = "0.9.1" redis = { version = "0.32.6", features = ["aio", "tokio-comp", "bb8"] } reqwest = { version = "0.13.2" } -rsa = "0.9" rustls = "0.23.35" -seq-macro = "0.3.6" serde = { version = "1.0", features = ["derive"] } -serde_bytes = "0.11.19" serde_json = "1.0" -serial_test = "3.2.0" -sha1 = "0.10" sha2 = "0.10.9" -spki = "0.7" sqlx = { version = "0.8.6", features = [ "runtime-tokio", "postgres", ], default-features = false } -strum = { version = "0.26.3", features = ["derive"] } -tar = "0.4" thiserror = "2.0.11" # It is important to keep this version in sync with that of actix-web # use `cargo tree -i tokio` to check there is no other version of tokio tokio = { version = "1.48.0", features = ["full"] } -tokio-util = "0.7.18" totp-rs = "5.7.1" toml = "0.9.8" -tracing = "0.1" url = { version = "2.5", features = ["serde"] } -utoipa = { version = "5.4.0", features = ["actix_extras"] } -utoipa-swagger-ui = { version = "9.0.2", features = ["actix-web"] } -x509-cert = { version = "0.2" } -x509-tsp = "0.1" -zeroize = "1.8" diff --git a/client/Cargo.toml b/client/Cargo.toml index 806fc17..df50516 100644 --- a/client/Cargo.toml +++ b/client/Cargo.toml @@ -6,7 +6,7 @@ 
rust-version.workspace = true authors.workspace = true license.workspace = true repository.workspace = true -description = "Authentication Client – shared models, DTOs and HTTP client for the Auth Authentication Server" +description = "Authentication Client - shared models, DTOs and HTTP client for the Auth Authentication Server" [features] default = [] @@ -35,4 +35,3 @@ url = { workspace = true } path = "src/lib.rs" [dev-dependencies] -tokio = { workspace = true } diff --git a/default.nix b/default.nix new file mode 100644 index 0000000..126e80a --- /dev/null +++ b/default.nix @@ -0,0 +1,203 @@ +{ + # Pin nixpkgs so nix-build works without '-I nixpkgs=…' or channels. + # Linux builds target glibc 2.34 (Rocky Linux 9 compatibility). + pkgs ? + let + rustOverlay = import ( + builtins.fetchTarball { + url = "https://github.com/oxalica/rust-overlay/archive/a313afc75b85fc77ac154bf0e62c36f68361fd0b.tar.gz"; + sha256 = "0fb18ysw2dgm3033kcv3nlhsihckssnq6j5ayq4zjq148f12m7yv"; + } + ); + nixpkgsSrc = builtins.fetchTarball { + url = "https://package.cosmian.com/nixpkgs/8b27c1239e5c421a2bbc2c65d52e4a6fbf2ff296.tar.gz"; + sha256 = "sha256-CqCX4JG7UiHvkrBTpYC3wcEurvbtTADLbo3Ns2CEoL8="; + }; + in + import nixpkgsSrc { + overlays = [ rustOverlay ]; + config.allowUnfree = true; + }, +}: + +let + # Pinned nixpkgs tarball (same commit as above) + nixpkgsSrc = builtins.fetchTarball { + url = "https://package.cosmian.com/nixpkgs/8b27c1239e5c421a2bbc2c65d52e4a6fbf2ff296.tar.gz"; + sha256 = "sha256-CqCX4JG7UiHvkrBTpYC3wcEurvbtTADLbo3Ns2CEoL8="; + }; + + # Modern Rust toolchain via rust-overlay (pinned to commit a313afc, includes Rust 1.94.1) + rustOverlay = import ( + builtins.fetchTarball { + url = "https://github.com/oxalica/rust-overlay/archive/a313afc75b85fc77ac154bf0e62c36f68361fd0b.tar.gz"; + sha256 = "0fb18ysw2dgm3033kcv3nlhsihckssnq6j5ayq4zjq148f12m7yv"; + } + ); + + pkgsWithRust = import nixpkgsSrc { + overlays = [ rustOverlay ]; + config.allowUnfree = true; + }; + + # Latest 
stable Rust toolchain from the pinned overlay (currently 1.94.1). + # Using 'latest' ensures we always use the most recent stable in the pinned overlay. + rustToolchain = pkgsWithRust.rust-bin.stable.latest.minimal.override { + extensions = [ + "rustfmt" + "clippy" + ]; + }; + + # For Linux, pin nixpkgs 22.05 (glibc 2.34) for Rocky Linux 9 compatibility. + pkgs234 = + if pkgs.stdenv.isLinux then + import (builtins.fetchTarball { + url = "https://package.cosmian.com/nixpkgs/380be19fbd2d9079f677978361792cb25e8a3635.tar.gz"; + sha256 = "sha256-Zffu01pONhs/pqH07cjlF10NnMDLok8ix5Uk4rhOnZQ="; + }) { config.allowUnfree = true; } + else + pkgs; + + # pkgs234.makeRustPlatform (nixpkgs 22.05) has two bugs for git deps with + # workspace inheritance (version.workspace = true, added in Cargo 1.64): + # + # Bug 1: import-cargo-lock.nix is called with `{}` (no cargo override), so it + # uses buildPackages.cargo (= cargo-1.60.0) which can't parse workspace syntax. + # Fix: extend pkgs234 to set cargo = rustToolchain so buildPackages.cargo is modern. + # + # Bug 2: pkgs234's import-cargo-lock.nix is missing the replace-workspace-values.py + # step, so workspace inheritance keys remain in vendored Cargo.toml files. + # Fix: use the modern importCargoLock from pkgsWithRust which has this step, + # and inject it into the pkgs234-based rustPlatform.buildRustPackage. + pkgs234Fixed = + if pkgs.stdenv.isLinux then + pkgs234.extend (_: _: { cargo = rustToolchain; }) + else + pkgs234; + + # Modern importCargoLock (from pkgsWithRust) that supports workspace inheritance + # via replace-workspace-values.py. Used to override the missing step in pkgs234. 
+ importCargoLockModern = + (pkgsWithRust.makeRustPlatform { + cargo = rustToolchain; + rustc = rustToolchain; + }).importCargoLock; + + # rustPlatform: on Linux use pkgs234Fixed (glibc 2.34) with modern importCargoLock + rustPlatform = + if pkgs.stdenv.isLinux then + let + base = pkgs234Fixed.makeRustPlatform { + cargo = rustToolchain; + rustc = rustToolchain; + }; + in + base // { + buildRustPackage = base.buildRustPackage.override { + importCargoLock = importCargoLockModern; + }; + } + else + pkgsWithRust.makeRustPlatform { + cargo = rustToolchain; + rustc = rustToolchain; + }; + + # Extract version from workspace Cargo.toml + cargoTomlContent = builtins.readFile ./Cargo.toml; + lines = pkgs.lib.splitString "\n" cargoTomlContent; + extractVersion = + lines: + let + findWorkspacePackage = + idx: + if idx >= builtins.length lines then + null + else if pkgs.lib.hasPrefix "[workspace.package]" (builtins.elemAt lines idx) then + idx + else + findWorkspacePackage (idx + 1); + + workspaceIdx = findWorkspacePackage 0; + + findVersion = + idx: + if idx >= builtins.length lines || workspaceIdx == null then + null + else + let + line = builtins.elemAt lines idx; + isNextSection = pkgs.lib.hasPrefix "[" line && idx > workspaceIdx; + in + if isNextSection then + null + else if pkgs.lib.hasPrefix "version" (pkgs.lib.replaceStrings [ " " "\t" ] [ "" "" ] line) then + builtins.elemAt (pkgs.lib.splitString "\"" line) 1 + else + findVersion (idx + 1); + in + if workspaceIdx == null then + throw "Could not find [workspace.package] in Cargo.toml" + else + let + ver = findVersion (workspaceIdx + 1); + in + if ver == null then throw "Could not find version in [workspace.package] section" else ver; + + authVersion = extractVersion lines; + + # Build cargo-generate-rpm from crates.io (not available in all pinned nixpkgs) + cargoGenerateRpmTool = rustPlatform.buildRustPackage rec { + pname = "cargo-generate-rpm"; + version = "0.16.0"; + src = pkgs.fetchCrate { + inherit pname version; + 
sha256 = "sha256-esp3MJ24RQpMFn9zPgccp7NESoFAUPU7y+YRsJBVVr4="; + }; + cargoSha256 = "sha256-mUsoPBgv60Eir/uIK+Xe+GmXdSFKXoopB4PlvFvHZuA="; + nativeBuildInputs = [ + rustToolchain + pkgs.pkg-config + pkgs.git + pkgs.cacert + ]; + doCheck = false; + }; + + # Build auth-server for static linkage + auth-server-static = pkgs.callPackage ./nix/auth-server.nix { + inherit pkgs pkgs234 rustPlatform; + version = authVersion; + static = true; + }; + + # Build auth-server for dynamic linkage + auth-server-dynamic = pkgs.callPackage ./nix/auth-server.nix { + inherit pkgs pkgs234 rustPlatform; + version = authVersion; + static = false; + }; + + # Docker image derivation (Linux only) + docker-image = pkgs.callPackage ./nix/docker.nix { + inherit pkgs; + authServer = auth-server-static; + version = authVersion; + }; + +in +{ + # Build attributes accessible via -A + inherit + auth-server-static + auth-server-dynamic + docker-image + cargoGenerateRpmTool + rustToolchain + ; + + # Convenience aliases used by packaging scripts + "auth-server-static-openssl" = auth-server-static; + "auth-server-dynamic-openssl" = auth-server-dynamic; +} diff --git a/deny.toml b/deny.toml new file mode 100644 index 0000000..c3f655f --- /dev/null +++ b/deny.toml @@ -0,0 +1,256 @@ +# This template contains all of the possible sections and their default values + +# Note that all fields that take a lint level have these possible values: +# * deny - An error will be produced and the check will fail +# * warn - A warning will be produced, but the check will not fail +# * allow - No warning or error will be produced, though in some cases a note +# will be + +# The values provided in this template are the default values that will be used +# when any section or field is not specified in your own configuration + +# Root options + +# The graph table configures how the dependency graph is constructed and thus +# which crates the checks are performed against +[graph] +# If 1 or more target triples (and optionally, 
target_features) are specified, +# only the specified targets will be checked when running `cargo deny check`. +# This means, if a particular package is only ever used as a target specific +# dependency, such as, for example, the `nix` crate only being used via the +# `target_family = "unix"` configuration, that only having windows targets in +# this list would mean the nix crate, as well as any of its exclusive +# dependencies not shared by any other crates, would be ignored, as the target +# list here is effectively saying which targets you are building for. +targets = [ + + + # The triple can be any string, but only the target triples built in to + # rustc (as of 1.40) can be checked against actual config expressions + # "x86_64-unknown-linux-musl", + # You can also specify which target_features you promise are enabled for a + # particular target. target_features are currently not validated against + # the actual valid features supported by the target architecture. + # { triple = "wasm32-unknown-unknown", features = ["atomics"] }, +] +# When creating the dependency graph used as the source of truth when checks are +# executed, this field can be used to prune crates from the graph, removing them +# from the view of cargo-deny. This is an extremely heavy hammer, as if a crate +# is pruned from the graph, all of its dependencies will also be pruned unless +# they are connected to another crate in the graph that hasn't been pruned, +# so it should be used with care. The identifiers are [Package ID Specifications] +# (https://doc.rust-lang.org/cargo/reference/pkgid-spec.html) +# exclude = [] +# If true, metadata will be collected with `--all-features`. Note that this can't +# be toggled off if true, if you want to conditionally enable `--all-features` it +# is recommended to pass `--all-features` on the cmd line instead +all-features = true +# If true, metadata will be collected with `--no-default-features`. 
The same +# caveat with `all-features` applies +no-default-features = false +# If set, these features will be enabled when collecting metadata. If `--features` +# is specified on the cmd line they will take precedence over this option. +# features = [] + +# The output table provides options for how/if diagnostics are outputted +[output] +# When outputting inclusion graphs in diagnostics that include features, this +# option can be used to specify the depth at which feature edges will be added. +# This option is included since the graphs can be quite large and the addition +# of features from the crate(s) to all of the graph roots can be far too verbose. +# This option can be overridden via `--feature-depth` on the cmd line +feature-depth = 1 + +# This section is considered when running `cargo deny check advisories` +# More documentation for the advisories section can be found here: +# https://embarkstudios.github.io/cargo-deny/checks/advisories/cfg.html +[advisories] +ignore = [ + # { id = "RUSTSEC-0000-0000", reason = "you can specify a reason the advisory is ignored" }, + # "a-crate-that-is-yanked@0.1.1", # you can also ignore yanked crate versions if you wish + # { crate = "a-crate-that-is-yanked@0.1.1", reason = "you can specify why you are ignoring the yanked crate" }, + { id = "RUSTSEC-2023-0071", reason = "Marvin Attack on rsa crate: no fix available yet, accepted risk" }, +] # The path where the advisory databases are cloned/fetched into +# db-path = "$CARGO_HOME/advisory-dbs" +# The url(s) of the advisory databases to use +# db-urls = ["https://github.com/rustsec/advisory-db"] +# A list of advisory IDs to ignore. Note that ignored advisories will still +# output a note when they are encountered. +# If this is true, then cargo deny will use the git executable to fetch advisory database. +# If this is false, then it uses a built-in git library. 
+# Setting this to true can be helpful if you have special authentication requirements that cargo-deny does not support. +# See Git Authentication for more information about setting up git authentication. +# git-fetch-with-cli = true + +# This section is considered when running `cargo deny check licenses` +# More documentation for the licenses section can be found here: +# https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html +[licenses] +# List of explicitly allowed licenses +# See https://spdx.org/licenses/ for list of possible licenses +# [possible values: any SPDX 3.11 short identifier (+ optional exception)]. +allow = [ + "MIT", + "Apache-2.0", + "Apache-2.0 WITH LLVM-exception", + "ISC", + "Zlib", + "BSD-2-Clause", + "BSD-3-Clause", + "Unicode-3.0", + "CC0-1.0", + "BUSL-1.1", + "CDLA-Permissive-2.0" +] +# The confidence threshold for detecting a license from license text. +# The higher the value, the more closely the license text must be to the +# canonical license text of a valid SPDX license file. +# [possible values: any between 0.0 and 1.0]. +confidence-threshold = 0.8 +# Allow 1 or more licenses on a per-crate basis, so that particular licenses +# aren't accepted for every possible crate as with the normal allow list +exceptions = [ + + + # Each entry is the crate and version constraint, and its specific allow + # list + # { allow = ["Zlib"], crate = "adler32" }, +] + +# Some crates don't have (easily) machine readable licensing information, +# adding a clarification entry for it allows you to manually specify the +# licensing information +[[licenses.clarify]] +# The package spec the clarification applies to +crate = "ring" +# The SPDX expression for the license requirements of the crate +expression = "MIT AND ISC AND OpenSSL" +# One or more files in the crate's source used as the "source of truth" for +# the license expression. 
If the contents match, the clarification will be used +# when running the license check, otherwise the clarification will be ignored +# and the crate will be checked normally, which may produce warnings or errors +# depending on the rest of your configuration +license-files = [ + # Each entry is a crate relative path, and the (opaque) hash of its contents + { path = "LICENSE", hash = 0xbd0eed23 }, +] + +[licenses.private] +# If true, ignores workspace crates that aren't published, or are only +# published to private registries. +# To see how to mark a crate as unpublished (to the official registry), +# visit https://doc.rust-lang.org/cargo/reference/manifest.html#the-publish-field. +ignore = false +# One or more private registries that you might publish crates to, if a crate +# is only published to private registries, and ignore is true, the crate will +# not have its license(s) checked +registries = [ + + + # "https://sekretz.com/registry +] + +# This section is considered when running `cargo deny check bans`. +# More documentation about the 'bans' section can be found here: +# https://embarkstudios.github.io/cargo-deny/checks/bans/cfg.html +[bans] +# Lint level for when multiple versions of the same crate are detected +multiple-versions = "warn" +# Lint level for when a crate version requirement is `*` +wildcards = "allow" +# The graph highlighting used when creating dotgraphs for crates +# with multiple versions +# * lowest-version - The path to the lowest versioned duplicate is highlighted +# * simplest-path - The path to the version with the fewest edges is highlighted +# * all - Both lowest-version and simplest-path are used +highlight = "all" +# The default lint level for `default` features for crates that are members of +# the workspace that is being checked. This can be overridden by allowing/denying +# `default` on a crate-by-crate basis if desired. 
+workspace-default-features = "allow" +# The default lint level for `default` features for external crates that are not +# members of the workspace. This can be overridden by allowing/denying `default` +# on a crate-by-crate basis if desired. +external-default-features = "allow" +# List of crates that are allowed. Use with care! +allow = [ + + + # "ansi_term@0.11.0", + # { crate = "ansi_term@0.11.0", reason = "you can specify a reason it is allowed" }, +] +# List of crates to deny +deny = [ + + + # "ansi_term@0.11.0", + # { crate = "ansi_term@0.11.0", reason = "you can specify a reason it is banned" }, + # Wrapper crates can optionally be specified to allow the crate when it + # is a direct dependency of the otherwise banned crate + # { crate = "ansi_term@0.11.0", wrappers = ["this-crate-directly-depends-on-ansi_term"] }, +] + +# List of features to allow/deny +# Each entry the name of a crate and a version range. If version is +# not specified, all versions will be matched. +# [[bans.features]] +# crate = "reqwest" +# Features to not allow +# deny = ["json"] +# Features to allow +# allow = [ +# "rustls", +# "__rustls", +# "__tls", +# "hyper-rustls", +# "rustls", +# "rustls-pemfile", +# "rustls-tls-webpki-roots", +# "tokio-rustls", +# "webpki-roots", +# ] +# If true, the allowed features must exactly match the enabled feature set. If +# this is set there is no point setting `deny` +# exact = true + +# Certain crates/versions that will be skipped when doing duplicate detection. +skip = [ + + + # "ansi_term@0.11.0", + # { crate = "ansi_term@0.11.0", reason = "you can specify a reason why it can't be updated/removed" }, +] +# Similarly to `skip` allows you to skip certain crates during duplicate +# detection. Unlike skip, it also includes the entire tree of transitive +# dependencies starting at the specified crate, up to a certain depth, which is +# by default infinite. 
+skip-tree = [ + + + # "ansi_term@0.11.0", # will be skipped along with _all_ of its direct and transitive dependencies + # { crate = "ansi_term@0.11.0", depth = 20 }, +] + +# This section is considered when running `cargo deny check sources`. +# More documentation about the 'sources' section can be found here: +# https://embarkstudios.github.io/cargo-deny/checks/sources/cfg.html +[sources] +# Lint level for what to happen when a crate from a crate registry that is not +# in the allow list is encountered +unknown-registry = "warn" +# Lint level for what to happen when a crate from a git repository that is not +# in the allow list is encountered +unknown-git = "warn" +# List of URLs for allowed crate registries. Defaults to the crates.io index +# if not specified. If it is specified but empty, no registries are allowed. +allow-registry = ["https://github.com/rust-lang/crates.io-index"] +# List of URLs for allowed Git repositories +allow-git = [] + +[sources.allow-org] +# 1 or more github.com organizations to allow git sources for +github = [""] +# 1 or more gitlab.com organizations to allow git sources for +gitlab = [""] +# 1 or more bitbucket.org organizations to allow git sources for +bitbucket = [""] diff --git a/nix/auth-server.nix b/nix/auth-server.nix new file mode 100644 index 0000000..4c6262b --- /dev/null +++ b/nix/auth-server.nix @@ -0,0 +1,290 @@ +{ + pkgs ? import { }, + pkgs234 ? pkgs, # nixpkgs 22.05 with glibc 2.34 (Rocky Linux 9 compatibility) + lib ? pkgs.lib, + # Provide a rustPlatform that uses the desired Rust but links against pkgs234 (glibc 2.34) + rustPlatform ? pkgs.rustPlatform, + # Version (from Cargo.toml) + version, + # Linkage mode: true for static OpenSSL, false for dynamic OpenSSL + static ? true, +}: + +let + # On Linux, use pkgs234 stdenv (glibc 2.34) to broaden runtime compatibility. + # On macOS, use the default stdenv. 
+ platform = if pkgs.stdenv.isLinux then pkgs234 else pkgs; + + # Name tag for output symlinks + linkTag = if static then "static" else "dynamic"; + + # Expected deterministic sha256 of the final installed binary (auth_server) + # Naming convention (matches repository files): + # auth-server....sha256 + expectedHashDir = ./expected-hashes; + + # Helper: read & trim a hash file, returning null when absent or placeholder. + readHashFile = + name: + let + path = expectedHashDir + "/${name}"; + in + if builtins.pathExists path then + let + raw = builtins.readFile path; + trimmed = lib.replaceStrings [ "\n" "\r" " " "\t" ] [ "" "" "" "" ] raw; + isPlaceholder = builtins.match "^0+$" trimmed != null; + in + if trimmed != "" && !isPlaceholder then trimmed else null + else + null; + + # Pre-read expected hashes for every arch+os combination this derivation supports. + expectedHash_x86_64_linux = readHashFile "auth-server.${linkTag}.x86_64.linux.sha256"; + expectedHash_aarch64_linux = readHashFile "auth-server.${linkTag}.aarch64.linux.sha256"; + expectedHash_x86_64_darwin = readHashFile "auth-server.${linkTag}.x86_64.darwin.sha256"; + expectedHash_arm64_darwin = readHashFile "auth-server.${linkTag}.arm64.darwin.sha256"; + + srcRoot = ../.; + + filteredSrc = lib.cleanSourceWith { + src = srcRoot; + filter = + path: type: + let + rel = lib.removePrefix (toString srcRoot + "/") (toString path); + isEphemeral = + lib.hasInfix "/target/" rel + || lib.hasSuffix "/target" rel; + basePaths = + rel == "Cargo.toml" + || rel == "Cargo.lock" + || rel == "LICENSE" + || rel == "README.md" + || rel == "client" + || lib.hasPrefix "client/" rel + || rel == "server" + || lib.hasPrefix "server/" rel; + in + lib.cleanSourceFilter path type && (!isEphemeral) && basePaths; + }; + + # Git-sourced crate output hashes. + # When adding a new git dep, set a fake hash here, run nix-build, and copy the + # "got: sha256-..." value from the error into this attribute set. 
+ gitDepOutputHashes = { }; + + # Build inputs: darwin frameworks + iconv + buildInputs = + [ ] + ++ lib.optionals pkgs.stdenv.isDarwin ( + let + fw = pkgs.darwin.apple_sdk.frameworks; + in + [ + fw.SystemConfiguration + fw.Security + fw.CoreFoundation + pkgs.libiconv + ] + ); + + # Native build inputs needed by vendored OpenSSL and aws-lc-sys + nativeBuildInputs = + [ + pkgs.pkg-config + pkgs.perl # required by openssl crate vendored build + pkgs.cmake # required by aws-lc-sys + ] + ++ lib.optionals pkgs.stdenv.isLinux [ + platform.patchelf + ]; + +in +rustPlatform.buildRustPackage { + pname = "auth_server"; + inherit version; + + src = filteredSrc; + + # Use cargoLock instead of cargoHash to support git-sourced dependencies + # (e.g. cosmian_logger from github.com/Cosmian/http_client_server). + # Output hashes for git deps must be set in gitDepOutputHashes above. + cargoLock = { + lockFile = ../Cargo.lock; + outputHashes = gitDepOutputHashes; + }; + + inherit buildInputs nativeBuildInputs; + + # Enable vendored OpenSSL + database support by default + buildFeatures = [ ]; + + # Build only the server binary + cargoBuildFlags = [ "-p" "auth_server" "--bin" "auth_server" ]; + + doCheck = false; + # Disable cargo-auditable: the pinned nixpkgs version doesn't support edition 2024 + auditable = false; + + # Custom build phase: explicit cargo build so we control flags precisely. + # We must set CC_ / CXX_ explicitly here because we use a + # custom buildPhase that bypasses cargoBuildHook, which normally injects these + # variables pointing to platform.stdenv.cc (pkgs234, glibc 2.34). Without + # them the cc/cmake crates fall back to whatever `cc` is found in PATH, which + # may be from the modern nixpkgs (glibc 2.40) and would produce object files + # referencing __isoc23_strtol / __isoc23_sscanf — symbols absent in glibc 2.34. 
+ buildPhase = + let + # On aarch64 Linux, pkgs234's default stdenv.cc is gcc-9.3.0 which is rejected + # by aws-lc-sys v0.39.1+ due to a memcmp bug (https://gcc.gnu.org/bugzilla/show_bug.cgi?id=95189). + # Use gcc11 from pkgs234 instead — still glibc 2.34 compatible but without the bug. + effectiveCc = + if pkgs.stdenv.isLinux && platform.stdenv.hostPlatform.isAarch64 + then platform.gcc11 + else platform.stdenv.cc; + ccBin = "${effectiveCc}/bin/${effectiveCc.targetPrefix}cc"; + cxxBin = "${effectiveCc}/bin/${effectiveCc.targetPrefix}c++"; + # Convert "x86_64-unknown-linux-gnu" → "x86_64_unknown_linux_gnu" + rustTriple = lib.replaceStrings [ "-" ] [ "_" ] platform.stdenv.hostPlatform.config; + ccExports = lib.optionalString pkgs.stdenv.isLinux '' + export CC_${rustTriple}=${ccBin} + export CXX_${rustTriple}=${cxxBin} + ''; + in + '' + echo "== cargo build auth_server (release) ==" + ${ccExports}cargo build --release -p auth_server --bin auth_server + ''; + + # Custom install phase: copy the binary and immediately patch its ELF + # interpreter to the SYSTEM dynamic linker (not the Nix store one), + # so the resulting binary is portable to any Linux with glibc >= 2.34. + installPhase = '' + runHook preInstall + mkdir -p "$out/bin" + install -m755 target/release/auth_server "$out/bin/auth_server" + + if [ "$(uname)" = "Linux" ]; then + ARCH="$(uname -m)" + if [ "$ARCH" = "x86_64" ]; then + DL="/lib64/ld-linux-x86-64.so.2" + elif [ "$ARCH" = "aarch64" ]; then + DL="/lib/ld-linux-aarch64.so.1" + fi + if [ -n "$DL" ]; then + patchelf --set-interpreter "$DL" "$out/bin/auth_server" \ + || echo "Warning: patchelf failed (binary may be statically linked)" + patchelf --remove-rpath "$out/bin/auth_server" 2>/dev/null || true + fi + fi + runHook postInstall + ''; + + # postInstall: verify binary and run hash checks. 
+ postInstall = '' + BIN="$out/bin/auth_server" + [ -f "$BIN" ] || { echo "ERROR: Binary not found at $BIN"; exit 1; } + echo "Binary exists at: $BIN" + + file "$BIN" || true + if [ "$(uname)" = "Linux" ]; then + readelf -l "$BIN" | grep -A 2 "interpreter" || true + elif [ "$(uname)" = "Darwin" ]; then + otool -L "$BIN" || true + fi + + if [ "$(uname)" = "Linux" ]; then + # Verify GLIBC requirement does not exceed 2.34 (Rocky Linux 9 compatibility) + MAX_VER=$(readelf -sW "$BIN" | grep -o 'GLIBC_[0-9][0-9.]*' | sed 's/^GLIBC_//' | sort -V | tail -n1) + if [ -n "$MAX_VER" ]; then + if [ "$(printf '%s\n' "$MAX_VER" "2.34" | sort -V | tail -n1)" != "2.34" ]; then + echo "ERROR: GLIBC $MAX_VER > 2.34 — binary not portable to Rocky Linux 9"; exit 1 + fi + fi + + # Compute and save binary hash + ACTUAL=$(sha256sum "$BIN" | awk '{print $1}') + echo "$ACTUAL" > "$out/bin/auth_server.sha256" + echo "Binary sha256: $ACTUAL" + + ARCH_LINUX="$(uname -m)" + case "$ARCH_LINUX" in + x86_64) ARCH_TAG="x86_64" ;; + aarch64|arm64) ARCH_TAG="aarch64" ;; + *) ARCH_TAG="$ARCH_LINUX" ;; + esac + HASH_FILENAME="auth-server.${linkTag}.$ARCH_TAG.linux.sha256" + + EXPECTED="" + case "$ARCH_LINUX" in + x86_64) EXPECTED="${toString expectedHash_x86_64_linux}" ;; + aarch64) EXPECTED="${toString expectedHash_aarch64_linux}" ;; + esac + + if [ -n "$EXPECTED" ]; then + if [ "$ACTUAL" = "$EXPECTED" ]; then + echo "Deterministic hash check PASSED: $ACTUAL" + else + echo "ERROR: Deterministic hash MISMATCH!" 
+ echo " Expected: $EXPECTED" + echo " Actual: $ACTUAL" + echo " Update: echo '$ACTUAL' > nix/expected-hashes/$HASH_FILENAME" + exit 1 + fi + else + echo "NOTE: No expected hash for $HASH_FILENAME — bootstrapping (hash not yet recorded)" + echo "$ACTUAL" > "$out/bin/$HASH_FILENAME" + echo " Copy to repo: nix/expected-hashes/$HASH_FILENAME" + fi + elif [ "$(uname)" = "Darwin" ]; then + ACTUAL=$(shasum -a 256 "$BIN" | awk '{print $1}') + echo "$ACTUAL" > "$out/bin/auth_server.sha256" + echo "Binary sha256: $ACTUAL" + + ARCH_DARWIN="$(uname -m)" + case "$ARCH_DARWIN" in + x86_64) ARCH_TAG="x86_64" ;; + arm64) ARCH_TAG="arm64" ;; + *) ARCH_TAG="$ARCH_DARWIN" ;; + esac + HASH_FILENAME="auth-server.${linkTag}.$ARCH_TAG.darwin.sha256" + + EXPECTED="" + case "$ARCH_DARWIN" in + x86_64) EXPECTED="${toString expectedHash_x86_64_darwin}" ;; + arm64) EXPECTED="${toString expectedHash_arm64_darwin}" ;; + esac + + if [ -n "$EXPECTED" ]; then + if [ "$ACTUAL" = "$EXPECTED" ]; then + echo "Deterministic hash check PASSED: $ACTUAL" + else + echo "ERROR: Deterministic hash MISMATCH!" + echo " Expected: $EXPECTED" + echo " Actual: $ACTUAL" + echo " Update: echo '$ACTUAL' > nix/expected-hashes/$HASH_FILENAME" + exit 1 + fi + else + echo "NOTE: No expected hash for $HASH_FILENAME — bootstrapping" + echo "$ACTUAL" > "$out/bin/$HASH_FILENAME" + echo " Copy to repo: nix/expected-hashes/$HASH_FILENAME" + fi + fi + + echo "postInstall complete — binary is ready" + ''; + + meta = { + description = "Cosmian Authentication Server"; + homepage = "https://github.com/Cosmian/authentication"; + license = { + spdxId = "BUSL-1.1"; + fullName = "Business Source License 1.1"; + free = false; + }; + maintainers = [ ]; + platforms = lib.platforms.unix; + }; +} diff --git a/nix/docker.nix b/nix/docker.nix new file mode 100644 index 0000000..88ccd21 --- /dev/null +++ b/nix/docker.nix @@ -0,0 +1,95 @@ +{ + pkgs ? import { }, + # Auth server derivation to include in the image + authServer ? 
null, + # Version (from Cargo.toml) + version, +}: + +let + actualAuthServer = + if authServer != null then + authServer + else + builtins.throw "authServer parameter is required. Pass it from default.nix"; + + imageName = "cosmian-auth-server"; + imageTag = "${version}"; + + runtimeEnv = pkgs.buildEnv { + name = "auth-server-runtime-env"; + paths = [ + actualAuthServer + pkgs.tzdata + pkgs.coreutils + pkgs.bash + ]; + }; + + etcPasswd = pkgs.writeTextFile { + name = "passwd"; + text = '' + root:x:0:0:root:/root:/bin/sh + auth:x:1000:1000:Auth User:/home/auth:/bin/sh + ''; + destination = "/etc/passwd"; + }; + + etcGroup = pkgs.writeTextFile { + name = "group"; + text = '' + root:x:0: + auth:x:1000: + ''; + destination = "/etc/group"; + }; + + etcNsswitch = pkgs.writeTextFile { + name = "nsswitch.conf"; + text = '' + hosts: files dns + networks: files + passwd: files + group: files + ''; + destination = "/etc/nsswitch.conf"; + }; + +in +pkgs.dockerTools.buildImage { + name = imageName; + tag = imageTag; + + copyToRoot = pkgs.buildEnv { + name = "image-root"; + paths = [ + runtimeEnv + etcPasswd + etcGroup + etcNsswitch + pkgs.dockerTools.caCertificates + ]; + pathsToLink = [ + "/bin" + "/etc" + "/usr" + "/var" + ]; + }; + + config = { + Cmd = [ "/bin/auth_server" ]; + ExposedPorts = { + "9005/tcp" = { }; + }; + User = "1000:1000"; + WorkingDir = "/home/auth"; + Labels = { + "org.opencontainers.image.title" = "Cosmian Authentication Server"; + "org.opencontainers.image.version" = version; + "org.opencontainers.image.vendor" = "Cosmian"; + }; + }; + + created = "now"; +} diff --git a/nix/expected-hashes/.gitignore b/nix/expected-hashes/.gitignore new file mode 100644 index 0000000..8a46eec --- /dev/null +++ b/nix/expected-hashes/.gitignore @@ -0,0 +1 @@ +auth-server* diff --git a/nix/expected-hashes/README.md b/nix/expected-hashes/README.md new file mode 100644 index 0000000..3f03418 --- /dev/null +++ b/nix/expected-hashes/README.md @@ -0,0 +1,35 @@ +# Expected 
Hashes
+
+This directory stores expected SHA-256 hashes for deterministic build verification.
+
+## Files
+
+### Cargo vendor hashes
+
+Used by `nix/auth-server.nix` to verify reproducible Cargo vendoring:
+
+- `server.vendor.static.sha256` — Cargo vendor hash for static builds
+- `server.vendor.dynamic.sha256` — Cargo vendor hash for dynamic builds
+
+### Binary hashes
+
+Generated after a successful build and used for cross-run determinism checks:
+
+- `auth-server.<linkage>.<arch>.<os>.sha256` (e.g. `auth-server.static.x86_64.linux.sha256`)
+
+## How to update
+
+When `Cargo.lock` changes (new or updated dependency), the vendor hashes become
+stale. To regenerate:
+
+```bash
+# 1. Put a fake hash to trigger hash mismatch error
+echo "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=" > nix/expected-hashes/server.vendor.static.sha256
+
+# 2. Run nix-build; it will fail with: got: sha256-...
+nix-build -I nixpkgs="$(grep url default.nix | head -1 | sed 's/.*"\(.*\)".*/\1/')" -A auth-server-static 2>&1 | grep "got:"
+
+# 3. Copy the correct hash into the file
+```
+
+Repeat for `server.vendor.dynamic.sha256`. 
diff --git a/nix/expected-hashes/server.vendor.dynamic.sha256 b/nix/expected-hashes/server.vendor.dynamic.sha256 new file mode 100644 index 0000000..f9714c9 --- /dev/null +++ b/nix/expected-hashes/server.vendor.dynamic.sha256 @@ -0,0 +1 @@ +sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA= diff --git a/nix/expected-hashes/server.vendor.static.sha256 b/nix/expected-hashes/server.vendor.static.sha256 new file mode 100644 index 0000000..f9714c9 --- /dev/null +++ b/nix/expected-hashes/server.vendor.static.sha256 @@ -0,0 +1 @@ +sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA= diff --git a/pkg/auth_server.service b/pkg/auth_server.service new file mode 100644 index 0000000..dfc67e2 --- /dev/null +++ b/pkg/auth_server.service @@ -0,0 +1,24 @@ +[Unit] +Description=Cosmian Authentication Server +Documentation=https://docs.cosmian.com +After=network.target + +[Service] +Type=simple +User=cosmian +Group=cosmian +ExecStart=/usr/sbin/auth_server /etc/cosmian/auth_server.toml +Restart=on-failure +RestartSec=5 +StandardOutput=journal +StandardError=journal +SyslogIdentifier=auth_server + +# Security hardening +NoNewPrivileges=true +ProtectSystem=strict +ProtectHome=true +ReadWritePaths=/var/lib/cosmian-auth + +[Install] +WantedBy=multi-user.target diff --git a/pkg/deb/postinst b/pkg/deb/postinst new file mode 100755 index 0000000..44261da --- /dev/null +++ b/pkg/deb/postinst @@ -0,0 +1,24 @@ +#!/bin/bash +# Debian post-install script for auth_server +set -e + +# Create cosmian user/group if they don't already exist +if ! getent group cosmian >/dev/null 2>&1; then + groupadd --system cosmian +fi +if ! 
getent passwd cosmian >/dev/null 2>&1; then + useradd --system --no-create-home --shell /usr/sbin/nologin --gid cosmian cosmian +fi + +# Create data directory +mkdir -p /var/lib/cosmian-auth +chown cosmian:cosmian /var/lib/cosmian-auth +chmod 750 /var/lib/cosmian-auth + +# Protect config file +if [ -f /etc/cosmian/auth_server.toml ]; then + chmod 400 /etc/cosmian/auth_server.toml + chown root:cosmian /etc/cosmian/auth_server.toml +fi + +#DEBHELPER# diff --git a/rust-toolchain.toml b/rust-toolchain.toml new file mode 100644 index 0000000..32c68ee --- /dev/null +++ b/rust-toolchain.toml @@ -0,0 +1,3 @@ +[toolchain] +channel = "1.94.1" +components = ["rustfmt", "clippy"] diff --git a/server/Cargo.toml b/server/Cargo.toml index 9d70f28..20a190b 100644 --- a/server/Cargo.toml +++ b/server/Cargo.toml @@ -24,7 +24,6 @@ actix-tls = { workspace = true } actix-web = { workspace = true, features = ["macros", "cookies"] } argon2 = { workspace = true } async-trait = { workspace = true } -base32 = { workspace = true } base64 = { workspace = true } chrono = { workspace = true } cosmian_logger = { workspace = true, features = ["full"] } @@ -48,7 +47,7 @@ sqlx = { workspace = true, features = [ ], optional = true } thiserror = { workspace = true } tokio = { workspace = true } -auth_client = { path = "../client", features = ["_server"] } +auth_client = { path = "../client", features = ["_server"], version = "1.0.0" } toml = { workspace = true } totp-rs = { workspace = true, features = ["otpauth", "gen_secret"] } url = { workspace = true } @@ -64,3 +63,48 @@ path = "src/main.rs" [dev-dependencies] openssl = { workspace = true, features = ["vendored"] } tokio = { workspace = true } + +[package.metadata.deb] +maintainer = "Cosmian support team " +maintainer-scripts = "../pkg/deb/" +copyright = "2026, Cosmian Tech SAS " +license-file = ["../LICENSE", "0"] +section = "security" +priority = "optional" +depends = "" +assets = [ + [ + "target/release/auth_server", + "usr/sbin/auth_server", 
+ "500", + ], + [ + "../README.md", + "usr/share/doc/cosmian-auth-server/README.md", + "644", + ], + [ + "../server/auth_server.toml", + "etc/cosmian/auth_server.toml", + "400", + ], +] +systemd-units = [ + { unit-name = "auth_server", unit-scripts = "../pkg", enable = true, start = false, restart-after-upgrade = false }, +] + +[package.metadata.generate-rpm] +assets = [ + { source = "target/release/auth_server", dest = "/usr/sbin/auth_server", mode = "0500" }, + { source = "../README.md", dest = "/usr/share/doc/cosmian-auth-server/README.md", mode = "0644" }, + { source = "../server/auth_server.toml", dest = "/etc/cosmian/auth_server.toml", mode = "0400" }, +] +[package.metadata.generate-rpm.requires] + +[package.metadata.packager] +name = "Cosmian Authentication Server" +identifier = "com.cosmian.authentication" +description = "Cosmian Authentication Server" +homepage = "https://github.com/Cosmian/authentication" +license-file = "../LICENSE" +formats = ["dmg"] diff --git a/server/src/database/passwords.rs b/server/src/database/passwords.rs index 0c277fc..d5cc5bb 100644 --- a/server/src/database/passwords.rs +++ b/server/src/database/passwords.rs @@ -1,4 +1,4 @@ -use argon2::{password_hash::SaltString, Argon2, PasswordHasher}; +use argon2::{Argon2, PasswordHasher, password_hash::SaltString}; use auth_client::{AuthError, AuthResult}; use sha2::Digest; diff --git a/server/src/error/mod.rs b/server/src/error/mod.rs new file mode 100644 index 0000000..81c05c7 --- /dev/null +++ b/server/src/error/mod.rs @@ -0,0 +1,28 @@ +use thiserror::Error; + +#[derive(Error, Debug)] +pub enum ServerError { + #[error("Authentication error: {0}")] + Authentication(String), + + #[error("Configuration error: {0}")] + Config(String), + + #[error("TLS error: {0}")] + Tls(String), +} + +pub type ServerResult = Result; + +#[cfg(feature = "rustls")] +impl From for ServerError { + fn from(e: rustls::Error) -> Self { + Self::Tls(e.to_string()) + } +} + +impl From for crate::AuthError { + fn 
from(e: ServerError) -> Self { + Self::Generic(e.to_string()) + } +} diff --git a/server/src/lib.rs b/server/src/lib.rs index 5615987..51536d2 100644 --- a/server/src/lib.rs +++ b/server/src/lib.rs @@ -3,6 +3,9 @@ pub use auth_client::*; // Macros must be re-exported explicitly pub use auth_client::{auth_bail, auth_ensure, auth_error}; +pub mod error; +pub use error::{ServerError, ServerResult}; + mod database; mod middleware; @@ -13,11 +16,11 @@ pub use server::parameters::{DatabaseBackend, DatabaseParams, ServerParams}; pub use server::start_auth_server; mod session; -pub use session::{build_cookie, delete_cookie}; pub use session::{ - create_session_store_with_collector, start_stale_session_collector, SessionStore, - StaleSessionCollectorConfig, + SessionStore, StaleSessionCollectorConfig, create_session_store_with_collector, + start_stale_session_collector, }; +pub use session::{build_cookie, delete_cookie}; pub mod tls; @@ -28,9 +31,9 @@ pub mod client { } pub mod models { pub use auth_client::{ - AuthPrivateClaims, AuthScheme, AuthenticatedClientScheme, AuthenticationNextStep, - AuthenticationResult, ClientClaims, LoginRequest, Realm, RegisteredClaims, SessionData, - User, UserPass, ADMIN_REALM, + ADMIN_REALM, AuthPrivateClaims, AuthScheme, AuthenticatedClientScheme, + AuthenticationNextStep, AuthenticationResult, ClientClaims, LoginRequest, Realm, + RegisteredClaims, SessionData, User, UserPass, }; } diff --git a/server/src/middleware/jwt/client_claim.rs b/server/src/middleware/jwt/client_claim.rs index 2738d38..5979570 100644 --- a/server/src/middleware/jwt/client_claim.rs +++ b/server/src/middleware/jwt/client_claim.rs @@ -3,7 +3,9 @@ use cosmian_logger::{debug, trace}; use jsonwebtoken::dangerous::insecure_decode; #[cfg(not(feature = "no_jwt_validation"))] use jsonwebtoken::decode; -use jsonwebtoken::{DecodingKey, Validation, decode_header}; +use jsonwebtoken::decode_header; +#[cfg(not(feature = "no_jwt_validation"))] +use jsonwebtoken::{DecodingKey, 
Validation}; use std::sync::Arc; use crate::{ diff --git a/server/src/server/auth_server.rs b/server/src/server/auth_server.rs index 6a6470d..266a49d 100644 --- a/server/src/server/auth_server.rs +++ b/server/src/server/auth_server.rs @@ -32,6 +32,9 @@ use std::{ #[cfg(feature = "openssl")] use crate::tls::openssl_config::{create_openssl_acceptor, extract_openssl_peer_certificate}; +#[cfg(feature = "rustls")] +use crate::tls::rustls_config::{extract_rustls_peer_certificate, rustls_server_config}; + /// Inner function to start the test server asynchronously. pub async fn start_auth_server( server_params: Arc, diff --git a/server/src/server/endpoints/totp_endpoints.rs b/server/src/server/endpoints/totp_endpoints.rs index 4177c34..8cad0af 100644 --- a/server/src/server/endpoints/totp_endpoints.rs +++ b/server/src/server/endpoints/totp_endpoints.rs @@ -8,9 +8,9 @@ use actix_web::{ HttpRequest, HttpResponse, delete, post, web::{Data, Json, Path}, }; +use auth_client::{TotpGenerateRequest, TotpGenerateResponse, TotpVerifyRequest}; use cosmian_logger::info; use std::sync::Arc; -use auth_client::{TotpGenerateRequest, TotpGenerateResponse, TotpVerifyRequest}; /// Generate a new TOTP secret for a user. /// diff --git a/server/src/tls/mod.rs b/server/src/tls/mod.rs index 43b13e1..5631d07 100644 --- a/server/src/tls/mod.rs +++ b/server/src/tls/mod.rs @@ -2,3 +2,11 @@ pub mod openssl_config; #[cfg(feature = "rustls")] pub mod rustls_config; + +/// Peer certificate extracted from a TLS handshake and inserted into request extensions. +/// Shared by both the OpenSSL and Rustls TLS backends. 
+#[cfg(feature = "openssl")] +#[derive(Debug, Clone)] +pub struct PeerCertificate { + pub cert: openssl::x509::X509, +} diff --git a/server/src/tls/openssl_config.rs b/server/src/tls/openssl_config.rs index e05eb7d..5dc8aa5 100644 --- a/server/src/tls/openssl_config.rs +++ b/server/src/tls/openssl_config.rs @@ -1,4 +1,5 @@ use crate::server::parameters::TlsParams; +use crate::tls::PeerCertificate; use crate::{AuthError, AuthResult}; use actix_web::dev::Extensions; use cosmian_logger::debug; @@ -10,14 +11,6 @@ use openssl::{ x509::{X509, store::X509StoreBuilder}, }; use std::any::Any; -/// The extension struct holding the peer certificate during the connection. -/// -/// This struct stores the peer certificate in the request context. -#[derive(Debug, Clone)] -pub struct PeerCertificate { - /// The peer certificate. - pub cert: X509, -} /// Extract the peer certificate from the TLS stream and pass it to middleware. /// diff --git a/server/src/tls/rustls_config.rs b/server/src/tls/rustls_config.rs index 563c6b3..13d338f 100644 --- a/server/src/tls/rustls_config.rs +++ b/server/src/tls/rustls_config.rs @@ -1,15 +1,22 @@ -use crate::{HeiError, TlsConfig}; +use crate::error::{ServerError, ServerResult}; +use crate::server::parameters::TlsParams; +use crate::tls::PeerCertificate; +use actix_web::dev::Extensions; +use cosmian_logger::{error, info}; +use openssl::x509::X509; use rustls::pki_types::pem::PemObject; use rustls::pki_types::{CertificateDer, PrivateKeyDer}; use rustls::server::WebPkiClientVerifier; use rustls::{RootCertStore, ServerConfig}; +use std::any::Any; +use std::net::TcpStream; use std::sync::Arc; -pub(crate) fn rustls_server_config(tls_config: &TlsConfig) -> Result { +pub(crate) fn rustls_server_config(tls_config: &TlsParams) -> ServerResult { rustls::crypto::aws_lc_rs::default_provider() .install_default() .map_err(|_e| { - HeiError::Config( + ServerError::Config( "Failed to install AWS-LC-Rust as the default crypto provider".to_owned(), ) })?; @@ -24,7 
+31,7 @@ pub(crate) fn rustls_server_config(tls_config: &TlsConfig) -> Result Result = CertificateDer::pem_file_iter(&tls_config.server_ca_chain) .map_err(|e| { - HeiError::Config(format!( + ServerError::Config(format!( "Failed to read server certificate chain from PEM: {e}" )) })? @@ -55,13 +62,14 @@ pub(crate) fn rustls_server_config(tls_config: &TlsConfig) -> Result Result>() @@ -80,12 +86,9 @@ pub(crate) fn extract_rustls_peer_certificate(cnx: &dyn Any, extensions: &mut Ex info!("Extracting peer certificate from TLS connection..."); let (_socket, tls_session) = tls_socket.get_ref(); if let Some(certs) = tls_session.peer_certificates() { - // insert a `rustls::Certificate` into request extensions` if let Some(cert) = certs.last() { info!("A client certificate was found"); - let Ok(openssl_cert) = X509::from_der(cert.as_ref()).map_err(|_| { - HeiError::Authentication("Failed to parse client certificate".to_owned()) - }) else { + let Ok(openssl_cert) = X509::from_der(cert.as_ref()) else { error!("Failed to parse client certificate"); return; }; diff --git a/shell.nix b/shell.nix new file mode 100644 index 0000000..cf9f865 --- /dev/null +++ b/shell.nix @@ -0,0 +1,54 @@ +{ + variant ? "default", + pkgs ? 
+ let + rustOverlay = import ( + builtins.fetchTarball { + url = "https://github.com/oxalica/rust-overlay/archive/a313afc75b85fc77ac154bf0e62c36f68361fd0b.tar.gz"; + sha256 = "0fb18ysw2dgm3033kcv3nlhsihckssnq6j5ayq4zjq148f12m7yv"; + } + ); + pinned = + import + (builtins.fetchTarball { + url = "https://package.cosmian.com/nixpkgs/8b27c1239e5c421a2bbc2c65d52e4a6fbf2ff296.tar.gz"; + }) + { + overlays = [ rustOverlay ]; + config.allowUnfree = true; + }; + in + pinned, +}: + +let + withCurl = (builtins.getEnv "WITH_CURL") == "1"; + + rustToolchain = pkgs.rust-bin.stable.latest.default; + +in +pkgs.mkShell { + name = "auth-server-dev"; + + buildInputs = + [ + rustToolchain + pkgs.pkg-config + pkgs.perl # for vendored OpenSSL + pkgs.cmake # for aws-lc-sys (jsonwebtoken/aws_lc_rs) + pkgs.openssl + pkgs.cargo-deny + pkgs.cargo-edit + ] + ++ pkgs.lib.optionals pkgs.stdenv.isDarwin [ + pkgs.libiconv + pkgs.darwin.apple_sdk.frameworks.SystemConfiguration + pkgs.darwin.apple_sdk.frameworks.Security + pkgs.darwin.apple_sdk.frameworks.CoreFoundation + ] + ++ pkgs.lib.optionals withCurl [ pkgs.curl ]; + + shellHook = '' + echo "Auth Server dev shell (Rust $(rustc --version))" + ''; +}