From c5985d1ef814ce6f0121691c194b84057b75447e Mon Sep 17 00:00:00 2001 From: Roger Duthie <343584+rjaduthie@users.noreply.github.com> Date: Mon, 23 Feb 2026 16:02:12 +0000 Subject: [PATCH 01/27] feat(SOG-480): Implement in Rust --- .gitignore | 9 ++ Cargo.toml | 22 ++++ RUST_README.md | 186 ++++++++++++++++++++++++++++++++ src/api_client.rs | 116 ++++++++++++++++++++ src/main.rs | 236 ++++++++++++++++++++++++++++++++++++++++ src/models.rs | 82 ++++++++++++++ src/mount.rs | 184 +++++++++++++++++++++++++++++++ src/oauth2_auth.rs | 264 +++++++++++++++++++++++++++++++++++++++++++++ 8 files changed, 1099 insertions(+) create mode 100644 Cargo.toml create mode 100644 RUST_README.md create mode 100644 src/api_client.rs create mode 100644 src/main.rs create mode 100644 src/models.rs create mode 100644 src/mount.rs create mode 100644 src/oauth2_auth.rs diff --git a/.gitignore b/.gitignore index 06dbe88..80d96c2 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,9 @@ +# Rust/Cargo +target/ +Cargo.lock +**/*.rs.bk +*.pdb + # Python-generated files __pycache__/ *.py[oc] @@ -12,3 +18,6 @@ wheels/ # IDE .vscode +# OS files +.DS_Store +*~ diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..80ed59c --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "path-finder" +version = "0.1.0" +edition = "2021" + +[[bin]] +name = "path-finder" +path = "src/main.rs" + +[dependencies] +clap = { version = "4.5", features = ["derive"] } +reqwest = { version = "0.12", features = ["json", "blocking"] } +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +anyhow = "1.0" +thiserror = "1.0" +tokio = { version = "1.40", features = ["full"] } +regex = "1.10" +dirs = "5.0" +libc = "0.2" + +[dev-dependencies] diff --git a/RUST_README.md b/RUST_README.md new file mode 100644 index 0000000..7402c76 --- /dev/null +++ b/RUST_README.md @@ -0,0 +1,186 @@ +# Path Finder - Rust Implementation + +A Rust implementation of the SKA path finder tool for locating 
and mounting data from SKA storage systems. + +## Overview + +This project replaces the Python-based path finder with a high-performance Rust implementation. It provides two binaries: + +1. **path-finder** - Main CLI tool for authenticating and locating SKA data +2. **pathfinder-mount** - Utility for mounting/unmounting data using bindfs + +## Features + +- OAuth2 device code flow authentication +- Token caching for improved performance +- Data location lookup via Data Management API +- Site capabilities verification via Site Capabilities API +- Automated data mounting with proper permissions +- Error handling and validation + +## Building + +```bash +cargo build --release +``` + +The binaries will be available in `target/release/`: + +- `target/release/path-finder` +- `target/release/pathfinder-mount` + +## Installation + +### Option 1: Install from source + +```bash +cargo install --path . +``` + +### Option 2: Manual installation + +```bash +sudo cp target/release/path-finder /usr/local/bin/ +sudo cp target/release/pathfinder-mount /usr/local/bin/ +sudo chmod +x /usr/local/bin/path-finder +sudo chmod +x /usr/local/bin/pathfinder-mount +``` + +## Usage + +### Main Path Finder + +With OAuth2 authentication (recommended): + +```bash +path-finder \ + --namespace daac \ + --file_name pi24_test_run_1_cleaned.fits \ + --site_name UKSRC-CAM-PREPROD +``` + +With environment variables (for automation): + +```bash +export DATA_MANAGEMENT_ACCESS_TOKEN="your_token_here" +export SITE_CAPABILITIES_ACCESS_TOKEN="your_token_here" + +path-finder \ + --namespace daac \ + --file_name pi24_test_run_1_cleaned.fits \ + --site_name UKSRC-CAM-PREPROD \ + --no-login +``` + +### Mount Utility + +The mount utility is designed to be called with sudo privileges. 
It handles: + +- Creating bind mounts from `/skadata` to user home directories +- Setting appropriate permissions +- Managing mount points to avoid cyclic mounts + +Mount data: + +```bash +sudo pathfinder-mount --mount /daac/pi24_test_run_1_cleaned.fits daac +``` + +Unmount data: + +```bash +sudo pathfinder-mount --unmount /daac/pi24_test_run_1_cleaned.fits daac +``` + +## Architecture + +### Modules + +- **oauth2_auth.rs** - OAuth2 device code flow implementation with token caching +- **models.rs** - Data structures for API responses (sites, nodes, storage areas, data locations) +- **api_client.rs** - HTTP client for Data Management and Site Capabilities APIs +- **main.rs** - Main path finder CLI logic +- **mount.rs** - Mount/unmount utility for data access + +### Authentication Flow + +1. Initiate device code flow with authn service +2. Display user code and verification URL +3. Poll for authentication completion +4. Exchange device token for API-specific tokens +5. Cache tokens for future use (default: 1 hour) + +### Data Location Flow + +1. Verify namespace exists in Data Management API +2. Verify site name exists in Site Capabilities API +3. Fetch site storage area mappings +4. Locate data file in namespace +5. Verify data is available at requested site +6. Extract RSE path from replica URIs +7. Call mount utility to make data accessible + +## Dependencies + +- **clap** - Command-line argument parsing +- **reqwest** - HTTP client +- **serde** - Serialization/deserialization +- **anyhow** - Error handling +- **regex** - Pattern matching for RSE paths +- **dirs** - Cross-platform config directory location + +## System Requirements + +- **bindfs** - FUSE filesystem for permission remapping +- **sudo** - Required for mount operations +- **mountpoint** - Used to verify mount status + +## Token Caching + +Tokens are cached in `~/.config/path-finder/tokens.json` with secure permissions (0600). +Cache expires after 1 hour (configurable in code). 
+ +## Error Handling + +The tool provides detailed error messages for: + +- Network failures +- Authentication failures +- Missing data or sites +- Permission issues +- Mount failures + +## Comparison with Python Implementation + +| Feature | Python | Rust | +| -------------- | ------------------------- | ----------------- | +| Performance | Slower | Faster | +| Memory Usage | Higher | Lower | +| Binary Size | N/A (interpreted) | ~6MB (release) | +| Dependencies | Runtime Python + packages | Statically linked | +| Error Messages | Good | Excellent | +| Type Safety | Runtime (Pydantic) | Compile-time | + +## Development + +Run tests: + +```bash +cargo test +``` + +Format code: + +```bash +cargo fmt +``` + +Lint code: + +```bash +cargo clippy +``` + +## License + +Same as the original Python implementation. diff --git a/src/api_client.rs b/src/api_client.rs new file mode 100644 index 0000000..7e34d2d --- /dev/null +++ b/src/api_client.rs @@ -0,0 +1,116 @@ +use anyhow::{Context, Result}; +use reqwest::blocking::Client; +use crate::models::*; + +const DM_API_BASEURL: &str = "https://data-management.srcnet.skao.int/api/v1"; +const SC_API_BASEURL: &str = "https://site-capabilities.srcnet.skao.int/api/v1"; + +pub struct ApiClient { + client: Client, + dm_token: String, + sc_token: String, +} + +impl ApiClient { + pub fn new(dm_token: String, sc_token: String) -> Self { + Self { + client: Client::new(), + dm_token, + sc_token, + } + } + + pub fn get_all_namespaces(&self) -> Result> { + let url = format!("{}/data/list", DM_API_BASEURL); + let response = self.client + .get(&url) + .bearer_auth(&self.dm_token) + .send() + .context("Failed to request namespaces from DM API")?; + + response + .error_for_status() + .context("DM API request failed")? 
+ .json() + .context("Failed to parse namespaces response") + } + + pub fn check_namespace_available(&self, namespace: &str) -> Result<()> { + let namespaces = self.get_all_namespaces()?; + if !namespaces.contains(&namespace.to_string()) { + anyhow::bail!( + "Namespace '{}' not found in available namespaces: {:?}", + namespace, + namespaces + ); + } + Ok(()) + } + + pub fn all_site_names(&self) -> Result> { + let url = format!("{}/sites", SC_API_BASEURL); + let response = self.client + .get(&url) + .bearer_auth(&self.sc_token) + .send() + .context("Failed to request sites from SC API")?; + + let sites: SitesAPIResponse = response + .error_for_status() + .context("SC API request failed")? + .json() + .context("Failed to parse sites response")?; + + Ok(sites.into_iter().map(|site| site.name).collect()) + } + + pub fn check_site_name_exists(&self, site_name: &str) -> Result<()> { + let sites = self.all_site_names()?; + if !sites.contains(&site_name.to_string()) { + eprintln!( + "Error: Site name '{}' not found in available sites:\n\n{}", + site_name, + sites.join(", ") + ); + std::process::exit(1); + } + Ok(()) + } + + pub fn site_storage_areas(&self) -> Result { + let url = format!("{}/nodes", SC_API_BASEURL); + let response = self.client + .get(&url) + .bearer_auth(&self.sc_token) + .send() + .context("Failed to request nodes from SC API")?; + + let nodes: NodesAPIResponse = response + .error_for_status() + .context("SC API request failed")? 
+ .json() + .context("Failed to parse nodes response")?; + + Ok(get_all_node_storage_areas(&nodes)) + } + + pub fn locate_data(&self, namespace: &str, file_name: &str) -> Result { + let url = format!("{}/data/locate/{}/{}", DM_API_BASEURL, namespace, file_name); + let response = self.client + .get(&url) + .bearer_auth(&self.dm_token) + .send() + .with_context(|| { + format!( + "Failed to locate file '{}' in namespace '{}' from DM API", + file_name, namespace + ) + })?; + + response + .error_for_status() + .context("DM API locate request failed")? + .json() + .context("Failed to parse data locations response") + } +} diff --git a/src/main.rs b/src/main.rs new file mode 100644 index 0000000..3fd79f0 --- /dev/null +++ b/src/main.rs @@ -0,0 +1,236 @@ +mod api_client; +mod models; +mod mount; +mod oauth2_auth; + +use anyhow::{Context, Result}; +use clap::Parser; +use regex::Regex; +use std::collections::HashSet; +use std::env; +use std::process::exit; + +use api_client::ApiClient; +use models::{DataLocation, StorageAreaIDToNodeAndSite}; +use oauth2_auth::{authenticate, Tokens}; + +#[derive(Parser, Debug)] +#[command(name = "path-finder")] +#[command(about = "A tool for finding SKA data paths for mounting purposes")] +struct Args { + /// Namespace of the data + #[arg(long)] + namespace: String, + + /// Name of the data file + #[arg(long)] + file_name: String, + + /// Site name where data is staged + #[arg(long)] + site_name: String, + + /// Do not use OAuth2 for authentication - use environment variables instead + #[arg(long)] + no_login: bool, +} + +fn main() -> Result<()> { + let args = Args::parse(); + + check_privileges(&args)?; + + let tokens = if args.no_login { + get_tokens_from_env()? 
+ } else { + println!("Authenticating with OAuth2..."); + let tokens = authenticate(true)?; + println!("Authentication successful!"); + tokens + }; + + run( + &args.namespace, + &args.file_name, + &args.site_name, + &tokens, + ) +} + +fn check_privileges(args: &Args) -> Result<()> { + // Check for root privileges early to avoid wasting time on API calls + #[cfg(unix)] + { + let euid = unsafe { libc::geteuid() }; + if euid != 0 { + eprintln!("\nError: This tool requires root privileges for mount operations."); + eprintln!("Please re-run with sudo:"); + eprintln!(" sudo -E path-finder --namespace {} --file_name {} --site_name {}", + args.namespace, args.file_name, args.site_name); + anyhow::bail!("Insufficient privileges - sudo required"); + } + + // Verify SUDO_USER is set + if env::var("SUDO_USER").is_err() { + eprintln!("\nWarning: SUDO_USER not set. Are you running as root directly?"); + eprintln!("Please use 'sudo' rather than running as root user."); + anyhow::bail!("SUDO_USER environment variable not set"); + } + } + + #[cfg(not(unix))] + { + anyhow::bail!("This tool is only supported on Unix systems"); + } + + Ok(()) +} + +fn get_tokens_from_env() -> Result { + let dm_token = env::var("DATA_MANAGEMENT_ACCESS_TOKEN") + .context("Please set DATA_MANAGEMENT_ACCESS_TOKEN environment variable or use --login flag")?; + + let sc_token = env::var("SITE_CAPABILITIES_ACCESS_TOKEN") + .context("Please set SITE_CAPABILITIES_ACCESS_TOKEN environment variable or use --login flag")?; + + Ok(Tokens { + data_management_token: dm_token, + site_capabilities_token: sc_token, + }) +} + +fn run(namespace: &str, file_name: &str, site_name: &str, tokens: &Tokens) -> Result<()> { + let client = ApiClient::new( + tokens.data_management_token.clone(), + tokens.site_capabilities_token.clone(), + ); + + client.check_namespace_available(namespace)?; + client.check_site_name_exists(site_name)?; + + let site_storages = client.site_storage_areas()?; + let data_locations = 
client.locate_data(namespace, file_name)?; + + print_data_locations_with_sites(&site_storages, &data_locations); + + if !is_data_located_at_site(site_name, &data_locations, &site_storages) { + println!( + "Data file '{}' in namespace '{}' is not located at site '{}'.", + file_name, namespace, site_name + ); + println!("Ensure that the data is staged to the site before proceeding."); + exit(1); + } + + let rse_path = extract_rse_path(&data_locations, namespace, file_name)?; + println!( + "RSE Path for file '{}' in namespace '{}': {}", + file_name, namespace, rse_path + ); + + mount_data(&rse_path, namespace)?; + + Ok(()) +} + +fn print_data_locations_with_sites( + site_stores: &StorageAreaIDToNodeAndSite, + data_locations: &[DataLocation], +) { + for location in data_locations { + if let Some((node_name, site_name, area_name)) = site_stores.get(&location.associated_storage_area_id) { + println!( + "Data location ID: {}, Storage Area: {} ({}), Node: {}, Site: {}", + location.identifier, area_name, location.associated_storage_area_id, node_name, site_name + ); + } else { + println!( + "Data location ID: {}, Storage Area ID: {}, Node/Site: Not found", + location.identifier, location.associated_storage_area_id + ); + } + } +} + +fn is_data_located_at_site( + site_name: &str, + data_locations: &[DataLocation], + site_stores: &StorageAreaIDToNodeAndSite, +) -> bool { + println!("\nData availability summary:"); + let mut found_at_site = false; + + for location in data_locations { + if let Some((node_name, site, area_name)) = site_stores.get(&location.associated_storage_area_id) { + println!(" - Storage Area: {} ({}) at Site: {} (Node: {})", + area_name, location.associated_storage_area_id, site, node_name); + if site == site_name { + found_at_site = true; + } + } else { + println!(" - Storage Area: {} at Site: Unknown", + location.associated_storage_area_id); + } + } + + found_at_site +} + +fn extract_rse_path( + data_locations: &[DataLocation], + namespace: &str, + 
file_name: &str, +) -> Result { + let pattern = format!(r"/{}/.*$", regex::escape(namespace)); + let rse_path_regex = Regex::new(&pattern)?; + + let mut matched_paths = HashSet::new(); + let mut unmatched_paths = Vec::new(); + + for location in data_locations { + for uri in &location.replicas { + if let Some(captures) = rse_path_regex.find(uri) { + matched_paths.insert(captures.as_str().to_string()); + } else { + unmatched_paths.push(uri.clone()); + } + } + } + + if !unmatched_paths.is_empty() { + println!( + "Warning: {} URIs did not match the expected pattern.", + unmatched_paths.len() + ); + println!("Unmatched URIs: {:?}", unmatched_paths); + } + + if matched_paths.is_empty() { + anyhow::bail!( + "No valid paths found for file '{}' in namespace '{}'.", + file_name, + namespace + ); + } + + if matched_paths.len() > 1 { + println!("Warning: Multiple unique paths found: {:?}", matched_paths); + println!("We should check the path for the local RSE - by cross-referencing with site capabilities."); + anyhow::bail!("Handling multiple matched paths is not implemented."); + } + + Ok(matched_paths.into_iter().next().unwrap()) +} + +fn mount_data(rse_path: &str, namespace: &str) -> Result<()> { + println!("Mounting data from RSE path: {} in namespace: {}", rse_path, namespace); + + // Get the original user (already verified in check_privileges()) + let sudo_user = env::var("SUDO_USER") + .context("SUDO_USER not set")?; + + mount::mount_operation(rse_path, namespace, &sudo_user)?; + println!("Successfully mounted {} in namespace {}", rse_path, namespace); + + Ok(()) +} diff --git a/src/models.rs b/src/models.rs new file mode 100644 index 0000000..378f659 --- /dev/null +++ b/src/models.rs @@ -0,0 +1,82 @@ +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct DataLocation { + pub identifier: String, + pub associated_storage_area_id: String, + pub replicas: Vec, +} + +pub type 
DataLocationAPIResponse = Vec; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct StorageArea { + pub id: String, + #[serde(default)] + pub name: String, + #[serde(rename = "type", default)] + pub storage_type: String, + #[serde(default)] + pub relative_path: String, + #[serde(default)] + pub tier: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Storage { + pub id: String, + #[serde(default)] + pub name: String, + pub areas: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Site { + pub id: String, + pub name: String, + pub country: String, + pub storages: Vec, +} + +impl Site { + pub fn storage_areas(&self) -> Vec<&StorageArea> { + self.storages + .iter() + .flat_map(|storage| storage.areas.iter()) + .collect() + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Node { + pub name: String, + #[serde(default)] + pub description: String, + #[serde(default)] + pub sites: Vec, +} + +impl Node { + pub fn storage_area_id_to_site_name(&self) -> HashMap { + let mut mapping = HashMap::new(); + for site in &self.sites { + for area in site.storage_areas() { + mapping.insert(area.id.clone(), (self.name.clone(), site.name.clone(), area.name.clone())); + } + } + mapping + } +} + +pub type NodesAPIResponse = Vec; +pub type SitesAPIResponse = Vec; +pub type StorageAreaIDToNodeAndSite = HashMap; + +pub fn get_all_node_storage_areas(nodes: &[Node]) -> StorageAreaIDToNodeAndSite { + let mut storage_area_mapping = HashMap::new(); + for node in nodes { + storage_area_mapping.extend(node.storage_area_id_to_site_name()); + } + storage_area_mapping +} diff --git a/src/mount.rs b/src/mount.rs new file mode 100644 index 0000000..711ce68 --- /dev/null +++ b/src/mount.rs @@ -0,0 +1,184 @@ +use anyhow::{Context, Result}; +use std::fs; +use std::path::{Path, PathBuf}; +use std::process::Command; + +pub fn mount_operation(fits_path: &str, sudo_group: &str, sudo_user: &str) -> Result<()> { + let fits_path = 
Path::new(fits_path); + let fits_file = fits_path.file_name() + .context("Invalid FITS path")? + .to_str() + .context("Invalid UTF-8 in filename")?; + + let fits_dir = fits_path.parent().and_then(|p| p.to_str()).unwrap_or(""); + + // Extract the bind name from the filename (remove extension) + let bind_name = fits_file + .rsplit_once('.') + .map(|(base, _)| base) + .unwrap_or(fits_file); + + let home = PathBuf::from("/home").join(sudo_user); + let bind_dir = home.join(".binds").join(bind_name); + let projects_dir = home.join("projects"); + let projects_file = projects_dir.join(fits_file); + let skadata_src = PathBuf::from("/skadata").join(sudo_group).join(fits_dir); + + // Check if already mounted + if is_mountpoint(&bind_dir)? { + anyhow::bail!( + "Error: {} is already mounted.", + bind_dir.display() + ); + } + + // Create directories + fs::create_dir_all(&bind_dir) + .with_context(|| format!("Failed to create {}", bind_dir.display()))?; + fs::create_dir_all(&projects_dir) + .with_context(|| format!("Failed to create {}", projects_dir.display()))?; + + // Touch projects file + fs::OpenOptions::new() + .create(true) + .write(true) + .open(&projects_file) + .with_context(|| format!("Failed to create placeholder file {}", projects_file.display()))?; + + // Set ownership and permissions + let user_group = format!("{}:{}", sudo_user, sudo_user); + + run_command( + "chown", + &["-R", &user_group, home.join(".binds").to_str().unwrap()], + "Set ownership of .binds directory", + )?; + + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + let mut perms = fs::metadata(&bind_dir)?.permissions(); + perms.set_mode(0o600); + fs::set_permissions(&bind_dir, perms)?; + } + + run_command( + "chown", + &["-R", &user_group, projects_dir.to_str().unwrap()], + "Set ownership of projects directory", + )?; + + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + let mut perms = fs::metadata(&projects_file)?.permissions(); + perms.set_mode(0o500); + 
fs::set_permissions(&projects_file, perms)?; + } + + // Run bindfs + run_command( + "bindfs", + &[ + "--perms=0700", + &format!("--force-user={}", sudo_user), + &format!("--force-group={}", sudo_user), + skadata_src.to_str().unwrap(), + bind_dir.to_str().unwrap(), + ], + "Mount with bindfs", + )?; + + // Bind mount the file + let source_file = bind_dir.join(fits_file); + run_command( + "mount", + &[ + "--bind", + source_file.to_str().unwrap(), + projects_file.to_str().unwrap(), + ], + "Bind mount file", + )?; + + // Verify mount + if is_mountpoint(&projects_file)? { + println!( + "Mount verification successful: {} is mounted at {}", + fits_file, + projects_file.display() + ); + } else { + anyhow::bail!( + "Error: Mount verification failed for {} at {}", + fits_file, + projects_file.display() + ); + } + + Ok(()) +} + +pub fn unmount_operation(fits_path: &str, sudo_user: &str) -> Result<()> { + let fits_path = Path::new(fits_path); + let fits_file = fits_path.file_name() + .context("Invalid FITS path")? 
+ .to_str() + .context("Invalid UTF-8 in filename")?; + + let bind_name = fits_file + .rsplit_once('.') + .map(|(base, _)| base) + .unwrap_or(fits_file); + + let home = PathBuf::from("/home").join(sudo_user); + let bind_dir = home.join(".binds").join(bind_name); + let projects_file = home.join("projects").join(fits_file); + + // Unmount (ignore errors if not mounted) + let _ = run_command("umount", &[projects_file.to_str().unwrap()], "Unmount projects file"); + let _ = run_command("umount", &[bind_dir.to_str().unwrap()], "Unmount bind directory"); + + // Remove directories/files + if bind_dir.exists() { + fs::remove_dir_all(&bind_dir) + .with_context(|| format!("Failed to remove {}", bind_dir.display()))?; + } + + if projects_file.exists() { + fs::remove_file(&projects_file) + .with_context(|| format!("Failed to remove {}", projects_file.display()))?; + } + + println!("Unmounted {} from {}", fits_file, projects_file.display()); + + Ok(()) +} + +fn is_mountpoint(path: &Path) -> Result { + let output = Command::new("mountpoint") + .arg("-q") + .arg(path) + .output() + .context("Failed to execute mountpoint command")?; + + Ok(output.status.success()) +} + +fn run_command(cmd: &str, args: &[&str], description: &str) -> Result<()> { + let output = Command::new(cmd) + .args(args) + .output() + .with_context(|| format!("Failed to execute: {} {}", cmd, args.join(" ")))?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + anyhow::bail!( + "{} failed: {}", + description, + stderr.trim() + ); + } + + Ok(()) +} diff --git a/src/oauth2_auth.rs b/src/oauth2_auth.rs new file mode 100644 index 0000000..d25d375 --- /dev/null +++ b/src/oauth2_auth.rs @@ -0,0 +1,264 @@ +use anyhow::{Context, Result}; +use reqwest::blocking::Client; +use serde::{Deserialize, Serialize}; +use std::fs; +use std::path::PathBuf; +use std::thread; +use std::time::{Duration, SystemTime}; + +const AUTHN_BASE_URL: &str = "https://authn.srcnet.skao.int/api/v1"; +const 
DATA_MANAGEMENT: &str = "data-management-api"; +const SITE_CAPABILITIES: &str = "site-capabilities-api"; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Tokens { + pub data_management_token: String, + pub site_capabilities_token: String, +} + +#[derive(Debug, Deserialize)] +struct DeviceCodeResponse { + device_code: String, + user_code: String, + verification_uri: String, + #[serde(default = "default_interval")] + interval: u64, + #[serde(default)] + _expires_in: Option, +} + +fn default_interval() -> u64 { + 5 +} + +#[derive(Debug, Deserialize)] +struct TokenResponse { + token: Option, + access_token: Option, + error: Option, + error_description: Option, + detail: Option, +} + +#[derive(Debug, Deserialize)] +struct TokenData { + access_token: String, +} + +#[derive(Debug, Serialize, Deserialize)] +struct CachedTokens { + tokens: Tokens, + expires_at: u64, +} + +pub fn authenticate(use_cache: bool) -> Result { + if use_cache { + if let Some(cached) = load_tokens_from_cache()? { + return Ok(cached); + } + } + + let client = Client::new(); + + let device_info = initiate_device_code_flow(&client)?; + display_user_instructions(&device_info); + + let auth_token = poll_for_authentication(&client, &device_info.device_code, device_info.interval)?; + + let dm_token = exchange_token_for_api_token(&client, &auth_token, DATA_MANAGEMENT)?; + let sc_token = exchange_token_for_api_token(&client, &auth_token, SITE_CAPABILITIES)?; + + let tokens = Tokens { + data_management_token: dm_token, + site_capabilities_token: sc_token, + }; + + save_tokens_to_cache(&tokens, 3600)?; + + Ok(tokens) +} + +fn initiate_device_code_flow(client: &Client) -> Result { + let url = format!("{}/login/device", AUTHN_BASE_URL); + let response = client + .get(&url) + .timeout(Duration::from_secs(10)) + .send() + .context("Failed to initiate device code flow")?; + + response + .error_for_status() + .context("Device code flow request failed")? 
+ .json() + .context("Failed to parse device code response") +} + +fn display_user_instructions(device_info: &DeviceCodeResponse) { + println!("\nACTION REQUIRED:"); + println!(" Open this URL in a browser and authenticate: {}?user_code={}", + device_info.verification_uri, device_info.user_code); + println!("\nWaiting for authentication (timeout: 5 minutes)..."); +} + +fn poll_for_authentication(client: &Client, device_code: &str, mut interval: u64) -> Result { + let timeout = Duration::from_secs(300); + let start = SystemTime::now(); + + loop { + if start.elapsed()? > timeout { + anyhow::bail!("Authorization timeout. Please try again."); + } + + let url = format!("{}/token", AUTHN_BASE_URL); + let response = client + .get(&url) + .query(&[("device_code", device_code)]) + .timeout(Duration::from_secs(10)) + .send() + .context("Failed to poll for authentication")?; + + if response.status().is_success() { + let token_data: TokenResponse = response.json()?; + + if let Some(token) = token_data.token { + return Ok(token.access_token); + } else if let Some(access_token) = token_data.access_token { + return Ok(access_token); + } else { + anyhow::bail!("No access token in response"); + } + } + + let error_data: TokenResponse = response.json()?; + let error = parse_error_response(&error_data); + + match error.as_deref() { + Some("authorization_pending") => { + thread::sleep(Duration::from_secs(interval)); + continue; + } + Some("slow_down") => { + interval += 5; + thread::sleep(Duration::from_secs(interval)); + continue; + } + Some("expired_token") => { + anyhow::bail!("Device code expired. 
Please try again."); + } + Some("access_denied") => { + anyhow::bail!("User denied authorization."); + } + Some(err) => { + let msg = error_data.error_description + .map(|d| format!("{}: {}", err, d)) + .unwrap_or_else(|| err.to_string()); + anyhow::bail!("Authorization error: {}", msg); + } + None => { + anyhow::bail!("Unknown authorization error"); + } + } + } +} + +fn parse_error_response(error_data: &TokenResponse) -> Option { + if let Some(detail) = &error_data.detail { + // Try to extract JSON from "response: {...}" pattern + if let Some(start) = detail.find("response:") { + let json_part = &detail[start + 9..].trim(); + if let Ok(embedded) = serde_json::from_str::(json_part) { + if embedded.error.is_some() { + return embedded.error; + } + } + } + } + error_data.error.clone() +} + +fn exchange_token_for_api_token(client: &Client, auth_token: &str, api_name: &str) -> Result { + let url = format!("{}/token/exchange/{}", AUTHN_BASE_URL, api_name); + let response = client + .get(&url) + .header("Content-Type", "application/json") + .query(&[ + ("version", "latest"), + ("try_use_cache", "false"), + ("access_token", auth_token), + ]) + .timeout(Duration::from_secs(10)) + .send() + .with_context(|| format!("Failed to exchange token for {} API", api_name))?; + + let token_data: TokenResponse = response + .error_for_status() + .with_context(|| format!("Token exchange failed for {}", api_name))? + .json()?; + + if let Some(token) = token_data.token { + Ok(token.access_token) + } else if let Some(access_token) = token_data.access_token { + Ok(access_token) + } else { + anyhow::bail!("No access token in response for {}", api_name) + } +} + +fn get_token_cache_path() -> Result { + let config_dir = dirs::config_dir() + .context("Failed to find config directory")? 
+ .join("path-finder"); + + fs::create_dir_all(&config_dir)?; + Ok(config_dir.join("tokens.json")) +} + +fn save_tokens_to_cache(tokens: &Tokens, expires_in: u64) -> Result<()> { + let cache_path = get_token_cache_path()?; + let expires_at = SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH)? + .as_secs() + expires_in; + + let cached = CachedTokens { + tokens: tokens.clone(), + expires_at, + }; + + let json = serde_json::to_string_pretty(&cached)?; + fs::write(&cache_path, json)?; + + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + let mut perms = fs::metadata(&cache_path)?.permissions(); + perms.set_mode(0o600); + fs::set_permissions(&cache_path, perms)?; + } + + println!("Tokens cached for {} seconds", expires_in); + Ok(()) +} + +fn load_tokens_from_cache() -> Result> { + let cache_path = get_token_cache_path()?; + + if !cache_path.exists() { + return Ok(None); + } + + let contents = fs::read_to_string(&cache_path)?; + let cached: CachedTokens = serde_json::from_str(&contents) + .context("Invalid cache file")?; + + let now = SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH)? 
+ .as_secs(); + + if now >= cached.expires_at { + println!("Cached tokens expired"); + return Ok(None); + } + + println!("Using cached tokens"); + Ok(Some(cached.tokens)) +} From 4671d4e5c908e7e369425850f20ad2b3c6df220d Mon Sep 17 00:00:00 2001 From: Roger Duthie <343584+rjaduthie@users.noreply.github.com> Date: Mon, 23 Feb 2026 16:31:37 +0000 Subject: [PATCH 02/27] feat(SOG-480): Add GH action to build Linux executable --- .github/workflows/build.yml | 42 +++++++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 .github/workflows/build.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 0000000..03ce8d7 --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,42 @@ +name: Build + +on: + push: + branches: [main] + pull_request: + branches: [main] + +jobs: + build-linux: + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v4 + + - name: Install Rust + uses: actions-rust-lang/setup-rust-toolchain@v1 + + - name: Build for Linux + run: cargo build --release --verbose + + - name: Upload Linux binary + uses: actions/upload-artifact@v4 + with: + name: path-finder-linux-x64 + path: target/release/path-finder + + build-macos: + runs-on: macos-latest + steps: + - uses: actions/checkout@v4 + + - name: Install Rust + uses: actions-rust-lang/setup-rust-toolchain@v1 + + - name: Build for macOS + run: cargo build --release --verbose + + - name: Upload macOS binary + uses: actions/upload-artifact@v4 + with: + name: path-finder-macos-arm64 + path: target/release/path-finder From 2a7ec519dfb164765640605de757a383026d71de Mon Sep 17 00:00:00 2001 From: Roger Duthie <343584+rjaduthie@users.noreply.github.com> Date: Mon, 23 Feb 2026 16:39:51 +0000 Subject: [PATCH 03/27] feat(SOG-480): Remove MacOS build from GH Action --- .github/workflows/build.yml | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/.github/workflows/build.yml 
b/.github/workflows/build.yml index 03ce8d7..16529f2 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -24,19 +24,19 @@ jobs: name: path-finder-linux-x64 path: target/release/path-finder - build-macos: - runs-on: macos-latest - steps: - - uses: actions/checkout@v4 - - - name: Install Rust - uses: actions-rust-lang/setup-rust-toolchain@v1 - - - name: Build for macOS - run: cargo build --release --verbose - - - name: Upload macOS binary - uses: actions/upload-artifact@v4 - with: - name: path-finder-macos-arm64 - path: target/release/path-finder + # build-macos: + # runs-on: macos-latest + # steps: + # - uses: actions/checkout@v4 + + # - name: Install Rust + # uses: actions-rust-lang/setup-rust-toolchain@v1 + + # - name: Build for macOS + # run: cargo build --release --verbose + + # - name: Upload macOS binary + # uses: actions/upload-artifact@v4 + # with: + # name: path-finder-macos-arm64 + # path: target/release/path-finder From 019196a61f04e58b3d8050df9b42ef9975bf2fe8 Mon Sep 17 00:00:00 2001 From: Roger Duthie <343584+rjaduthie@users.noreply.github.com> Date: Mon, 23 Feb 2026 16:44:29 +0000 Subject: [PATCH 04/27] fix(SOG-480): Put Linux build on actually available machine --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 16529f2..da9fbc1 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -8,7 +8,7 @@ on: jobs: build-linux: - runs-on: ubuntu-20.04 + runs-on: ubuntu-slim steps: - uses: actions/checkout@v4 From 1cf8ee16c29c84464825083282d4a0df1475d1cd Mon Sep 17 00:00:00 2001 From: Roger Duthie <343584+rjaduthie@users.noreply.github.com> Date: Mon, 23 Feb 2026 16:48:12 +0000 Subject: [PATCH 05/27] fix(SOG-480): Upgrade Linux action runner to 4 cores --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml 
index da9fbc1..b36e11d 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -8,7 +8,7 @@ on: jobs: build-linux: - runs-on: ubuntu-slim + runs-on: ubuntu-22.04 steps: - uses: actions/checkout@v4 From 09eaedbb75f8dc048b451e3ba3ed0b9efbecfe85 Mon Sep 17 00:00:00 2001 From: Roger Duthie <343584+rjaduthie@users.noreply.github.com> Date: Mon, 23 Feb 2026 16:52:58 +0000 Subject: [PATCH 06/27] fix(SOG-480): Make CLI have unmount option --- src/main.rs | 39 ++++++++++++++++++++++++++++++--------- 1 file changed, 30 insertions(+), 9 deletions(-) diff --git a/src/main.rs b/src/main.rs index 3fd79f0..0be7cd0 100644 --- a/src/main.rs +++ b/src/main.rs @@ -26,13 +26,17 @@ struct Args { #[arg(long)] file_name: String, - /// Site name where data is staged - #[arg(long)] - site_name: String, + /// Site name where data is staged (not required for unmount) + #[arg(long, required_unless_present = "unmount")] + site_name: Option, /// Do not use OAuth2 for authentication - use environment variables instead #[arg(long)] no_login: bool, + + /// Unmount previously mounted data instead of mounting + #[arg(long)] + unmount: bool, } fn main() -> Result<()> { @@ -40,6 +44,18 @@ fn main() -> Result<()> { check_privileges(&args)?; + // Handle unmount operation (no API calls needed) + if args.unmount { + let sudo_user = env::var("SUDO_USER") + .context("SUDO_USER not set")?; + + let fits_path = format!("/{}/{}", args.namespace, args.file_name); + mount::unmount_operation(&fits_path, &sudo_user)?; + println!("Successfully unmounted {} from namespace {}", args.file_name, args.namespace); + return Ok(()); + } + + // Mount operation requires authentication and API calls let tokens = if args.no_login { get_tokens_from_env()? 
} else { @@ -52,7 +68,7 @@ fn main() -> Result<()> { run( &args.namespace, &args.file_name, - &args.site_name, + args.site_name.as_ref().unwrap(), &tokens, ) } @@ -63,10 +79,15 @@ fn check_privileges(args: &Args) -> Result<()> { { let euid = unsafe { libc::geteuid() }; if euid != 0 { - eprintln!("\nError: This tool requires root privileges for mount operations."); + eprintln!("\nError: This tool requires root privileges for mount/unmount operations."); eprintln!("Please re-run with sudo:"); - eprintln!(" sudo -E path-finder --namespace {} --file_name {} --site_name {}", - args.namespace, args.file_name, args.site_name); + if args.unmount { + eprintln!(" sudo -E path-finder --namespace {} --file_name {} --unmount", + args.namespace, args.file_name); + } else { + eprintln!(" sudo -E path-finder --namespace {} --file_name {} --site_name {}", + args.namespace, args.file_name, args.site_name.as_deref().unwrap_or("")); + } anyhow::bail!("Insufficient privileges - sudo required"); } @@ -88,10 +109,10 @@ fn check_privileges(args: &Args) -> Result<()> { fn get_tokens_from_env() -> Result { let dm_token = env::var("DATA_MANAGEMENT_ACCESS_TOKEN") - .context("Please set DATA_MANAGEMENT_ACCESS_TOKEN environment variable or use --login flag")?; + .context("Please set DATA_MANAGEMENT_ACCESS_TOKEN environment variable or omit --no-login to use OAuth2")?; let sc_token = env::var("SITE_CAPABILITIES_ACCESS_TOKEN") - .context("Please set SITE_CAPABILITIES_ACCESS_TOKEN environment variable or use --login flag")?; + .context("Please set SITE_CAPABILITIES_ACCESS_TOKEN environment variable or omit --no-login to use OAuth2")?; Ok(Tokens { data_management_token: dm_token, From ea54a70916a7c809086f94fbcf268634c892a77f Mon Sep 17 00:00:00 2001 From: Roger Duthie <343584+rjaduthie@users.noreply.github.com> Date: Tue, 24 Feb 2026 13:28:03 +0000 Subject: [PATCH 07/27] feat(SOG-480): Rename executable to remove hyphen --- .github/workflows/build.yml | 4 ++-- Cargo.toml | 4 ++-- 2 files changed, 4 
insertions(+), 4 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index b36e11d..717f18f 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -21,8 +21,8 @@ jobs: - name: Upload Linux binary uses: actions/upload-artifact@v4 with: - name: path-finder-linux-x64 - path: target/release/path-finder + name: pathfinder-linux-x64 + path: target/x86_64-unknown-linux-gnu/release/pathfinder # build-macos: # runs-on: macos-latest diff --git a/Cargo.toml b/Cargo.toml index 80ed59c..9e812f0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,10 +1,10 @@ [package] -name = "path-finder" +name = "pathfinder" version = "0.1.0" edition = "2021" [[bin]] -name = "path-finder" +name = "pathfinder" path = "src/main.rs" [dependencies] From 028ba67add34cfd61beef4ce2bdbcfd49ca26f00 Mon Sep 17 00:00:00 2001 From: Roger Duthie <343584+rjaduthie@users.noreply.github.com> Date: Tue, 24 Feb 2026 13:28:33 +0000 Subject: [PATCH 08/27] fix(SOG-480): Debug mount source path mismatch --- src/mount.rs | 87 ++++++++++++++++++++++++++++++++++++++-------------- 1 file changed, 64 insertions(+), 23 deletions(-) diff --git a/src/mount.rs b/src/mount.rs index 711ce68..7d20768 100644 --- a/src/mount.rs +++ b/src/mount.rs @@ -3,31 +3,69 @@ use std::fs; use std::path::{Path, PathBuf}; use std::process::Command; -pub fn mount_operation(fits_path: &str, sudo_group: &str, sudo_user: &str) -> Result<()> { - let fits_path = Path::new(fits_path); - let fits_file = fits_path.file_name() +/// Mounts a data file from the RSE storage to the user's home directory using bindfs. +/// +/// Creates necessary directories and bind mounts to make the data file accessible to the user +/// with appropriate permissions. The file is mounted to `~/.binds/` and linked to +/// `~/projects//`. +/// +/// # Parameters +/// +/// * `data_path` - Full path to the data file on the RSE storage. 
+/// Example: `"/daac/08/06/2022-01-01_12-00-00.fits"` +/// +/// * `sudo_group` - The namespace/group for the data +/// Example: `"daac"` +/// +/// * `sudo_user` - The username of the user running the command (from SUDO_USER environment variable). +/// Example: `"jsmith"` +/// +/// # Returns +/// +/// Returns `Ok(())` on success, or an error if any step fails (directory creation, mounting, etc.). +/// +/// # Example +/// +/// ```no_run +/// mount_operation("/daac/08/06/2022-01-01_12-00-00.fits", "daac", "jsmith")?; +/// ``` +pub fn mount_operation(data_path: &str, sudo_group: &str, sudo_user: &str) -> Result<()> { + let data_path = Path::new(data_path); + let data_file = data_path.file_name() .context("Invalid FITS path")? .to_str() - .context("Invalid UTF-8 in filename")?; + .context("Invalid characters in filename that cannot be represented in UTF-8")?; - let fits_dir = fits_path.parent().and_then(|p| p.to_str()).unwrap_or(""); + let data_dir = data_path.parent() + .and_then(|p| p.to_str()) + .unwrap_or("") + .trim_start_matches('/'); // Strip leading slash for proper path joining // Extract the bind name from the filename (remove extension) - let bind_name = fits_file + let bind_name = data_file .rsplit_once('.') .map(|(base, _)| base) - .unwrap_or(fits_file); + .unwrap_or(data_file); let home = PathBuf::from("/home").join(sudo_user); let bind_dir = home.join(".binds").join(bind_name); - let projects_dir = home.join("projects"); - let projects_file = projects_dir.join(fits_file); - let skadata_src = PathBuf::from("/skadata").join(sudo_group).join(fits_dir); - - // Check if already mounted + let projects_dir = home.join("projects").join(sudo_group); + let projects_file = projects_dir.join(data_file); + // TODO: Read the SKA data base path (default: `/skadata`) from config or env variable instead of hardcoding - check it exists at startup + let skadata_src = PathBuf::from("/skadata").join(data_dir); + + // Output debug information about paths being used + 
println!("Data file: {}", data_file); + println!("Bind name: {}", bind_name); + println!("SKA data source path: {}", skadata_src.display()); + println!("Bind directory: {}", bind_dir.display()); + println!("Projects directory: {}", projects_dir.display()); + println!("Projects file: {}", projects_file.display()); + + // TODO: Check if already mounted - if so, check that the file is also mounted to the projects directory; if both true: bail if is_mountpoint(&bind_dir)? { anyhow::bail!( - "Error: {} is already mounted.", + "{} is already mounted.", bind_dir.display() ); } @@ -90,7 +128,7 @@ pub fn mount_operation(fits_path: &str, sudo_group: &str, sudo_user: &str) -> Re )?; // Bind mount the file - let source_file = bind_dir.join(fits_file); + let source_file = bind_dir.join(data_file); run_command( "mount", &[ @@ -105,13 +143,13 @@ pub fn mount_operation(fits_path: &str, sudo_group: &str, sudo_user: &str) -> Re if is_mountpoint(&projects_file)? { println!( "Mount verification successful: {} is mounted at {}", - fits_file, + data_file, projects_file.display() ); } else { anyhow::bail!( "Error: Mount verification failed for {} at {}", - fits_file, + data_file, projects_file.display() ); } @@ -119,21 +157,21 @@ pub fn mount_operation(fits_path: &str, sudo_group: &str, sudo_user: &str) -> Re Ok(()) } -pub fn unmount_operation(fits_path: &str, sudo_user: &str) -> Result<()> { - let fits_path = Path::new(fits_path); - let fits_file = fits_path.file_name() +pub fn unmount_operation(data_path: &str, sudo_user: &str) -> Result<()> { + let data_path = Path::new(data_path); + let data_file = data_path.file_name() .context("Invalid FITS path")? 
.to_str() .context("Invalid UTF-8 in filename")?; - let bind_name = fits_file + let bind_name = data_file .rsplit_once('.') .map(|(base, _)| base) - .unwrap_or(fits_file); + .unwrap_or(data_file); let home = PathBuf::from("/home").join(sudo_user); let bind_dir = home.join(".binds").join(bind_name); - let projects_file = home.join("projects").join(fits_file); + let projects_file = home.join("projects").join(data_file); // Unmount (ignore errors if not mounted) let _ = run_command("umount", &[projects_file.to_str().unwrap()], "Unmount projects file"); @@ -150,7 +188,7 @@ pub fn unmount_operation(fits_path: &str, sudo_user: &str) -> Result<()> { .with_context(|| format!("Failed to remove {}", projects_file.display()))?; } - println!("Unmounted {} from {}", fits_file, projects_file.display()); + println!("Unmounted {} from {}", data_file, projects_file.display()); Ok(()) } @@ -166,6 +204,9 @@ fn is_mountpoint(path: &Path) -> Result { } fn run_command(cmd: &str, args: &[&str], description: &str) -> Result<()> { + + println!("Running command: {} {}", cmd, args.join(" ")); + let output = Command::new(cmd) .args(args) .output() From 920cb11cf077c060cf8d2a9b35bde3f75bc3b187 Mon Sep 17 00:00:00 2001 From: Roger Duthie <343584+rjaduthie@users.noreply.github.com> Date: Thu, 26 Feb 2026 18:09:23 +0000 Subject: [PATCH 09/27] fix(SOG-480): Get local build with act working --- .actrc | 1 + .github/workflows/build.yml | 5 ++++- 2 files changed, 5 insertions(+), 1 deletion(-) create mode 100644 .actrc diff --git a/.actrc b/.actrc new file mode 100644 index 0000000..d515c69 --- /dev/null +++ b/.actrc @@ -0,0 +1 @@ +--container-architecture linux/amd64 diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 717f18f..d92cfee 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -14,11 +14,14 @@ jobs: - name: Install Rust uses: actions-rust-lang/setup-rust-toolchain@v1 + with: + cache: ${{ !env.ACT }} - name: Build for Linux - run: cargo 
build --release --verbose + run: cargo build --release --target x86_64-unknown-linux-gnu --verbose - name: Upload Linux binary + if: ${{ !env.ACT }} uses: actions/upload-artifact@v4 with: name: pathfinder-linux-x64 From 336eb67eefd5e8b0cac8161569b3fb1266e95c32 Mon Sep 17 00:00:00 2001 From: Roger Duthie <343584+rjaduthie@users.noreply.github.com> Date: Thu, 26 Feb 2026 18:14:57 +0000 Subject: [PATCH 10/27] fix(SOG-480): Fix unmount code --- src/main.rs | 2 +- src/mount.rs | 17 ++++++++++++----- 2 files changed, 13 insertions(+), 6 deletions(-) diff --git a/src/main.rs b/src/main.rs index 0be7cd0..9e5a144 100644 --- a/src/main.rs +++ b/src/main.rs @@ -50,7 +50,7 @@ fn main() -> Result<()> { .context("SUDO_USER not set")?; let fits_path = format!("/{}/{}", args.namespace, args.file_name); - mount::unmount_operation(&fits_path, &sudo_user)?; + mount::unmount_operation(&fits_path, &args.namespace, &sudo_user)?; println!("Successfully unmounted {} from namespace {}", args.file_name, args.namespace); return Ok(()); } diff --git a/src/mount.rs b/src/mount.rs index 7d20768..4fa1947 100644 --- a/src/mount.rs +++ b/src/mount.rs @@ -14,7 +14,7 @@ use std::process::Command; /// * `data_path` - Full path to the data file on the RSE storage. /// Example: `"/daac/08/06/2022-01-01_12-00-00.fits"` /// -/// * `sudo_group` - The namespace/group for the data +/// * `namespace` - The namespace/group for the data /// Example: `"daac"` /// /// * `sudo_user` - The username of the user running the command (from SUDO_USER environment variable). @@ -29,7 +29,7 @@ use std::process::Command; /// ```no_run /// mount_operation("/daac/08/06/2022-01-01_12-00-00.fits", "daac", "jsmith")?; /// ``` -pub fn mount_operation(data_path: &str, sudo_group: &str, sudo_user: &str) -> Result<()> { +pub fn mount_operation(data_path: &str, namespace: &str, sudo_user: &str) -> Result<()> { let data_path = Path::new(data_path); let data_file = data_path.file_name() .context("Invalid FITS path")? 
@@ -49,7 +49,7 @@ pub fn mount_operation(data_path: &str, sudo_group: &str, sudo_user: &str) -> Re let home = PathBuf::from("/home").join(sudo_user); let bind_dir = home.join(".binds").join(bind_name); - let projects_dir = home.join("projects").join(sudo_group); + let projects_dir = home.join("projects").join(namespace); let projects_file = projects_dir.join(data_file); // TODO: Read the SKA data base path (default: `/skadata`) from config or env variable instead of hardcoding - check it exists at startup let skadata_src = PathBuf::from("/skadata").join(data_dir); @@ -157,7 +157,7 @@ pub fn mount_operation(data_path: &str, sudo_group: &str, sudo_user: &str) -> Re Ok(()) } -pub fn unmount_operation(data_path: &str, sudo_user: &str) -> Result<()> { +pub fn unmount_operation(data_path: &str, namespace: &str, sudo_user: &str) -> Result<()> { let data_path = Path::new(data_path); let data_file = data_path.file_name() .context("Invalid FITS path")? @@ -171,7 +171,14 @@ pub fn unmount_operation(data_path: &str, sudo_user: &str) -> Result<()> { let home = PathBuf::from("/home").join(sudo_user); let bind_dir = home.join(".binds").join(bind_name); - let projects_file = home.join("projects").join(data_file); + let projects_dir = home.join("projects").join(namespace); + let projects_file = projects_dir.join(data_file); + + println!("Data file: {}", data_file); + println!("Bind name: {}", bind_name); + println!("Bind directory: {}", bind_dir.display()); + println!("Projects directory: {}", projects_dir.display()); + println!("Projects file: {}", projects_file.display()); // Unmount (ignore errors if not mounted) let _ = run_command("umount", &[projects_file.to_str().unwrap()], "Unmount projects file"); From 0e17e23fe8ccda3f230f747012f5b337842e6416 Mon Sep 17 00:00:00 2001 From: Roger Duthie <343584+rjaduthie@users.noreply.github.com> Date: Thu, 26 Feb 2026 18:48:44 +0000 Subject: [PATCH 11/27] chore(SOG-480): Remove debugging print statements --- src/main.rs | 1 - src/mount.rs 
| 16 ---------------- 2 files changed, 17 deletions(-) diff --git a/src/main.rs b/src/main.rs index 9e5a144..f350612 100644 --- a/src/main.rs +++ b/src/main.rs @@ -51,7 +51,6 @@ fn main() -> Result<()> { let fits_path = format!("/{}/{}", args.namespace, args.file_name); mount::unmount_operation(&fits_path, &args.namespace, &sudo_user)?; - println!("Successfully unmounted {} from namespace {}", args.file_name, args.namespace); return Ok(()); } diff --git a/src/mount.rs b/src/mount.rs index 4fa1947..6efcf27 100644 --- a/src/mount.rs +++ b/src/mount.rs @@ -54,14 +54,6 @@ pub fn mount_operation(data_path: &str, namespace: &str, sudo_user: &str) -> Res // TODO: Read the SKA data base path (default: `/skadata`) from config or env variable instead of hardcoding - check it exists at startup let skadata_src = PathBuf::from("/skadata").join(data_dir); - // Output debug information about paths being used - println!("Data file: {}", data_file); - println!("Bind name: {}", bind_name); - println!("SKA data source path: {}", skadata_src.display()); - println!("Bind directory: {}", bind_dir.display()); - println!("Projects directory: {}", projects_dir.display()); - println!("Projects file: {}", projects_file.display()); - // TODO: Check if already mounted - if so, check that the file is also mounted to the projects directory; if both true: bail if is_mountpoint(&bind_dir)? 
{ anyhow::bail!( @@ -174,12 +166,6 @@ pub fn unmount_operation(data_path: &str, namespace: &str, sudo_user: &str) -> R let projects_dir = home.join("projects").join(namespace); let projects_file = projects_dir.join(data_file); - println!("Data file: {}", data_file); - println!("Bind name: {}", bind_name); - println!("Bind directory: {}", bind_dir.display()); - println!("Projects directory: {}", projects_dir.display()); - println!("Projects file: {}", projects_file.display()); - // Unmount (ignore errors if not mounted) let _ = run_command("umount", &[projects_file.to_str().unwrap()], "Unmount projects file"); let _ = run_command("umount", &[bind_dir.to_str().unwrap()], "Unmount bind directory"); @@ -212,8 +198,6 @@ fn is_mountpoint(path: &Path) -> Result { fn run_command(cmd: &str, args: &[&str], description: &str) -> Result<()> { - println!("Running command: {} {}", cmd, args.join(" ")); - let output = Command::new(cmd) .args(args) .output() From 489fe052f1c970ab17abe19caabdea964a41b29d Mon Sep 17 00:00:00 2001 From: Roger Duthie <343584+rjaduthie@users.noreply.github.com> Date: Thu, 19 Mar 2026 13:53:05 +0000 Subject: [PATCH 12/27] feat(SOG-480): Remove need for site_name CLI arg --- .github/workflows/build.yml | 17 ------- RUST_README.md | 6 +-- src/api_client.rs | 97 ++++++++++++++++++------------------- src/main.rs | 56 ++++++--------------- src/models.rs | 10 ++-- 5 files changed, 74 insertions(+), 112 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index d92cfee..a6fa4e0 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -26,20 +26,3 @@ jobs: with: name: pathfinder-linux-x64 path: target/x86_64-unknown-linux-gnu/release/pathfinder - - # build-macos: - # runs-on: macos-latest - # steps: - # - uses: actions/checkout@v4 - - # - name: Install Rust - # uses: actions-rust-lang/setup-rust-toolchain@v1 - - # - name: Build for macOS - # run: cargo build --release --verbose - - # - name: Upload macOS 
binary - # uses: actions/upload-artifact@v4 - # with: - # name: path-finder-macos-arm64 - # path: target/release/path-finder diff --git a/RUST_README.md b/RUST_README.md index 7402c76..c4a50bc 100644 --- a/RUST_README.md +++ b/RUST_README.md @@ -55,8 +55,7 @@ With OAuth2 authentication (recommended): ```bash path-finder \ --namespace daac \ - --file_name pi24_test_run_1_cleaned.fits \ - --site_name UKSRC-CAM-PREPROD + --file_name pi24_test_run_1_cleaned.fits ``` With environment variables (for automation): @@ -68,10 +67,11 @@ export SITE_CAPABILITIES_ACCESS_TOKEN="your_token_here" path-finder \ --namespace daac \ --file_name pi24_test_run_1_cleaned.fits \ - --site_name UKSRC-CAM-PREPROD \ --no-login ``` +**Note**: The tool will automatically check if the file exists locally at `/skadata`. If the file is not found locally, it will display the sites where the file is available and prompt you to ensure the data has been staged to your local site before mounting. + ### Mount Utility The mount utility is designed to be called with sudo privileges. It handles: diff --git a/src/api_client.rs b/src/api_client.rs index 7e34d2d..8c73a10 100644 --- a/src/api_client.rs +++ b/src/api_client.rs @@ -1,6 +1,6 @@ +use crate::models::*; use anyhow::{Context, Result}; use reqwest::blocking::Client; -use crate::models::*; const DM_API_BASEURL: &str = "https://data-management.srcnet.skao.int/api/v1"; const SC_API_BASEURL: &str = "https://site-capabilities.srcnet.skao.int/api/v1"; @@ -20,21 +20,6 @@ impl ApiClient { } } - pub fn get_all_namespaces(&self) -> Result> { - let url = format!("{}/data/list", DM_API_BASEURL); - let response = self.client - .get(&url) - .bearer_auth(&self.dm_token) - .send() - .context("Failed to request namespaces from DM API")?; - - response - .error_for_status() - .context("DM API request failed")? 
- .json() - .context("Failed to parse namespaces response") - } - pub fn check_namespace_available(&self, namespace: &str) -> Result<()> { let namespaces = self.get_all_namespaces()?; if !namespaces.contains(&namespace.to_string()) { @@ -47,56 +32,57 @@ impl ApiClient { Ok(()) } - pub fn all_site_names(&self) -> Result> { - let url = format!("{}/sites", SC_API_BASEURL); - let response = self.client + pub fn get_all_namespaces(&self) -> Result> { + let url = format!("{}/data/list", DM_API_BASEURL); + let response = self + .client .get(&url) - .bearer_auth(&self.sc_token) + .bearer_auth(&self.dm_token) .send() - .context("Failed to request sites from SC API")?; + .context("Failed to request namespaces from DM API")?; - let sites: SitesAPIResponse = response + response .error_for_status() - .context("SC API request failed")? + .context("DM API request failed")? .json() - .context("Failed to parse sites response")?; - - Ok(sites.into_iter().map(|site| site.name).collect()) - } - - pub fn check_site_name_exists(&self, site_name: &str) -> Result<()> { - let sites = self.all_site_names()?; - if !sites.contains(&site_name.to_string()) { - eprintln!( - "Error: Site name '{}' not found in available sites:\n\n{}", - site_name, - sites.join(", ") - ); - std::process::exit(1); - } - Ok(()) + .context("Failed to parse namespaces response") } pub fn site_storage_areas(&self) -> Result { let url = format!("{}/nodes", SC_API_BASEURL); - let response = self.client + let response = self + .client .get(&url) .bearer_auth(&self.sc_token) .send() .context("Failed to request nodes from SC API")?; - let nodes: NodesAPIResponse = response + let response = response .error_for_status() - .context("SC API request failed")? 
- .json() - .context("Failed to parse nodes response")?; + .context("SC API request failed")?; + + let response_text = response.text() + .context("Failed to read response body")?; + + let nodes: NodesAPIResponse = serde_json::from_str(&response_text) + .with_context(|| { + format!( + "Failed to parse nodes response. Response body:\n{}", + if response_text.len() > 1000 { + format!("{}... (truncated)", &response_text[..1000]) + } else { + response_text.clone() + } + ) + })?; Ok(get_all_node_storage_areas(&nodes)) } pub fn locate_data(&self, namespace: &str, file_name: &str) -> Result { let url = format!("{}/data/locate/{}/{}", DM_API_BASEURL, namespace, file_name); - let response = self.client + let response = self + .client .get(&url) .bearer_auth(&self.dm_token) .send() @@ -107,10 +93,23 @@ impl ApiClient { ) })?; - response + let response = response .error_for_status() - .context("DM API locate request failed")? - .json() - .context("Failed to parse data locations response") + .context("DM API locate request failed")?; + + let response_text = response.text() + .context("Failed to read response body")?; + + serde_json::from_str(&response_text) + .with_context(|| { + format!( + "Failed to parse data locations response. Response body:\n{}", + if response_text.len() > 1000 { + format!("{}... 
(truncated)", &response_text[..1000]) + } else { + response_text.clone() + } + ) + }) } } diff --git a/src/main.rs b/src/main.rs index f350612..8932d13 100644 --- a/src/main.rs +++ b/src/main.rs @@ -26,10 +26,6 @@ struct Args { #[arg(long)] file_name: String, - /// Site name where data is staged (not required for unmount) - #[arg(long, required_unless_present = "unmount")] - site_name: Option, - /// Do not use OAuth2 for authentication - use environment variables instead #[arg(long)] no_login: bool, @@ -67,7 +63,6 @@ fn main() -> Result<()> { run( &args.namespace, &args.file_name, - args.site_name.as_ref().unwrap(), &tokens, ) } @@ -84,8 +79,8 @@ fn check_privileges(args: &Args) -> Result<()> { eprintln!(" sudo -E path-finder --namespace {} --file_name {} --unmount", args.namespace, args.file_name); } else { - eprintln!(" sudo -E path-finder --namespace {} --file_name {} --site_name {}", - args.namespace, args.file_name, args.site_name.as_deref().unwrap_or("")); + eprintln!(" sudo -E path-finder --namespace {} --file_name {}", + args.namespace, args.file_name); } anyhow::bail!("Insufficient privileges - sudo required"); } @@ -119,35 +114,34 @@ fn get_tokens_from_env() -> Result { }) } -fn run(namespace: &str, file_name: &str, site_name: &str, tokens: &Tokens) -> Result<()> { +fn run(namespace: &str, file_name: &str, tokens: &Tokens) -> Result<()> { let client = ApiClient::new( tokens.data_management_token.clone(), tokens.site_capabilities_token.clone(), ); client.check_namespace_available(namespace)?; - client.check_site_name_exists(site_name)?; let site_storages = client.site_storage_areas()?; let data_locations = client.locate_data(namespace, file_name)?; print_data_locations_with_sites(&site_storages, &data_locations); - if !is_data_located_at_site(site_name, &data_locations, &site_storages) { - println!( - "Data file '{}' in namespace '{}' is not located at site '{}'.", - file_name, namespace, site_name - ); - println!("Ensure that the data is staged to the 
site before proceeding."); - exit(1); - } - let rse_path = extract_rse_path(&data_locations, namespace, file_name)?; println!( "RSE Path for file '{}' in namespace '{}': {}", file_name, namespace, rse_path ); + // Check if the file exists locally + if !check_local_file_exists(&rse_path) { + println!("\n⚠️ File not found locally! ⚠️"); + println!("\nThe file is available at the following locations:"); + print_data_locations_with_sites(&site_storages, &data_locations); + println!("\nPlease ensure the data has been staged to this local site before mounting."); + exit(1); + } + mount_data(&rse_path, namespace)?; Ok(()) @@ -172,28 +166,10 @@ fn print_data_locations_with_sites( } } -fn is_data_located_at_site( - site_name: &str, - data_locations: &[DataLocation], - site_stores: &StorageAreaIDToNodeAndSite, -) -> bool { - println!("\nData availability summary:"); - let mut found_at_site = false; - - for location in data_locations { - if let Some((node_name, site, area_name)) = site_stores.get(&location.associated_storage_area_id) { - println!(" - Storage Area: {} ({}) at Site: {} (Node: {})", - area_name, location.associated_storage_area_id, site, node_name); - if site == site_name { - found_at_site = true; - } - } else { - println!(" - Storage Area: {} at Site: Unknown", - location.associated_storage_area_id); - } - } - - found_at_site +fn check_local_file_exists(rse_path: &str) -> bool { + use std::path::Path; + let local_path = format!("/skadata{}", rse_path); + Path::new(&local_path).exists() } fn extract_rse_path( diff --git a/src/models.rs b/src/models.rs index 378f659..191dfc9 100644 --- a/src/models.rs +++ b/src/models.rs @@ -19,7 +19,6 @@ pub struct StorageArea { pub storage_type: String, #[serde(default)] pub relative_path: String, - #[serde(default)] pub tier: Option, } @@ -28,6 +27,7 @@ pub struct Storage { pub id: String, #[serde(default)] pub name: String, + #[serde(default)] pub areas: Vec, } @@ -35,7 +35,9 @@ pub struct Storage { pub struct Site { pub id: 
String, pub name: String, + #[serde(default)] pub country: String, + #[serde(default)] pub storages: Vec, } @@ -62,7 +64,10 @@ impl Node { let mut mapping = HashMap::new(); for site in &self.sites { for area in site.storage_areas() { - mapping.insert(area.id.clone(), (self.name.clone(), site.name.clone(), area.name.clone())); + mapping.insert( + area.id.clone(), + (self.name.clone(), site.name.clone(), area.name.clone()), + ); } } mapping @@ -70,7 +75,6 @@ impl Node { } pub type NodesAPIResponse = Vec; -pub type SitesAPIResponse = Vec; pub type StorageAreaIDToNodeAndSite = HashMap; pub fn get_all_node_storage_areas(nodes: &[Node]) -> StorageAreaIDToNodeAndSite { From 28739581983c6313c5a9603754ad318d54d3003c Mon Sep 17 00:00:00 2001 From: Roger Duthie <343584+rjaduthie@users.noreply.github.com> Date: Thu, 19 Mar 2026 16:46:12 +0000 Subject: [PATCH 13/27] chore(SOG-480): Make sure of executable name and version --- .github/workflows/build.yml | 2 +- Cargo.toml | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index a6fa4e0..fb5a905 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -25,4 +25,4 @@ jobs: uses: actions/upload-artifact@v4 with: name: pathfinder-linux-x64 - path: target/x86_64-unknown-linux-gnu/release/pathfinder + path: target/x86_64-unknown-linux-gnu/release/pathFinder diff --git a/Cargo.toml b/Cargo.toml index 9e812f0..848189e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,10 +1,10 @@ [package] -name = "pathfinder" -version = "0.1.0" +name = "pathFinder" +version = "1.0.0" edition = "2021" [[bin]] -name = "pathfinder" +name = "pathFinder" path = "src/main.rs" [dependencies] From e1792b06742f297bdffecc417c5dfefc7249bd2e Mon Sep 17 00:00:00 2001 From: Roger Duthie <343584+rjaduthie@users.noreply.github.com> Date: Mon, 23 Mar 2026 15:39:48 +0000 Subject: [PATCH 14/27] chore(SOG-480): Remove bash and python scripts --- .python-version | 1 - 
bash_scripts/pathFinder-nobind.sh | 43 --- bash_scripts/pathFinder.py | 86 ----- bash_scripts/pathFinder.sh | 50 --- path_finder/__init__.py | 0 path_finder/models/__init__.py | 0 path_finder/models/data_management.py | 10 - path_finder/models/site_capabilities.py | 74 ----- path_finder/oauth2_auth.py | 350 --------------------- path_finder/path_finder.py | 401 ------------------------ pyproject.toml | 10 - uv.lock | 199 ------------ 12 files changed, 1224 deletions(-) delete mode 100644 .python-version delete mode 100644 bash_scripts/pathFinder-nobind.sh delete mode 100644 bash_scripts/pathFinder.py delete mode 100644 bash_scripts/pathFinder.sh delete mode 100644 path_finder/__init__.py delete mode 100644 path_finder/models/__init__.py delete mode 100644 path_finder/models/data_management.py delete mode 100644 path_finder/models/site_capabilities.py delete mode 100644 path_finder/oauth2_auth.py delete mode 100644 path_finder/path_finder.py delete mode 100644 pyproject.toml delete mode 100644 uv.lock diff --git a/.python-version b/.python-version deleted file mode 100644 index 24ee5b1..0000000 --- a/.python-version +++ /dev/null @@ -1 +0,0 @@ -3.13 diff --git a/bash_scripts/pathFinder-nobind.sh b/bash_scripts/pathFinder-nobind.sh deleted file mode 100644 index 19cf42b..0000000 --- a/bash_scripts/pathFinder-nobind.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/bin/bash -OPTION=$1 -FITS=$2 -SUDO_GROUP=$3 - -FITS_FILE=$(echo $FITS | awk -F"/" '{print $NF}') -FITS_PATH=$(echo $FITS | awk -F"/" 'BEGIN {OFS = FS} {$(NF--)=""; print}') -BIND_PATH=$(echo $FITS_FILE | awk -F"." '{print $1}') - -if [ "$OPTION" = "--mount" ]; then - # Check if the .binds target is already a mount to avoid cyclical mounts - if mountpoint -q "/home/$SUDO_USER/projects/$BIND_PATH"; then - echo "Error: /home/$SUDO_USER/projects/$BIND_PATH is already mounted; aborting to avoid cyclic mounts." 
- exit 1 - else - # mkdir -p "/home/$SUDO_USER/.binds/$BIND_PATH" - # chown -R "$SUDO_USER:$SUDO_USER" "/home/$SUDO_USER/.binds/" - # chmod 600 "/home/$SUDO_USER/.binds/$BIND_PATH" - - mkdir -p "/home/$SUDO_USER/projects/$BIND_PATH" - # touch "/home/$SUDO_USER/projects/$FITS_FILE" - chown -R "$SUDO_USER:$SUDO_USER" "/home/$SUDO_USER/projects/" - chmod 500 "/home/$SUDO_USER/projects/$BIND_PATH" - bindfs --perms=0700 --force-user="$SUDO_USER" --force-group="$SUDO_USER" "/skadata/$SUDO_GROUP/$FITS_PATH" "/home/$SUDO_USER/projects/$BIND_PATH" - # mount --bind "/home/$SUDO_USER/.binds/$BIND_PATH/$FITS_FILE" "/home/$SUDO_USER/projects/$FITS_FILE" - fi - # Verify the mount was successful - if mountpoint -q "/home/$SUDO_USER/projects/$BIND_PATH"; then - echo "Mount verification successful: $BIND_PATH is mounted at /home/$SUDO_USER/projects/$BIND_PATH" - else - echo "Error: Mount verification failed for $BIND_PATH at /home/$SUDO_USER/projects/$BIND_PATH" - exit 1 - fi -elif [ "$OPTION" = "--unmount" ]; then - umount "/home/$SUDO_USER/projects/$BIND_PATH" - # umount "/home/$SUDO_USER/.binds/$BIND_PATH" - # rm -rf "/home/$SUDO_USER/.binds/$BIND_PATH" - rm -f "/home/$SUDO_USER/projects/$BIND_PATH" - echo "Unmounted $FITS_FILE from /home/$SUDO_USER/projects/$BIND_PATH" -else - echo "Usage: $0 [--mount|--unmount] " - exit 1 -fi diff --git a/bash_scripts/pathFinder.py b/bash_scripts/pathFinder.py deleted file mode 100644 index a438d96..0000000 --- a/bash_scripts/pathFinder.py +++ /dev/null @@ -1,86 +0,0 @@ -#!/usr/bin/env python3 -import argparse -import os -import sys -import subprocess - -def run(cmd, check=True): - res = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) - if check and res.returncode != 0: - print(f"Error running: {' '.join(cmd)}\n{res.stderr.strip()}", file=sys.stderr) - sys.exit(res.returncode) - return res - -def is_mountpoint(path): - return subprocess.run(['mountpoint', '-q', path]).returncode == 0 - -def main(): - p = 
argparse.ArgumentParser(prog=os.path.basename(__file__), add_help=False) - p.add_argument('option', nargs='?') - p.add_argument('fits', nargs='?') - p.add_argument('sudo_group', nargs='?') - args = p.parse_args() - - if args.option not in ('--mount', '--unmount'): - print(f"Usage: {os.path.basename(__file__)} [--mount|--unmount] ") - sys.exit(1) - - if not args.fits or not args.sudo_group: - print("Error: missing or ", file=sys.stderr) - sys.exit(1) - - sudo_user = os.environ.get('SUDO_USER') - if not sudo_user: - print("Error: SUDO_USER not set. Run via sudo.", file=sys.stderr) - sys.exit(1) - - fits = args.fits - fits_file = os.path.basename(fits) - fits_path = os.path.dirname(fits) # may be '' - bind_name = os.path.splitext(fits_file)[0] - - home = f"/home/{sudo_user}" - bind_dir = os.path.join(home, '.binds', bind_name) - projects_dir = os.path.join(home, 'projects') - projects_file = os.path.join(projects_dir, fits_file) - skadata_src = os.path.join('/skadata', args.sudo_group, fits_path) - - if args.option == '--mount': - # avoid cyclic mounts - if is_mountpoint(bind_dir): - print(f"Error: {bind_dir} is already mounted; aborting to avoid cyclic mounts.", file=sys.stderr) - sys.exit(1) - - os.makedirs(bind_dir, exist_ok=True) - os.makedirs(projects_dir, exist_ok=True) - - # touch project file - open(projects_file, 'a').close() - - # set ownership and perms - run(['chown', '-R', f'{sudo_user}:{sudo_user}', os.path.join(home, '.binds')]) - run(['chmod', '600', bind_dir]) # file-like perms in original; keep simple - run(['chown', '-R', f'{sudo_user}:{sudo_user}', projects_dir]) - run(['chmod', '500', projects_file]) - - # bindfs then bind mount - run(['bindfs', '--perms=0700', f'--force-user={sudo_user}', f'--force-group={sudo_user}', skadata_src, bind_dir]) - run(['mount', '--bind', os.path.join(bind_dir, fits_file), projects_file]) - - # verify - if is_mountpoint(projects_file): - print(f"Mount verification successful: {fits_file} is mounted at 
{projects_file}") - else: - print(f"Error: Mount verification failed for {fits_file} at {projects_file}", file=sys.stderr) - sys.exit(1) - - elif args.option == '--unmount': - run(['umount', projects_file], check=False) - run(['umount', bind_dir], check=False) - run(['rm', '-rf', bind_dir]) - run(['rm', '-f', projects_file]) - print(f"Unmounted {fits_file} from {projects_file}") - -if __name__ == '__main__': - main() -# EOF \ No newline at end of file diff --git a/bash_scripts/pathFinder.sh b/bash_scripts/pathFinder.sh deleted file mode 100644 index 7b10d69..0000000 --- a/bash_scripts/pathFinder.sh +++ /dev/null @@ -1,50 +0,0 @@ -#!/bin/bash -OPTION=$1 -FITS=$2 -SUDO_GROUP=$3 - -FITS_FILE=$(echo $FITS | awk -F"/" '{print $NF}') -FITS_PATH=$(echo $FITS | awk -F"/" 'BEGIN {OFS = FS} {$(NF--)=""; print}') -FITS_GROUP=$(echo $FITS | awk -F"/" '{print $1}') -BIND_PATH=$(echo $FITS_FILE | awk -F"." '{print $1}') - -if [ "$OPTION" = "--mount" ]; then - # Check if the .binds target is already a mount to avoid cyclical mounts - if mountpoint -q "/home/$SUDO_USER/.binds/$FITS_PATH"; then - echo "Error: /home/$SUDO_USER/.binds/$FITS_PATH is already mounted; aborting to avoid cyclic mounts." - exit 1 - else - # Verify that the provided sudo group matches the namespace - if [ $FITS_GROUP != "$SUDO_GROUP" ]; then - echo "Error: Provided sudo group '$SUDO_GROUP' does not match fits group '$FITS_GROUP'; aborting." 
- exit 1 - else - mkdir -p "/home/$SUDO_USER/.binds/$BIND_PATH" - chown -R "$SUDO_USER:$SUDO_USER" "/home/$SUDO_USER/.binds/" - chmod 600 "/home/$SUDO_USER/.binds/$BIND_PATH" - - mkdir -p "/home/$SUDO_USER/projects" - touch "/home/$SUDO_USER/projects/$FITS_FILE" - chown -R "$SUDO_USER:$SUDO_USER" "/home/$SUDO_USER/projects/" - chmod 600 "/home/$SUDO_USER/projects/$FITS_FILE" - bindfs --perms=0700 --force-user="$SUDO_USER" --force-group="$SUDO_USER" "/skadata/$FITS_PATH" "/home/$SUDO_USER/.binds/$BIND_PATH" - mount --bind "/home/$SUDO_USER/.binds/$BIND_PATH/$FITS_FILE" "/home/$SUDO_USER/projects/$FITS_FILE" - fi - fi - # Verify the mount was successful - if mountpoint -q "/home/$SUDO_USER/projects/$FITS_FILE"; then - echo "Mount verification successful: $FITS_FILE is mounted at /home/$SUDO_USER/projects/$FITS_FILE" - else - echo "Error: Mount verification failed for $FITS_FILE at /home/$SUDO_USER/projects/$FITS_FILE" - exit 1 - fi -elif [ "$OPTION" = "--unmount" ]; then - umount "/home/$SUDO_USER/projects/$FITS_FILE" - umount "/home/$SUDO_USER/.binds/$BIND_PATH" - rm -rf "/home/$SUDO_USER/.binds/$BIND_PATH" - rm -f "/home/$SUDO_USER/projects/$FITS_FILE" - echo "Unmounted $FITS_FILE from /home/$SUDO_USER/projects/$FITS_FILE" -else - echo "Usage: $0 [--mount|--unmount] " - exit 1 -fi diff --git a/path_finder/__init__.py b/path_finder/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/path_finder/models/__init__.py b/path_finder/models/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/path_finder/models/data_management.py b/path_finder/models/data_management.py deleted file mode 100644 index 448cf6e..0000000 --- a/path_finder/models/data_management.py +++ /dev/null @@ -1,10 +0,0 @@ -from pydantic import BaseModel, TypeAdapter - - -class DataLocation(BaseModel): - identifier: str - associated_storage_area_id: str - replicas: list[str] - - -DataLocationAPIResponse = TypeAdapter(list[DataLocation]) diff --git 
a/path_finder/models/site_capabilities.py b/path_finder/models/site_capabilities.py deleted file mode 100644 index eacd9ad..0000000 --- a/path_finder/models/site_capabilities.py +++ /dev/null @@ -1,74 +0,0 @@ -import itertools -from pydantic import BaseModel, Field, TypeAdapter - -# Type aliases to aid readability of model classes -SiteName = str -NodeName = str -StorageAreaID = str -SiteNameToStorageAreas = dict[SiteName, list["StorageArea"]] -NodeNameToSiteStorageAreas = dict[NodeName, SiteNameToStorageAreas] -StorageAreaIDToNodeAndSite = dict[StorageAreaID, tuple[NodeName, SiteName]] - - -class StorageArea(BaseModel): - id: StorageAreaID - name: str = Field(default="") - type: str = Field(default="") - relative_path: str = Field(default="") - tier: int | None = Field(default=None) - - -class Storage(BaseModel): - id: str - name: str = Field(default="") - areas: list[StorageArea] - - -class Site(BaseModel): - id: str - name: SiteName - country: str - storages: list[Storage] - - @property - def storage_areas(self) -> list[StorageArea]: - """Collate all storage areas from all storages in this site.""" - return list( - itertools.chain.from_iterable(storage.areas for storage in self.storages) - ) - - -class Node(BaseModel): - name: NodeName - description: str = Field(default="") - sites: list[Site] = Field(default=[]) - - @property - def storage_areas(self) -> SiteNameToStorageAreas: - """Construct a mapping of site names to their storage areas.""" - return {site.name: [area for area in site.storage_areas] for site in self.sites} - - @property - def storage_area_id_to_site_name(self) -> StorageAreaIDToNodeAndSite: - """Construct a mapping of storage area IDs to their corresponding node and site names.""" - mapping: dict[str, tuple[NodeName, SiteName]] = {} - for site_name, storage_areas in self.storage_areas.items(): - mapping.update({area.id: (self.name, site_name) for area in storage_areas}) - return mapping - - -# Define an entity which represents the API response 
containing a list of nodes -NodesAPIResponse = TypeAdapter(list[Node]) -SitesAPIResponse = TypeAdapter(list[Site]) - -def get_all_node_storage_areas(nodes: list[Node]) -> StorageAreaIDToNodeAndSite: - """Fetch all nodes and construct a mapping of storage area IDs to their corresponding node and site names. - - Returns: - StorageAreaIDToNodeAndSite: A mapping of storage area IDs to their corresponding node - and site names. - """ - storage_area_mapping: StorageAreaIDToNodeAndSite = {} - for node in nodes: - storage_area_mapping.update(node.storage_area_id_to_site_name) - return storage_area_mapping diff --git a/path_finder/oauth2_auth.py b/path_finder/oauth2_auth.py deleted file mode 100644 index e5571b8..0000000 --- a/path_finder/oauth2_auth.py +++ /dev/null @@ -1,350 +0,0 @@ -#!/usr/bin/env python3 -""" -OAuth2 Device Code Flow authentication for SKA APIs. - -This module implements OAuth2 device code flow to authenticate users and obtain -access tokens for the Data Management and Site Capabilities APIs. -""" - -import json -import os -import re -import time -from datetime import datetime, timedelta -from pathlib import Path -import requests - - -# Authentication endpoints -AUTHN_BASE_URL = "https://authn.srcnet.skao.int/api/v1" -DATA_MANAGEMENT = "data-management-api" -SITE_CAPABILITIES = "site-capabilities-api" - - -class OAuth2AuthenticationError(Exception): - """Exception raised for OAuth2 authentication errors.""" - - pass - - -def authenticate(use_cache: bool = True) -> dict[str, str]: - """Complete OAuth2 device code flow and obtain all required API tokens. - - Args: - use_cache: Whether to use cached tokens if available (default: True). - - Returns: - Dict containing: - - data_management_token: Token for Data Management API - - site_capabilities_token: Token for Site Capabilities API - - Raises: - OAuth2AuthenticationError: If authentication fails at any step. 
- """ - - # Try to load from cache first - if use_cache: - cached_tokens = load_tokens_from_cache() - if cached_tokens: - return cached_tokens - - # Perform full authentication flow - device_info = initiate_device_code_flow() - display_user_instructions(device_info) - - device_code = device_info["device_code"] - interval = int(device_info.get("interval", 5)) - auth_token = poll_for_authentication(device_code, interval) - - # Get API-specific tokens - dm_token = exchange_token_for_api_token(auth_token, DATA_MANAGEMENT) - sc_token = exchange_token_for_api_token(auth_token, SITE_CAPABILITIES) - - tokens = {"data_management_token": dm_token, "site_capabilities_token": sc_token} - - # Save to cache (default expiration: 1 hour) - save_tokens_to_cache(tokens, expires_in=3600) - - return tokens - - -def save_tokens_to_cache(tokens: dict[str, str], expires_in: int = 3600) -> None: - """Save authentication tokens to cache file. - - Args: - tokens: Dictionary containing authentication tokens. - expires_in: Token expiration time in seconds (default: 1 hour). - """ - cache_path = get_token_cache_path() - - # Calculate expiration time - expiration = (datetime.now() + timedelta(seconds=expires_in)).isoformat() - - cache_data = {"tokens": tokens, "expires_at": expiration} - - # Write to cache with secure permissions - cache_path.write_text(json.dumps(cache_data, indent=2)) - os.chmod(cache_path, 0o600) # Read/write for owner only - print(f"Tokens cached until {expiration}") - - -def load_tokens_from_cache() -> dict[str, str] | None: - """Load authentication tokens from cache if valid. - - Returns: - Dictionary containing tokens if valid, None if expired or not found. 
- """ - cache_path = get_token_cache_path() - - if not cache_path.exists(): - return None - - try: - cache_data = json.loads(cache_path.read_text()) - - # Check if tokens are expired - expires_at = datetime.fromisoformat(cache_data["expires_at"]) - if datetime.now() >= expires_at: - print("Cached tokens expired") - return None - - print("Using cached tokens") - return cache_data["tokens"] - - except (json.JSONDecodeError, KeyError, ValueError) as e: - print(f"Invalid cache file: {e}") - return None - - -def get_token_cache_path() -> Path: - """Get the path to the token cache file. - - Returns: - Path to the token cache file in user's config directory. - """ - config_dir = Path.home() / ".config" / "path-finder" - config_dir.mkdir(parents=True, exist_ok=True) - return config_dir / "tokens.json" - - -def initiate_device_code_flow() -> dict[str, str]: - """Initiate the OAuth2 device code flow. - - Returns: - Dict containing: - - device_code: Code to use for polling - - user_code: Code for user to enter - - verification_uri: URL for user to visit - - expires_in: Seconds until codes expire - - interval: Polling interval in seconds - - Raises: - OAuth2AuthenticationError: If the request fails. - """ - try: - # Request device and user codes from authn service - response = requests.get( - f"{AUTHN_BASE_URL}/login/device", - timeout=10, - ) - response.raise_for_status() - return response.json() - except requests.exceptions.RequestException as e: - raise OAuth2AuthenticationError(f"Failed to initiate device code flow: {e}") - - -def display_user_instructions(device_info: dict[str, str]) -> None: - """Display instructions for the user to authenticate. - - Args: - verification_uri: The URL the user should visit. - user_code: The code the user should enter. 
- """ - verification_uri = device_info["verification_uri"] - user_code = device_info["user_code"] - print( - f"\nACTION REQUIRED:\n Open this URL in a browser and authenticate: {verification_uri}?user_code={user_code}" - ) - print("\nWaiting for authentication (timeout: 5 minutes)...") - - -def poll_for_authentication( - device_code: str, interval: int = 5, timeout: int = 300 -) -> str: - """Poll the authorization server for the authorization code. - - Args: - device_code: The device code from the initial request. - interval: Polling interval in seconds. - timeout: Maximum time to poll in seconds. - - Returns: - The authorization code. - - Raises: - OAuth2AuthenticationError: If polling fails or times out. - """ - start_time = time.time() - - while time.time() - start_time < timeout: - try: - response = requests.get( - f"{AUTHN_BASE_URL}/token", - params={"device_code": device_code}, - timeout=10, - ) - - if response.status_code == 200: - token_data = response.json() - # authn device flow returns access_token directly - access_token_data = token_data.get("token") - if not access_token_data: - raise OAuth2AuthenticationError( - f"No access_token in response. Received: {token_data.keys()}" - ) - return access_token_data.get("access_token") - - # Parse error response - API wraps IAM errors in 'detail' field - error_data = response.json() - error, error_description = parse_wrapped_error_response(error_data) - - if error == "authorization_pending": - time.sleep(interval) - continue - elif error == "slow_down": - interval += 5 - time.sleep(interval) - continue - elif error == "expired_token": - raise OAuth2AuthenticationError( - "Device code expired. Please try again." 
- ) - elif error == "access_denied": - raise OAuth2AuthenticationError("User denied authorization.") - else: - error_msg = f"Authorization error: {error}" - if error_description: - error_msg += f" - {error_description}" - raise OAuth2AuthenticationError(error_msg) - - except requests.exceptions.RequestException as e: - raise OAuth2AuthenticationError(f"Failed to poll for authorization: {e}") - - raise OAuth2AuthenticationError("Authorization timeout. Please try again.") - - -def parse_wrapped_error_response(error_data: dict) -> tuple[str | None, str | None]: - """Parse error response that may be wrapped by the API. - - Args: - error_data: The JSON error response from the API. - - Returns: - Tuple of (error, error_description). - """ - error = None - error_description = None - - if "detail" in error_data: - # Extract JSON from "response: {...}" pattern in detail string - detail = error_data["detail"] - match = re.search(r"response:\s*(\{.*\})\s*$", detail) - if match: - try: - # Parse the embedded JSON - embedded_json = json.loads(match.group(1)) - error = embedded_json.get("error") - error_description = embedded_json.get("error_description") - except json.JSONDecodeError: - pass - - # Fallback to direct error field if not wrapped - if not error: - error = error_data.get("error") - error_description = error_data.get("error_description") - - return error, error_description - - -def exchange_code_for_auth_token(code: str) -> str: - """Exchange authorization code for authentication token. - - Args: - code: The authorization code from the device flow. - - Returns: - The authentication token. - - Raises: - OAuth2AuthenticationError: If the exchange fails. 
- """ - try: - response = requests.get( - f"{AUTHN_BASE_URL}/token", params={"code": code}, timeout=10 - ) - response.raise_for_status() - - token_data = response.json() - auth_token = token_data.get("access_token") or token_data.get("token") - - if not auth_token: - raise OAuth2AuthenticationError("No access token in response") - - return auth_token - - except requests.exceptions.RequestException as e: - raise OAuth2AuthenticationError(f"Failed to exchange code for auth token: {e}") - - -def exchange_token_for_api_token(auth_token: str, api_name: str) -> str: - """Exchange authentication token for a specific API token. - - Args: - auth_token: The authentication token from the previous step. - api_name: The API name ('data-management' or 'site-capabilities'). - - Returns: - The API-specific access token. - - Raises: - OAuth2AuthenticationError: If the exchange fails. - """ - try: - response = requests.get( - f"{AUTHN_BASE_URL}/token/exchange/{api_name}", - headers={"Content-Type": "application/json"}, - params={ - "version": "latest", - "try_use_cache": "false", - "access_token": auth_token, - }, - timeout=10, - ) - response.raise_for_status() - - token_data = response.json() - api_token = token_data.get("access_token") or token_data.get("token") - - if not api_token: - raise OAuth2AuthenticationError( - f"No access token in response for {api_name}" - ) - - return api_token - - except requests.exceptions.RequestException as e: - raise OAuth2AuthenticationError( - f"Failed to exchange token for {api_name} API: {e}" - ) - - -if __name__ == "__main__": - """Test the authentication flow.""" - try: - tokens = authenticate() - print("Tokens obtained successfully:") - print(f" DM Token: {tokens['data_management_token'][:20]}...") - print(f" SC Token: {tokens['site_capabilities_token'][:20]}...") - except OAuth2AuthenticationError as e: - print(f"Authentication failed: {e}") - exit(1) diff --git a/path_finder/path_finder.py b/path_finder/path_finder.py deleted file mode 
100644 index e5aaf12..0000000 --- a/path_finder/path_finder.py +++ /dev/null @@ -1,401 +0,0 @@ -#!/usr/bin/env python3 -# -# path-finder: A tool for finding SKA data paths for mounting purposes. -# - -import argparse -import grp -import itertools -import os -import re -import subprocess -from venv import logger - -import requests - -from models.data_management import DataLocationAPIResponse, DataLocation -from oauth2_auth import authenticate, OAuth2AuthenticationError -from models.site_capabilities import ( - Site, - SitesAPIResponse, - StorageAreaIDToNodeAndSite, - NodesAPIResponse, - get_all_node_storage_areas, -) - - -# Inputs - these can be inputs -DATA_NAMESPACE = "daac" -DATA_FILE = "pi24_test_run_1_cleaned.fits" -SLURM_SITE_NAME = "UKSRC-CAM-PREPROD" - -# Upstream services -DM_API_BASEURL = "https://data-management.srcnet.skao.int/api/v1" -SC_API_BASEURL = "https://site-capabilities.srcnet.skao.int/api/v1" - - -def main( - namespace: str = DATA_NAMESPACE, - file_name: str = DATA_FILE, - site_name: str = SLURM_SITE_NAME, - tokens: dict[str, str] = {}, - *args, - **kwargs, -) -> None: - """Main function to locate data and print out storage area information.""" - - check_namespace_available(namespace, tokens["data_management_token"]) - check_site_name_exists(site_name, tokens["site_capabilities_token"]) - - site_storages = site_storage_areas(tokens["site_capabilities_token"]) - data_locations = locate_data(namespace, file_name, tokens["data_management_token"]) - - print_data_locations_with_sites(site_storages, data_locations) - - if not is_data_located_at_site(site_name, data_locations, site_storages): - print( - f"Data file '{file_name}' in namespace '{namespace}' is not located at site '{site_name}'." 
- ) - print("Ensure that the data is staged to the site before proceeding.") - # TODO: If the data isn't available at the SLURM_SITE_NAME, perhaps we could stage it - exit(1) - - rse_path = extract_rse_path(data_locations, namespace, file_name) - print(f"RSE Path for file '{file_name}' in namespace '{namespace}': {rse_path}") - - mount_data(rse_path, namespace) - - -def check_namespace_available(namespace: str, dm_api_token: str) -> None: - """Check if the specified namespace is available. - - Args: - namespace (str): The namespace to check. - - Raises: - RuntimeError: If the namespace is not available. - """ - all_namespaces = get_all_namespaces(dm_api_token) - if namespace not in all_namespaces: - raise RuntimeError( - f"Namespace '{namespace}' not found in available namespaces: {all_namespaces}" - ) - - -def get_all_namespaces(dm_api_token: str) -> list[str]: - """Fetch all available namespaces from the Data Management API. - - Returns: - A list of available namespace strings. - """ - headers = {"Authorization": f"Bearer {dm_api_token}"} - try: - response = requests.get(f"{DM_API_BASEURL}/data/list", headers=headers) - # TODO: Handle 401 Unauthorized - response.raise_for_status() - except requests.exceptions.RequestException as e: - raise RuntimeError(f"Error requesting namespaces from DM API:\n{e}") - namespaces = response.json() - return namespaces - - -def check_site_name_exists(site_name: str, sc_api_token: str) -> None: - """Check if the specified site name exists. - - Args: - site_name (str): The site name to check. - - Raises: - RuntimeError: If the site name does not exist. - """ - all_sites = all_site_names(sc_api_token) - if site_name not in all_sites: - logger.error( - f"Error: Site name '{site_name}' not found in available sites:\n\n{', '.join(all_sites)}" - ) - exit(1) - - -def all_site_names(sc_api_token: str) -> list[str]: - """Fetch the complete site capabilities and return all site name strings. 
- - Returns: - A list of all available site name strings. - """ - headers = {"Authorization": f"Bearer {sc_api_token}"} - try: - response = requests.get(f"{SC_API_BASEURL}/sites", headers=headers) - response.raise_for_status() - except requests.exceptions.RequestException as e: - raise RuntimeError(f"Error requesting node information from SC API:\n{e}") - - nodes_response = SitesAPIResponse.validate_python(response.json()) - - return [site.name for site in nodes_response] - - -def site_storage_areas(sc_api_token: str) -> StorageAreaIDToNodeAndSite: - """Fetch the site capabilities and obtain a storage area mapping of storage area IDs. - - Returns: - StorageAreaIDToNodeAndSite: A mapping of storage area IDs to their corresponding node - and site names. - """ - headers = {"Authorization": f"Bearer {sc_api_token}"} - try: - response = requests.get(f"{SC_API_BASEURL}/nodes", headers=headers) - response.raise_for_status() - except requests.exceptions.RequestException as e: - raise RuntimeError(f"Error requesting node information from SC API:\n{e}") - - nodes_response = NodesAPIResponse.validate_python(response.json()) - - return get_all_node_storage_areas(nodes_response) - - -def locate_data( - namespace: str, - file_name: str, - dm_api_token: str, -) -> list[DataLocation]: - """Locate a data file within a specified namespace. - - Args: - namespace (str): the file namespace - e.g. 'testing', 'daac', 'teal', 'neon' - file_name (str): the path of the file within the namespace - e.g. 'pi24_test_run_1_cleaned.fits', 'pi25_daac_tests' - - Returns: - A list of DataLocation objects representing the locations of the data file. 
- """ - - headers = {"Authorization": f"Bearer {dm_api_token}"} - - # Query the Data Management API to locate the file - try: - response = requests.get( - f"{DM_API_BASEURL}/data/locate/{namespace}/{file_name}", - headers=headers, - ) - response.raise_for_status() - except requests.exceptions.RequestException as e: - raise RuntimeError( - f"Error requesting location of file '{file_name}' in namespace '{namespace}' from DM API:\n{e}" - ) - - data_locations_response = DataLocationAPIResponse.validate_python(response.json()) - return data_locations_response - - -def print_data_locations_with_sites( - site_stores: StorageAreaIDToNodeAndSite, data_locations: list[DataLocation] -) -> None: - """Print data locations with their associated site information. - - Args: - site_storages: Mapping of storage area IDs to node and site names. - data_locations: List of data location objects to print. - """ - for location in data_locations: - node_site = site_stores.get(location.associated_storage_area_id) - if node_site: - node_name, site_name = node_site - print( - f"Data location ID: {location.identifier}, Storage Area ID: {location.associated_storage_area_id}, Node: {node_name}, Site: {site_name}" - ) - else: - print( - f"Data location ID: {location.identifier}, Storage Area ID: {location.associated_storage_area_id}, Node/Site: Not found" - ) - - -def is_data_located_at_site( - site_name: str, - data_locations: list[DataLocation], - site_stores: StorageAreaIDToNodeAndSite, -) -> bool: - """Check if any data locations are associated with the specified site name. - - Args: - site_name (str): The site name to check. - data_locations (list[DataLocation]): The list of data locations to search. - - Returns: - True if any data location is associated with the specified site name, False otherwise. 
- """ - sites_with_data = [ - site_stores.get(location.associated_storage_area_id, (None, None))[1] - for location in data_locations - ] - - print(f"Sites with data: {sites_with_data}") - if site_name in sites_with_data: - return True - return False - - -def extract_rse_path( - data_locations: list[DataLocation], namespace: str, file_name: str -) -> str: - """Extract the RSE path from data locations for a given namespace and file name. - - Do checks: - - at least one path is found - - consistency across paths from different replicas - - Args: - data_locations (list[DataLocation]): The list of data locations to search. - namespace (str): The namespace of the data. - file_name (str): The name of the data file. - - Returns: - The extracted RSE path. - """ - - rse_path_match = re.compile(rf"/{namespace}/.*$") - matched_paths: set[str] = set() - unmatched_paths: list[str] = [] - - replica_uris = itertools.chain.from_iterable( - [location.replicas for location in data_locations] - ) - for uri in replica_uris: - match = rse_path_match.search(uri) - if match: - matched_paths.add(match.group(0)) - else: - unmatched_paths.append(uri) - - # Report any unmatched URIs - if unmatched_paths: - print( - f"Warning: {len(unmatched_paths)} URIs did not match the expected pattern." - ) - print(f"Unmatched URIs: {unmatched_paths}") - - # Validate we have exactly one unique path - if not matched_paths: - raise RuntimeError( - f"No valid paths found for file '{file_name}' in namespace '{namespace}'." - ) - - if len(matched_paths) > 1: - print(f"Warning: Multiple unique paths found: {matched_paths}") - print( - "We should check the path for the local RSE - by cross-referencing with site capabilities." - ) - raise NotImplementedError("Handling multiple matched paths is not implemented.") - - return matched_paths.pop() - - -def mount_data(rse_path: str, namespace: str) -> None: - """Mount the data at the specified RSE path using sudo pathfinder. 
- - Args: - rse_path (str): The RSE path to mount. - namespace (str): The namespace of the data. - - Raises: - RuntimeError: If the mount command fails. - """ - print(f"Mounting data from RSE path: {rse_path} in namespace: {namespace}") - - # Construct the sudo command - cmd = ["sudo", "pathfinder", "--mount", rse_path, namespace] - - try: - # Execute the command - result = subprocess.run( - cmd, - capture_output=True, - text=True, - check=False, # Don't raise exception, handle manually - timeout=30, # 30 second timeout - ) - - # Print stdout if available - if result.stdout: - print(f"Mount output: {result.stdout.strip()}") - - # Check return code - if result.returncode != 0: - error_msg = f"Mount command failed with exit code {result.returncode}" - if result.stderr: - error_msg += f": {result.stderr.strip()}" - raise RuntimeError(error_msg) - - print(f"Successfully mounted {rse_path} in namespace {namespace}") - - except subprocess.TimeoutExpired: - raise RuntimeError("Mount command timed out after 30 seconds") - except FileNotFoundError: - raise RuntimeError( - "pathfinder command not found. Ensure it's installed and in PATH." - ) - except PermissionError: - raise RuntimeError("Permission denied. 
Ensure sudo is configured correctly.") - except Exception as e: - raise RuntimeError(f"Unexpected error during mount: {str(e)}") - - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description="Path Finder") - parser.add_argument("--namespace", required=True, help="Namespace of the data") - parser.add_argument("--file_name", required=True, help="Name of the data file") - parser.add_argument( - "--site_name", required=True, help="Site name where data is staged" - ) - parser.add_argument( - "--no-login", - action="store_true", - help="Do not use OAuth2 for authentication - use environment variables instead", - ) - args = parser.parse_args() - - # DEBUG: Print user and group information - # user = os.getlogin() - # groups = os.getgroups() - # sudo_user = os.environ.get("SUDO_USER") - # if sudo_user: - # print(f"Running path-finder as sudo user: {sudo_user}") - # else: - # print("Not running Python as sudo.") - # print(f"Running path-finder as local user: {user}") - # group_names = [grp.getgrgid(gid).gr_name for gid in groups] - # print(f"User '{user}' belongs to groups: {group_names}") - - if not args.no_login: - # Use OAuth2 device code flow to authenticate - try: - print("Authenticating with OAuth2...") - tokens = authenticate() - print("Authentication successful!") - except OAuth2AuthenticationError as e: - print(f"Authentication failed: {e}") - exit(1) - else: - # Fall back to environment variables - try: - data_management_access_token = os.environ["DATA_MANAGEMENT_ACCESS_TOKEN"] - except KeyError: - print( - "Error: Please set DATA_MANAGEMENT_ACCESS_TOKEN environment variable or use --login flag." - ) - exit(1) - - try: - site_capabilities_access_token = os.environ[ - "SITE_CAPABILITIES_ACCESS_TOKEN" - ] - except KeyError: - print( - "Error: Please set SITE_CAPABILITIES_ACCESS_TOKEN environment variable or use --login flag." 
- ) - exit(1) - tokens = { - "data_management_token": data_management_access_token, - "site_capabilities_token": site_capabilities_access_token, - } - - - main(**vars(args), tokens=tokens) diff --git a/pyproject.toml b/pyproject.toml deleted file mode 100644 index 3b08a87..0000000 --- a/pyproject.toml +++ /dev/null @@ -1,10 +0,0 @@ -[project] -name = "path-finder" -version = "0.1.0" -description = "CLI Program to authorise a users access to some srcNet data and return the RSE path" -readme = "README.md" -requires-python = ">=3.13" -dependencies = [ - "pydantic>=2.12.5", - "requests>=2.32.5", -] diff --git a/uv.lock b/uv.lock deleted file mode 100644 index bc2f3fb..0000000 --- a/uv.lock +++ /dev/null @@ -1,199 +0,0 @@ -version = 1 -revision = 2 -requires-python = ">=3.13" - -[[package]] -name = "annotated-types" -version = "0.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, -] - -[[package]] -name = "certifi" -version = "2025.11.12" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/8c/58f469717fa48465e4a50c014a0400602d3c437d7c0c468e17ada824da3a/certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316", size = 160538, upload-time = "2025-11-12T02:54:51.517Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size = 159438, upload-time = "2025-11-12T02:54:49.735Z" }, -] - -[[package]] -name = "charset-normalizer" -version = "3.4.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" }, - { url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" }, - { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" }, - { url = 
"https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" }, - { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" }, - { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" }, - { url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" }, - { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" }, - { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = 
"sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" }, - { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" }, - { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" }, - { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" }, - { url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" }, - { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" }, - { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, 
upload-time = "2025-10-14T04:41:31.188Z" }, - { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" }, - { url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" }, - { url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" }, - { url = "https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" }, - { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" }, - { url = 
"https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" }, - { url = "https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" }, - { url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" }, - { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" }, - { url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" }, - { url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, 
upload-time = "2025-10-14T04:41:44.821Z" }, - { url = "https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" }, - { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" }, - { url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", size = 99940, upload-time = "2025-10-14T04:41:49.946Z" }, - { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" }, - { url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" }, - { url = 
"https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, -] - -[[package]] -name = "idna" -version = "3.11" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, -] - -[[package]] -name = "path-finder" -version = "0.1.0" -source = { virtual = "." 
} -dependencies = [ - { name = "pydantic" }, - { name = "requests" }, -] - -[package.metadata] -requires-dist = [ - { name = "pydantic", specifier = ">=2.12.5" }, - { name = "requests", specifier = ">=2.32.5" }, -] - -[[package]] -name = "pydantic" -version = "2.12.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "annotated-types" }, - { name = "pydantic-core" }, - { name = "typing-extensions" }, - { name = "typing-inspection" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, -] - -[[package]] -name = "pydantic-core" -version = "2.41.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, - { url = 
"https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, - { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, - { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, - { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, - { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, - { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = 
"2025-11-04T13:40:40.289Z" }, - { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, - { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" }, - { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, - { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, - { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, - { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, - { url = 
"https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, - { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, - { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, - { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, - { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, - { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, - { url = 
"https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, - { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" }, - { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" }, - { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" }, - { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, - { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, - { url = 
"https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, - { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, - { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, - { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, - { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, - { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, - { url = 
"https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, - { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, - { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, - { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" }, - { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, - { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = 
"2025-11-04T13:41:47.474Z" }, - { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, - { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, - { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, - { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, - { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, -] - -[[package]] -name = "requests" -version = "2.32.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "charset-normalizer" }, - { name = "idna" }, - { name = "urllib3" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = 
"sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, -] - -[[package]] -name = "typing-extensions" -version = "4.15.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, -] - -[[package]] -name = "typing-inspection" -version = "0.4.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, -] - -[[package]] -name = "urllib3" -version = 
"2.6.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1e/24/a2a2ed9addd907787d7aa0355ba36a6cadf1768b934c652ea78acbd59dcd/urllib3-2.6.2.tar.gz", hash = "sha256:016f9c98bb7e98085cb2b4b17b87d2c702975664e4f060c6532e64d1c1a5e797", size = 432930, upload-time = "2025-12-11T15:56:40.252Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6d/b9/4095b668ea3678bf6a0af005527f39de12fb026516fb3df17495a733b7f8/urllib3-2.6.2-py3-none-any.whl", hash = "sha256:ec21cddfe7724fc7cb4ba4bea7aa8e2ef36f607a4bab81aa6ce42a13dc3f03dd", size = 131182, upload-time = "2025-12-11T15:56:38.584Z" }, -] From 3fb57a653c75c719f90030dee74f999c108d1e2f Mon Sep 17 00:00:00 2001 From: Roger Duthie <343584+rjaduthie@users.noreply.github.com> Date: Mon, 23 Mar 2026 15:41:34 +0000 Subject: [PATCH 15/27] fix(SGO-480): Add Cargo.lock to version control --- .gitignore | 1 - Cargo.lock | 2105 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 2105 insertions(+), 1 deletion(-) create mode 100644 Cargo.lock diff --git a/.gitignore b/.gitignore index 80d96c2..d81bd3e 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,5 @@ # Rust/Cargo target/ -Cargo.lock **/*.rs.bk *.pdb diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 0000000..8ae7782 --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,2105 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 4 + +[[package]] +name = "aho-corasick" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301" +dependencies = [ + "memchr", +] + +[[package]] +name = "anstream" +version = "0.6.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78" + +[[package]] +name = "anstyle-parse" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d" +dependencies = [ + "anstyle", + "once_cell_polyfill", + "windows-sys 0.61.2", +] + +[[package]] +name = "anyhow" +version = "1.0.102" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = 
"base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "bitflags" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" + +[[package]] +name = "bumpalo" +version = "3.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d20789868f4b01b2f2caec9f5c4e0213b41e3e5702a50157d699ae31ced2fcb" + +[[package]] +name = "bytes" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" + +[[package]] +name = "cc" +version = "1.2.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aebf35691d1bfb0ac386a69bac2fde4dd276fb618cf8bf4f5318fe285e821bb2" +dependencies = [ + "find-msvc-tools", + "shlex", +] + +[[package]] +name = "cfg-if" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "clap" +version = "4.5.60" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2797f34da339ce31042b27d23607e051786132987f595b02ba4f6a6dffb7030a" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.5.60" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24a241312cea5059b13574bb9b3861cabf758b879c15190b37b6d6fd63ab6876" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_derive" +version = "4.5.55" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a92793da1a46a5f2a02a6f4c46c6496b28c43638adea8306fcb0caa1634f24e5" +dependencies = [ + "heck", + "proc-macro2", + "quote", + 
"syn", +] + +[[package]] +name = "clap_lex" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a822ea5bc7590f9d40f1ba12c0dc3c2760f3482c6984db1573ad11031420831" + +[[package]] +name = "colorchoice" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "dirs" +version = "5.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225" +dependencies = [ + "dirs-sys", +] + +[[package]] +name = "dirs-sys" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c" +dependencies = [ + "libc", + "option-ext", + "redox_users", + "windows-sys 0.48.0", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "encoding_rs" +version = "0.8.35" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "errno" +version = "0.3.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" +dependencies = [ + "libc", + "windows-sys 0.61.2", +] + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "find-msvc-tools" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582" + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "futures-channel" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" + +[[package]] +name = "futures-io" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" + +[[package]] +name = "futures-sink" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" + +[[package]] +name = "futures-task" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" + +[[package]] +name = "futures-util" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" +dependencies = [ + "futures-core", + "futures-io", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "slab", +] + +[[package]] +name = "getrandom" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "getrandom" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "139ef39800118c7683f2fd3c98c1b23c09ae076556b435f8e9064ae108aaeeec" +dependencies 
= [ + "cfg-if", + "libc", + "r-efi", + "wasip2", + "wasip3", +] + +[[package]] +name = "h2" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f44da3a8150a6703ed5d34e164b875fd14c2cdab9af1252a9a1020bde2bdc54" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" +dependencies = [ + "foldhash", +] + +[[package]] +name = "hashbrown" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "http" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" +dependencies = [ + "bytes", + "itoa", +] + +[[package]] +name = "http-body" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http", +] + +[[package]] +name = "http-body-util" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" +dependencies = [ + "bytes", + "futures-core", + "http", + "http-body", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" + +[[package]] +name = "hyper" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" +dependencies = [ + "atomic-waker", + "bytes", + "futures-channel", + "futures-core", + "h2", + "http", + "http-body", + "httparse", + "itoa", + "pin-project-lite", + "pin-utils", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" +dependencies = [ + "http", + "hyper", + "hyper-util", + "rustls", + "rustls-pki-types", + "tokio", + "tokio-rustls", + "tower-service", +] + +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes", + "http-body-util", + "hyper", + "hyper-util", + "native-tls", + "tokio", + "tokio-native-tls", + "tower-service", +] + +[[package]] +name = "hyper-util" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0" +dependencies = [ + "base64", + "bytes", + "futures-channel", + "futures-util", + "http", + "http-body", + "hyper", + "ipnet", + "libc", + "percent-encoding", + "pin-project-lite", + "socket2", + "system-configuration", + "tokio", + "tower-service", + "tracing", + "windows-registry", +] + +[[package]] +name = "icu_collections" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" +dependencies = [ + "displaydoc", + "potential_utf", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locale_core" 
+version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_normalizer" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" +dependencies = [ + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" + +[[package]] +name = "icu_properties" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" +dependencies = [ + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" + +[[package]] +name = "icu_provider" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" +dependencies = [ + "displaydoc", + "icu_locale_core", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "id-arena" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" + +[[package]] +name = "idna" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "indexmap" +version = "2.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" +dependencies = [ + "equivalent", + "hashbrown 0.16.1", + "serde", + "serde_core", +] + +[[package]] +name = "ipnet" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" + +[[package]] +name = "iri-string" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c91338f0783edbd6195decb37bae672fd3b165faffb89bf7b9e6942f8b1a731a" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" + +[[package]] +name = "itoa" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" + +[[package]] +name = "js-sys" +version = "0.3.88" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7e709f3e3d22866f9c25b3aff01af289b18422cc8b4262fb19103ee80fe513d" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "leb128fmt" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" + +[[package]] 
+name = "libc" +version = "0.2.182" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6800badb6cb2082ffd7b6a67e6125bb39f18782f793520caee8cb8846be06112" + +[[package]] +name = "libredox" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d0b95e02c851351f877147b7deea7b1afb1df71b63aa5f8270716e0c5720616" +dependencies = [ + "bitflags", + "libc", +] + +[[package]] +name = "linux-raw-sys" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a66949e030da00e8c7d4434b251670a91556f4144941d37452769c25d58a53" + +[[package]] +name = "litemap" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" + +[[package]] +name = "lock_api" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" +dependencies = [ + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" + +[[package]] +name = "memchr" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "mio" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" +dependencies = [ + "libc", + "wasi", + "windows-sys 0.61.2", +] + +[[package]] +name = "native-tls" +version = "0.2.18" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "465500e14ea162429d264d44189adc38b199b62b1c21eea9f69e4b73cb03bbf2" +dependencies = [ + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "once_cell" +version = "1.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" + +[[package]] +name = "once_cell_polyfill" +version = "1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" + +[[package]] +name = "openssl" +version = "0.10.75" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08838db121398ad17ab8531ce9de97b244589089e290a384c900cb9ff7434328" +dependencies = [ + "bitflags", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "openssl-probe" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c87def4c32ab89d880effc9e097653c8da5d6ef28e6b539d313baaacfbafcbe" + +[[package]] +name = "openssl-sys" +version = "0.9.111" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82cab2d520aa75e3c58898289429321eb788c3106963d0dc886ec7a5f4adc321" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "option-ext" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" + +[[package]] +name = 
"parking_lot" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-link", +] + +[[package]] +name = "pathFinder" +version = "1.0.0" +dependencies = [ + "anyhow", + "clap", + "dirs", + "libc", + "regex", + "reqwest", + "serde", + "serde_json", + "thiserror", + "tokio", +] + +[[package]] +name = "percent-encoding" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" + +[[package]] +name = "pin-project-lite" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkg-config" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" + +[[package]] +name = "potential_utf" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" +dependencies = [ + "zerovec", +] + +[[package]] +name = "prettyplease" +version = "0.2.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" +dependencies 
= [ + "proc-macro2", + "syn", +] + +[[package]] +name = "proc-macro2" +version = "1.0.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21b2ebcf727b7760c461f091f9f0f539b77b8e87f2fd88131e7f1b433b3cece4" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + +[[package]] +name = "redox_syscall" +version = "0.5.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" +dependencies = [ + "bitflags", +] + +[[package]] +name = "redox_users" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" +dependencies = [ + "getrandom 0.2.17", + "libredox", + "thiserror", +] + +[[package]] +name = "regex" +version = "1.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e10754a14b9137dd7b1e3e5b0493cc9171fdd105e0ab477f51b72e7f3ac0e276" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e1dd4122fc1595e8162618945476892eefca7b88c52820e74af6262213cae8f" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a96887878f22d7bad8a3b6dc5b7440e0ada9a245242924394987b21cf2210a4c" + +[[package]] +name 
= "reqwest" +version = "0.12.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eddd3ca559203180a307f12d114c268abf583f59b03cb906fd0b3ff8646c1147" +dependencies = [ + "base64", + "bytes", + "encoding_rs", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-rustls", + "hyper-tls", + "hyper-util", + "js-sys", + "log", + "mime", + "native-tls", + "percent-encoding", + "pin-project-lite", + "rustls-pki-types", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tokio-native-tls", + "tower", + "tower-http", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "ring" +version = "0.17.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" +dependencies = [ + "cc", + "cfg-if", + "getrandom 0.2.17", + "libc", + "untrusted", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustix" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6fe4565b9518b83ef4f91bb47ce29620ca828bd32cb7e408f0062e9930ba190" +dependencies = [ + "bitflags", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.61.2", +] + +[[package]] +name = "rustls" +version = "0.23.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c665f33d38cea657d9614f766881e4d510e0eda4239891eea56b4cadcf01801b" +dependencies = [ + "once_cell", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-pki-types" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be040f8b0a225e40375822a563fa9524378b9d63112f53e19ffff34df5d33fdd" +dependencies = [ + "zeroize", +] + +[[package]] +name = "rustls-webpki" +version = "0.103.9" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7df23109aa6c1567d1c575b9952556388da57401e4ace1d15f79eedad0d8f53" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + +[[package]] +name = "rustversion" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" + +[[package]] +name = "ryu" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9774ba4a74de5f7b1c1451ed6cd5285a32eddb5cccb8cc655a4e50009e06477f" + +[[package]] +name = "schannel" +version = "0.1.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "security-framework" +version = "3.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7f4bc775c73d9a02cde8bf7b2ec4c9d12743edf609006c7facc23998404cd1d" +dependencies = [ + "bitflags", + "core-foundation 0.10.1", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce2691df843ecc5d231c0b14ece2acc3efb62c0a398c7e1d875f3983ce020e3" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = "1.0.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" + +[[package]] +name = "serde" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.149" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" +dependencies = [ + "itoa", + "memchr", + "serde", + "serde_core", + "zmij", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook-registry" +version = "1.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4db69cba1110affc0e9f7bcd48bbf87b3f4fc7c61fc9155afd4c469eb3d6c1b" +dependencies = [ + "errno", + "libc", +] + +[[package]] +name = "slab" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5" + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" + +[[package]] +name = "socket2" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86f4aa3ad99f2088c990dfa82d367e19cb29268ed67c574d10d0a4bfe71f07e0" +dependencies = [ + "libc", + "windows-sys 0.60.2", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "syn" +version = "2.0.117" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] + +[[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "system-configuration" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a13f3d0daba03132c0aa9767f98351b3488edc2c100cda2d2ec2b04f3d8d3c8b" +dependencies = [ + "bitflags", + "core-foundation 0.9.4", + "system-configuration-sys", +] + 
+[[package]] +name = "system-configuration-sys" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "tempfile" +version = "3.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0136791f7c95b1f6dd99f9cc786b91bb81c3800b639b3478e561ddb7be95e5f1" +dependencies = [ + "fastrand", + "getrandom 0.4.1", + "once_cell", + "rustix", + "windows-sys 0.61.2", +] + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tinystr" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] +name = "tokio" +version = "1.49.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72a2903cd7736441aac9df9d7688bd0ce48edccaadf181c3b90be801e81d3d86" +dependencies = [ + "bytes", + "libc", + "mio", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys 0.61.2", +] + +[[package]] +name = "tokio-macros" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tokio-native-tls" 
+version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" +dependencies = [ + "rustls", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tower" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebe5ef63511595f1344e2d5cfa636d973292adc0eec1f0ad45fae9f0851ab1d4" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper", + "tokio", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-http" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" +dependencies = [ + "bitflags", + "bytes", + "futures-util", + "http", + "http-body", + "iri-string", + "pin-project-lite", + "tower", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tracing" +version = "0.1.44" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" +dependencies = [ + "pin-project-lite", + "tracing-core", +] + +[[package]] +name = "tracing-core" +version = "0.1.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" +dependencies = [ + "once_cell", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "unicode-ident" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" + +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.1+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" + +[[package]] +name = "wasip2" +version = "1.0.2+wasi-0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wasip3" +version = "0.4.0+wasi-0.3.0-rc-2026-01-06" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wasm-bindgen" +version = "0.2.111" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec1adf1535672f5b7824f817792b1afd731d7e843d2d04ec8f27e8cb51edd8ac" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe88540d1c934c4ec8e6db0afa536876c5441289d7f9f9123d4f065ac1250a6b" +dependencies = [ + "cfg-if", + "futures-util", + "js-sys", + "once_cell", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.111" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19e638317c08b21663aed4d2b9a2091450548954695ff4efa75bff5fa546b3b1" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = 
"0.2.111" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c64760850114d03d5f65457e96fc988f11f01d38fbaa51b254e4ab5809102af" +dependencies = [ + "bumpalo", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.111" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60eecd4fe26177cfa3339eb00b4a36445889ba3ad37080c2429879718e20ca41" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "wasm-encoder" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319" +dependencies = [ + "leb128fmt", + "wasmparser", +] + +[[package]] +name = "wasm-metadata" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909" +dependencies = [ + "anyhow", + "indexmap", + "wasm-encoder", + "wasmparser", +] + +[[package]] +name = "wasmparser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" +dependencies = [ + "bitflags", + "hashbrown 0.15.5", + "indexmap", + "semver", +] + +[[package]] +name = "web-sys" +version = "0.3.88" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d6bb20ed2d9572df8584f6dc81d68a41a625cadc6f15999d649a70ce7e3597a" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + +[[package]] +name = "windows-registry" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02752bf7fbdcce7f2a27a742f798510f3e5ad88dbe84871e5168e2120c3d5720" +dependencies 
= [ + "windows-link", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-result" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" +dependencies = [ + "windows-targets 0.53.5", +] + +[[package]] +name = "windows-sys" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] 
+ +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm 0.52.6", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.53.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" +dependencies = [ + "windows-link", + "windows_aarch64_gnullvm 0.53.1", + "windows_aarch64_msvc 0.53.1", + "windows_i686_gnu 0.53.1", + "windows_i686_gnullvm 0.53.1", + "windows_i686_msvc 0.53.1", + "windows_x86_64_gnu 0.53.1", + "windows_x86_64_gnullvm 0.53.1", + "windows_x86_64_msvc 0.53.1", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_i686_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" + +[[package]] +name = "windows_x86_64_gnu" +version = 
"0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" + +[[package]] +name = "wit-bindgen" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" +dependencies = [ + "wit-bindgen-rust-macro", +] + +[[package]] +name = "wit-bindgen-core" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc" +dependencies = [ + "anyhow", + "heck", + "wit-parser", +] + +[[package]] +name = "wit-bindgen-rust" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21" +dependencies = [ + "anyhow", + "heck", + "indexmap", + "prettyplease", + "syn", + "wasm-metadata", + "wit-bindgen-core", + "wit-component", +] + +[[package]] +name = "wit-bindgen-rust-macro" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a" +dependencies = [ + "anyhow", + "prettyplease", + "proc-macro2", + "quote", + "syn", + "wit-bindgen-core", + "wit-bindgen-rust", +] + +[[package]] +name = "wit-component" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2" +dependencies = [ + "anyhow", + "bitflags", + "indexmap", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder", + "wasm-metadata", + "wasmparser", + "wit-parser", +] + +[[package]] +name = "wit-parser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736" +dependencies = [ + "anyhow", + "id-arena", + "indexmap", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser", +] + +[[package]] +name = "writeable" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" + +[[package]] +name = "yoke" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" +dependencies = [ + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zeroize" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" + +[[package]] +name = "zerotrie" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zmij" +version = "1.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" From 6bd26fe5d1283a4c6399fb58d4dd4dcae33b7f96 Mon Sep 17 00:00:00 2001 From: Roger Duthie <343584+rjaduthie@users.noreply.github.com> Date: Mon, 30 Mar 2026 12:03:15 +0100 Subject: [PATCH 16/27] chore(SOG-480): Add docs and tests --- Cargo.lock | 797 ++++++++++++++++++++++++++++++++++++++++++++-- Cargo.toml | 1 + README.md | 142 +++------ RUST_README.md | 186 ----------- src/api_client.rs | 356 +++++++++++++++++++-- src/main.rs | 46 +-- 6 files changed, 1157 insertions(+), 371 deletions(-) delete mode 100644 RUST_README.md diff --git a/Cargo.lock b/Cargo.lock index 8ae7782..5621a85 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -67,24 +67,275 @@ version = "1.0.102" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" +[[package]] +name = "ascii-canvas" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8824ecca2e851cec16968d54a01dd372ef8f95b244fb84b84e70128be347c3c6" +dependencies = [ + "term", +] + +[[package]] +name = "assert-json-diff" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47e4f2b81832e72834d7518d8487a0396a28cc408186a2e8854c0f98011faf12" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] +name = "async-attributes" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3203e79f4dd9bdda415ed03cf14dae5a2bf775c683a00f94e9cd1faf0f596e5" +dependencies = [ + "quote", + "syn 1.0.109", +] + +[[package]] +name = "async-channel" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" +dependencies = [ + "concurrent-queue", + "event-listener 2.5.3", + "futures-core", +] + +[[package]] +name = "async-channel" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "924ed96dd52d1b75e9c1a3e6275715fd320f5f9439fb5a4a11fa51f4221158d2" +dependencies = [ + "concurrent-queue", + "event-listener-strategy", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-executor" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c96bf972d85afc50bf5ab8fe2d54d1586b4e0b46c97c50a0c9e71e2f7bcd812a" +dependencies = [ + "async-task", + "concurrent-queue", + "fastrand", + "futures-lite", + "pin-project-lite", + "slab", +] + +[[package]] +name = "async-global-executor" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" +dependencies = [ + "async-channel 2.5.0", + "async-executor", + "async-io", + "async-lock", + "blocking", + "futures-lite", + "once_cell", +] + +[[package]] +name = "async-io" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "456b8a8feb6f42d237746d4b3e9a178494627745c3c56c6ea55d92ba50d026fc" +dependencies = [ + "autocfg", + "cfg-if", + "concurrent-queue", + "futures-io", + "futures-lite", + "parking", + "polling", + "rustix", + "slab", + "windows-sys 0.61.2", +] + +[[package]] +name = "async-lock" +version = "3.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290f7f2596bd5b78a9fec8088ccd89180d7f9f55b94b0576823bbbdc72ee8311" +dependencies = [ + "event-listener 5.4.1", + "event-listener-strategy", + "pin-project-lite", +] + +[[package]] +name = "async-object-pool" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"333c456b97c3f2d50604e8b2624253b7f787208cb72eb75e64b0ad11b221652c" +dependencies = [ + "async-std", +] + +[[package]] +name = "async-process" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc50921ec0055cdd8a16de48773bfeec5c972598674347252c0399676be7da75" +dependencies = [ + "async-channel 2.5.0", + "async-io", + "async-lock", + "async-signal", + "async-task", + "blocking", + "cfg-if", + "event-listener 5.4.1", + "futures-lite", + "rustix", +] + +[[package]] +name = "async-signal" +version = "0.2.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43c070bbf59cd3570b6b2dd54cd772527c7c3620fce8be898406dd3ed6adc64c" +dependencies = [ + "async-io", + "async-lock", + "atomic-waker", + "cfg-if", + "futures-core", + "futures-io", + "rustix", + "signal-hook-registry", + "slab", + "windows-sys 0.61.2", +] + +[[package]] +name = "async-std" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c8e079a4ab67ae52b7403632e4618815d6db36d2a010cfe41b02c1b1578f93b" +dependencies = [ + "async-attributes", + "async-channel 1.9.0", + "async-global-executor", + "async-io", + "async-lock", + "async-process", + "crossbeam-utils", + "futures-channel", + "futures-core", + "futures-io", + "futures-lite", + "gloo-timers", + "kv-log-macro", + "log", + "memchr", + "once_cell", + "pin-project-lite", + "pin-utils", + "slab", + "wasm-bindgen-futures", +] + +[[package]] +name = "async-task" +version = "4.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" + +[[package]] +name = "async-trait" +version = "0.1.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + [[package]] name = "atomic-waker" version = "1.1.2" source 
= "registry+https://github.com/rust-lang/crates.io-index" checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" +[[package]] +name = "autocfg" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" + +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + [[package]] name = "base64" version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" +[[package]] +name = "basic-cookies" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67bd8fd42c16bdb08688243dc5f0cc117a3ca9efeeaba3a345a18a6159ad96f7" +dependencies = [ + "lalrpop", + "lalrpop-util", + "regex", +] + +[[package]] +name = "bit-set" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" +dependencies = [ + "bit-vec", +] + +[[package]] +name = "bit-vec" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" + [[package]] name = "bitflags" version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" +[[package]] +name = "blocking" +version = "1.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e83f8d02be6967315521be875afa792a316e28d57b5a2d401897e2a7921b7f21" +dependencies = [ + "async-channel 2.5.0", + "async-task", + "futures-io", + "futures-lite", + "piper", +] + [[package]] name = "bumpalo" version = "3.20.2" @@ -144,7 +395,7 @@ dependencies = [ 
"heck", "proc-macro2", "quote", - "syn", + "syn 2.0.117", ] [[package]] @@ -159,6 +410,15 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" +[[package]] +name = "concurrent-queue" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" +dependencies = [ + "crossbeam-utils", +] + [[package]] name = "core-foundation" version = "0.9.4" @@ -185,6 +445,18 @@ version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + +[[package]] +name = "crunchy" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" + [[package]] name = "dirs" version = "5.0.1" @@ -194,6 +466,16 @@ dependencies = [ "dirs-sys", ] +[[package]] +name = "dirs-next" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" +dependencies = [ + "cfg-if", + "dirs-sys-next", +] + [[package]] name = "dirs-sys" version = "0.4.1" @@ -206,6 +488,17 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "dirs-sys-next" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" +dependencies = [ + "libc", + "redox_users", + "winapi", +] + [[package]] name = "displaydoc" version = "0.2.5" @@ -214,7 +507,22 @@ checksum = 
"97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.117", +] + +[[package]] +name = "either" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" + +[[package]] +name = "ena" +version = "0.14.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eabffdaee24bd1bf95c5ef7cec31260444317e72ea56c4c91750e8b7ee58d5f1" +dependencies = [ + "log", ] [[package]] @@ -242,6 +550,33 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "event-listener" +version = "2.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" + +[[package]] +name = "event-listener" +version = "5.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] + +[[package]] +name = "event-listener-strategy" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93" +dependencies = [ + "event-listener 5.4.1", + "pin-project-lite", +] + [[package]] name = "fastrand" version = "2.3.0" @@ -254,6 +589,12 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582" +[[package]] +name = "fixedbitset" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" + [[package]] name = "fnv" version = "1.0.7" @@ -312,6 +653,30 @@ version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" +[[package]] +name = "futures-lite" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f78e10609fe0e0b3f4157ffab1876319b5b0db102a2c60dc4626306dc46b44ad" +dependencies = [ + "fastrand", + "futures-core", + "futures-io", + "parking", + "pin-project-lite", +] + +[[package]] +name = "futures-macro" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + [[package]] name = "futures-sink" version = "0.3.32" @@ -332,6 +697,7 @@ checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" dependencies = [ "futures-core", "futures-io", + "futures-macro", "futures-sink", "futures-task", "memchr", @@ -363,6 +729,18 @@ dependencies = [ "wasip3", ] +[[package]] +name = "gloo-timers" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb143cf96099802033e0d4f4963b19fd2e0b728bcf076cd9cf7f6634f092994" +dependencies = [ + "futures-channel", + "futures-core", + "js-sys", + "wasm-bindgen", +] + [[package]] name = "h2" version = "0.4.13" @@ -374,7 +752,7 @@ dependencies = [ "fnv", "futures-core", "futures-sink", - "http", + "http 1.4.0", "indexmap", "slab", "tokio", @@ -403,6 +781,23 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" +[[package]] +name = "hermit-abi" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" + +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" 
+dependencies = [ + "bytes", + "fnv", + "itoa", +] + [[package]] name = "http" version = "1.4.0" @@ -413,6 +808,17 @@ dependencies = [ "itoa", ] +[[package]] +name = "http-body" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +dependencies = [ + "bytes", + "http 0.2.12", + "pin-project-lite", +] + [[package]] name = "http-body" version = "1.0.1" @@ -420,7 +826,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", - "http", + "http 1.4.0", ] [[package]] @@ -431,8 +837,8 @@ checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" dependencies = [ "bytes", "futures-core", - "http", - "http-body", + "http 1.4.0", + "http-body 1.0.1", "pin-project-lite", ] @@ -442,6 +848,63 @@ version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "httpmock" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08ec9586ee0910472dec1a1f0f8acf52f0fdde93aea74d70d4a3107b4be0fd5b" +dependencies = [ + "assert-json-diff", + "async-object-pool", + "async-std", + "async-trait", + "base64 0.21.7", + "basic-cookies", + "crossbeam-utils", + "form_urlencoded", + "futures-util", + "hyper 0.14.32", + "lazy_static", + "levenshtein", + "log", + "regex", + "serde", + "serde_json", + "serde_regex", + "similar", + "tokio", + "url", +] + +[[package]] +name = "hyper" +version = "0.14.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "http 0.2.12", + "http-body 0.4.6", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2 0.5.10", + "tokio", + "tower-service", + "tracing", + "want", +] + [[package]] name = "hyper" version = "1.8.1" @@ -453,8 +916,8 @@ dependencies = [ "futures-channel", "futures-core", "h2", - "http", - "http-body", + "http 1.4.0", + "http-body 1.0.1", "httparse", "itoa", "pin-project-lite", @@ -470,8 +933,8 @@ version = "0.27.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" dependencies = [ - "http", - "hyper", + "http 1.4.0", + "hyper 1.8.1", "hyper-util", "rustls", "rustls-pki-types", @@ -488,7 +951,7 @@ checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" dependencies = [ "bytes", "http-body-util", - "hyper", + "hyper 1.8.1", "hyper-util", "native-tls", "tokio", @@ -502,18 +965,18 @@ version = "0.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0" dependencies = [ - "base64", + "base64 0.22.1", "bytes", "futures-channel", "futures-util", - "http", - "http-body", - "hyper", + "http 1.4.0", + "http-body 1.0.1", + "hyper 1.8.1", "ipnet", "libc", "percent-encoding", "pin-project-lite", - "socket2", + "socket2 0.6.2", "system-configuration", "tokio", "tower-service", @@ -663,6 +1126,15 @@ version = "1.70.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" +[[package]] +name = "itertools" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +dependencies = [ + "either", +] + [[package]] name = 
"itoa" version = "1.0.17" @@ -679,12 +1151,64 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "kv-log-macro" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" +dependencies = [ + "log", +] + +[[package]] +name = "lalrpop" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55cb077ad656299f160924eb2912aa147d7339ea7d69e1b5517326fdcec3c1ca" +dependencies = [ + "ascii-canvas", + "bit-set", + "ena", + "itertools", + "lalrpop-util", + "petgraph", + "pico-args", + "regex", + "regex-syntax", + "string_cache", + "term", + "tiny-keccak", + "unicode-xid", + "walkdir", +] + +[[package]] +name = "lalrpop-util" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "507460a910eb7b32ee961886ff48539633b788a36b65692b95f225b844c82553" +dependencies = [ + "regex-automata", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + [[package]] name = "leb128fmt" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" +[[package]] +name = "levenshtein" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db13adb97ab515a3691f56e4dbab09283d0b86cb45abd991d8634a9d6f501760" + [[package]] name = "libc" version = "0.2.182" @@ -727,6 +1251,9 @@ name = "log" version = "0.4.29" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" +dependencies = [ + "value-bag", +] [[package]] name = "memchr" @@ -768,6 +1295,12 @@ dependencies = [ "tempfile", ] +[[package]] +name = "new_debug_unreachable" +version = 
"1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" + [[package]] name = "once_cell" version = "1.21.3" @@ -803,7 +1336,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.117", ] [[package]] @@ -830,6 +1363,12 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" +[[package]] +name = "parking" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" + [[package]] name = "parking_lot" version = "0.12.5" @@ -860,6 +1399,7 @@ dependencies = [ "anyhow", "clap", "dirs", + "httpmock", "libc", "regex", "reqwest", @@ -875,6 +1415,31 @@ version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" +[[package]] +name = "petgraph" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" +dependencies = [ + "fixedbitset", + "indexmap", +] + +[[package]] +name = "phf_shared" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" +dependencies = [ + "siphasher", +] + +[[package]] +name = "pico-args" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5be167a7af36ee22fe3115051bc51f6e6c7054c9348e28deb4f49bd6f705a315" + [[package]] name = "pin-project-lite" version = "0.2.16" @@ -887,12 +1452,37 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" +[[package]] +name = "piper" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c835479a4443ded371d6c535cbfd8d31ad92c5d23ae9770a61bc155e4992a3c1" +dependencies = [ + "atomic-waker", + "fastrand", + "futures-io", +] + [[package]] name = "pkg-config" version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" +[[package]] +name = "polling" +version = "3.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d0e4f59085d47d8241c88ead0f274e8a0cb551f3625263c05eb8dd897c34218" +dependencies = [ + "cfg-if", + "concurrent-queue", + "hermit-abi", + "pin-project-lite", + "rustix", + "windows-sys 0.61.2", +] + [[package]] name = "potential_utf" version = "0.1.4" @@ -902,6 +1492,12 @@ dependencies = [ "zerovec", ] +[[package]] +name = "precomputed-hash" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" + [[package]] name = "prettyplease" version = "0.2.37" @@ -909,7 +1505,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" dependencies = [ "proc-macro2", - "syn", + "syn 2.0.117", ] [[package]] @@ -991,17 +1587,17 @@ version = "0.12.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eddd3ca559203180a307f12d114c268abf583f59b03cb906fd0b3ff8646c1147" dependencies = [ - "base64", + "base64 0.22.1", "bytes", "encoding_rs", "futures-channel", "futures-core", "futures-util", "h2", - "http", - "http-body", + "http 1.4.0", + "http-body 1.0.1", "http-body-util", - "hyper", + "hyper 1.8.1", "hyper-rustls", "hyper-tls", "hyper-util", @@ -1099,6 +1695,15 @@ version = "1.0.23" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "9774ba4a74de5f7b1c1451ed6cd5285a32eddb5cccb8cc655a4e50009e06477f" +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + [[package]] name = "schannel" version = "0.1.28" @@ -1170,7 +1775,7 @@ checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.117", ] [[package]] @@ -1186,6 +1791,16 @@ dependencies = [ "zmij", ] +[[package]] +name = "serde_regex" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8136f1a4ea815d7eac4101cfd0b16dc0cb5e1fe1b8609dfd728058656b7badf" +dependencies = [ + "regex", + "serde", +] + [[package]] name = "serde_urlencoded" version = "0.7.1" @@ -1214,6 +1829,18 @@ dependencies = [ "libc", ] +[[package]] +name = "similar" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbbb5d9659141646ae647b42fe094daf6c6192d1620870b449d9557f748b2daa" + +[[package]] +name = "siphasher" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2aa850e253778c88a04c3d7323b043aeda9d3e30d5971937c1855769763678e" + [[package]] name = "slab" version = "0.4.12" @@ -1226,6 +1853,16 @@ version = "1.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" +[[package]] +name = "socket2" +version = "0.5.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + [[package]] name = "socket2" version = "0.6.2" @@ -1242,6 +1879,18 @@ version = "1.2.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" +[[package]] +name = "string_cache" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf776ba3fa74f83bf4b63c3dcbbf82173db2632ed8452cb2d891d33f459de70f" +dependencies = [ + "new_debug_unreachable", + "parking_lot", + "phf_shared", + "precomputed-hash", +] + [[package]] name = "strsim" version = "0.11.1" @@ -1254,6 +1903,17 @@ version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + [[package]] name = "syn" version = "2.0.117" @@ -1282,7 +1942,7 @@ checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.117", ] [[package]] @@ -1319,6 +1979,17 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "term" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c59df8ac95d96ff9bede18eb7300b0fda5e5d8d90960e76f8e14ae765eedbf1f" +dependencies = [ + "dirs-next", + "rustversion", + "winapi", +] + [[package]] name = "thiserror" version = "1.0.69" @@ -1336,7 +2007,16 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.117", +] + +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", ] [[package]] @@ -1361,7 +2041,7 @@ dependencies = [ 
"parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2", + "socket2 0.6.2", "tokio-macros", "windows-sys 0.61.2", ] @@ -1374,7 +2054,7 @@ checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.117", ] [[package]] @@ -1434,8 +2114,8 @@ dependencies = [ "bitflags", "bytes", "futures-util", - "http", - "http-body", + "http 1.4.0", + "http-body 1.0.1", "iri-string", "pin-project-lite", "tower", @@ -1522,12 +2202,28 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" +[[package]] +name = "value-bag" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ba6f5989077681266825251a52748b8c1d8a4ad098cc37e440103d0ea717fc0" + [[package]] name = "vcpkg" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + [[package]] name = "want" version = "0.3.1" @@ -1607,7 +2303,7 @@ dependencies = [ "bumpalo", "proc-macro2", "quote", - "syn", + "syn 2.0.117", "wasm-bindgen-shared", ] @@ -1664,6 +2360,37 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + [[package]] name = "windows-link" version = "0.2.1" @@ -1951,7 +2678,7 @@ dependencies = [ "heck", "indexmap", "prettyplease", - "syn", + "syn 2.0.117", "wasm-metadata", "wit-bindgen-core", "wit-component", @@ -1967,7 +2694,7 @@ dependencies = [ "prettyplease", "proc-macro2", "quote", - "syn", + "syn 2.0.117", "wit-bindgen-core", "wit-bindgen-rust", ] @@ -2034,7 +2761,7 @@ checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.117", "synstructure", ] @@ -2055,7 +2782,7 @@ checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.117", "synstructure", ] @@ -2095,7 +2822,7 @@ checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.117", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 848189e..2fba260 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -20,3 +20,4 @@ dirs = "5.0" libc = "0.2" [dev-dependencies] +httpmock = "0.7" diff --git a/README.md b/README.md index 2dd7f67..36f52b8 100644 --- a/README.md +++ b/README.md @@ -1,127 +1,71 @@ -# pathFinder # +# Path Finder -pathFinder is a tool for mounting SKA data on Slurm clusters without copying the data locally. 
+A Rust implementation of the SKA path finder tool for authentication, locating & mounting data from the SKA storage system within a Slurm login host. -It allows the Scientist to specify which files, identified from the Science Gateway, they want to mount while keeping the files secure and owned by them. -## TODO Development +## Overview -- [ ] Always check site capabilities to ensure that the data is staged to your local RSE. - - [ ] Work out whether we need to check for tier 0. -- [ ] Tidy up the code around checking the response from the DM API `data/locate` request. -- [ ] Use this script to perform the data mount. -- [ ] Investigate whether the data can be specified using the IVO URI. +This project replaces the Python/Bash-based path finder (see git history) with a portable Rust implementation. It provides a single binary and an RPM installer. -## HOW TO Try this script during development +## Features -1. Ensure you have installed `uv` - +- OAuth2 device code flow authentication +- Data location lookup via Data Management API +- Site capabilities verification via Site Capabilities API +- Secure data mounting with proper permissions - uv --version +## Building - NB., you can use other dependency managers which use the `pyproject.toml` - e.g. `poetry`. Hint: `uv` is way faster! +The binary and RPM are built and published on a GitHub release. -2. Set your Data Management API Access Token: +## Installation - 1. Navigate to - 2. Click your initials badge in the top-right and select "View Token" - 3. Copy the "Data management access token" string - 4. Set the DATA_MANAGEMENT_ACCESS_TOKEN environment variable in your shell: +1. Find the latest release in GitHub, and copy the URL of the published RPM. - export DATA_MANAGEMENT_ACCESS_TOKEN=[PASTED STRING] +2. On the Slurm login node: -3. 
Run the script while `uv` takes care of the dependencies for you: + sudo dnf install [URL_TO_RELEASE_ARTEFACT] - uv run path_finder/path_finder.py +## Usage -## USE CASE - -Two methods are planned, interactive and a workflow managed by the Science Gateway via prepareData. - -This documentation covers the prerequisites to setup on the underlying configuration on a Slurm cluster and the installation of the pathFinder tool. - - -## Pre-requisites ## - -The following requirements must be met. - -(Note these are for Rocky 9.x releases and have not been tested on RHEL 10.x or Ubuntu) - - - CRB Enabled - - RHEL EPEL (Extra Packages) - - BindFS - - Ceph Common - -## Server Side Configuration ## - -The configuration is only required on the Login node of your Slurm cluster, this assumes that all your user home directories are CephFS/NFS mount points. - -If you already have EPEL enabled you can skip the next 2 steps. - -1. Enable CRB +With OAuth2 authentication (recommended): -``` -crb status -crb enable +```bash +sudo pathFinder \ + --namespace daac \ + --file_name pi24_test_run_1_cleaned.fits ``` -2. Install EPEL -``` -sudo dnf install epel-release -sudo dnf repolist -``` +With environment variables (for automation): -3. Configure your Ceph Keyring +```bash +export DATA_MANAGEMENT_ACCESS_TOKEN="your_token_here" +export SITE_CAPABILITIES_ACCESS_TOKEN="your_token_here" -``` -vi /etc/ceph/ceph.client.rucio_prod_ro.keyring -``` -Add your Access key. -``` -[client.rucio_prod_ro] -key = **************************** +sudo pathFinder \ + --namespace daac \ + --file_name pi24_test_run_1_cleaned.fits \ + --no-login ``` -4. Add an /etc/fstab entry -``` -10.4.200.9:6789,10.4.200.13:6789,10.4.200.13:6789,10.4.200.17:6789,10.4.200.25:6789,10.4.200.26:6789:/volumes/_nogroup/a8af40e8-6412-44da-ad08-3731fdf19258/4945e5c2-aab7-4416-9b75-666f2af512d7 /skadata ceph name=rucio_prod_ro,x-systemd.device-timeout=30,x-systemd.mount-timeout=30,noatime,_netdev,ro,nodev,nosuid 0 2 -``` -5. 
Mount the /skadata mountpoint. +**Note**: The tool will automatically check if the file exists locally at `/skadata`. If the file is not found locally, it will display the sites where the file is available and prompt you to ensure the data has been staged to your local site before mounting. -Note that we use bindfs here as well so all files under `/skadata` are presented as `root root` for owner and group and hides the real owner **uid/gid** which would typically be the xrootd, Webdav & Storm user uid/gid. -``` -mount /skadata -systemctl daemon-reload -bindfs -u root -g root /skadata /skadata -``` +## Architecture -6. Create a mountpoint, this MUST be owned by root with permissions of 550. -``` -sudo mkdir /skadata -sudo chmod 550 /skadata -``` +### Modules -7. Add a sudoers file to control access to the pathfinder tool. -``` -vi /etc/sudoers.d/pathFinder -``` -Using group `pathfinder` for group access for users you want to give access to. -``` -%pathfinder ALL = NOPASSWD: /usr/bin/pathfinder, /usr/bin/pathFinder -``` +- **main.rs** - Main path finder CLI logic +- **api_client.rs** - HTTP client for Data Management and Site Capabilities APIs +- **oauth2_auth.rs** - OAuth2 device code flow implementation with token caching +- **models.rs** - Data structures for API responses (sites, nodes, storage areas, data locations) +- **mount.rs** - Mount/unmount utility for data access -8. Add the local groups. -``` -groupadd pathfinder -``` +## System Requirements -9. Add or update the local users to their corresponding group. -``` -usermod -a -G pathfinder sm2921 -``` -10. Install the pathFinder package. -``` -dnf install https://github.com/uksrc/pathFinder/releases/download/v1.0.0/pathfinder-1.0.0-1.x86_64.rpm -``` +- **bindfs** - FUSE filesystem for permission remapping +- **sudo** - Required for mount operations +- **mountpoint** - Used to verify mount status +The system needs to have the local RSE mounted at `/skadata` as a 700 mount owned by root:root. 
TODO: Ensure the program checks this and reports correctly if the share is not present. +A sudoers file needs to be added to allow members for the pathfinders group sudo privileges to the executable - TODO: Add this to the RPM. -The RSE location will be used to run a `bindfs` command on the parent folder to mount this into the user's `~/.skadata/` directory, setting the user and group to the current user. The specific file from the parent folder will then be used to `mount --bind` that file to `~/skadata/[FILE_NAME]`. diff --git a/RUST_README.md b/RUST_README.md deleted file mode 100644 index c4a50bc..0000000 --- a/RUST_README.md +++ /dev/null @@ -1,186 +0,0 @@ -# Path Finder - Rust Implementation - -A Rust implementation of the SKA path finder tool for locating and mounting data from SKA storage systems. - -## Overview - -This project replaces the Python-based path finder with a high-performance Rust implementation. It provides two binaries: - -1. **path-finder** - Main CLI tool for authenticating and locating SKA data -2. **pathfinder-mount** - Utility for mounting/unmounting data using bindfs - -## Features - -- OAuth2 device code flow authentication -- Token caching for improved performance -- Data location lookup via Data Management API -- Site capabilities verification via Site Capabilities API -- Automated data mounting with proper permissions -- Error handling and validation - -## Building - -```bash -cargo build --release -``` - -The binaries will be available in `target/release/`: - -- `target/release/path-finder` -- `target/release/pathfinder-mount` - -## Installation - -### Option 1: Install from source - -```bash -cargo install --path . 
-``` - -### Option 2: Manual installation - -```bash -sudo cp target/release/path-finder /usr/local/bin/ -sudo cp target/release/pathfinder-mount /usr/local/bin/ -sudo chmod +x /usr/local/bin/path-finder -sudo chmod +x /usr/local/bin/pathfinder-mount -``` - -## Usage - -### Main Path Finder - -With OAuth2 authentication (recommended): - -```bash -path-finder \ - --namespace daac \ - --file_name pi24_test_run_1_cleaned.fits -``` - -With environment variables (for automation): - -```bash -export DATA_MANAGEMENT_ACCESS_TOKEN="your_token_here" -export SITE_CAPABILITIES_ACCESS_TOKEN="your_token_here" - -path-finder \ - --namespace daac \ - --file_name pi24_test_run_1_cleaned.fits \ - --no-login -``` - -**Note**: The tool will automatically check if the file exists locally at `/skadata`. If the file is not found locally, it will display the sites where the file is available and prompt you to ensure the data has been staged to your local site before mounting. - -### Mount Utility - -The mount utility is designed to be called with sudo privileges. It handles: - -- Creating bind mounts from `/skadata` to user home directories -- Setting appropriate permissions -- Managing mount points to avoid cyclic mounts - -Mount data: - -```bash -sudo pathfinder-mount --mount /daac/pi24_test_run_1_cleaned.fits daac -``` - -Unmount data: - -```bash -sudo pathfinder-mount --unmount /daac/pi24_test_run_1_cleaned.fits daac -``` - -## Architecture - -### Modules - -- **oauth2_auth.rs** - OAuth2 device code flow implementation with token caching -- **models.rs** - Data structures for API responses (sites, nodes, storage areas, data locations) -- **api_client.rs** - HTTP client for Data Management and Site Capabilities APIs -- **main.rs** - Main path finder CLI logic -- **mount.rs** - Mount/unmount utility for data access - -### Authentication Flow - -1. Initiate device code flow with authn service -2. Display user code and verification URL -3. Poll for authentication completion -4. 
Exchange device token for API-specific tokens -5. Cache tokens for future use (default: 1 hour) - -### Data Location Flow - -1. Verify namespace exists in Data Management API -2. Verify site name exists in Site Capabilities API -3. Fetch site storage area mappings -4. Locate data file in namespace -5. Verify data is available at requested site -6. Extract RSE path from replica URIs -7. Call mount utility to make data accessible - -## Dependencies - -- **clap** - Command-line argument parsing -- **reqwest** - HTTP client -- **serde** - Serialization/deserialization -- **anyhow** - Error handling -- **regex** - Pattern matching for RSE paths -- **dirs** - Cross-platform config directory location - -## System Requirements - -- **bindfs** - FUSE filesystem for permission remapping -- **sudo** - Required for mount operations -- **mountpoint** - Used to verify mount status - -## Token Caching - -Tokens are cached in `~/.config/path-finder/tokens.json` with secure permissions (0600). -Cache expires after 1 hour (configurable in code). - -## Error Handling - -The tool provides detailed error messages for: - -- Network failures -- Authentication failures -- Missing data or sites -- Permission issues -- Mount failures - -## Comparison with Python Implementation - -| Feature | Python | Rust | -| -------------- | ------------------------- | ----------------- | -| Performance | Slower | Faster | -| Memory Usage | Higher | Lower | -| Binary Size | N/A (interpreted) | ~6MB (release) | -| Dependencies | Runtime Python + packages | Statically linked | -| Error Messages | Good | Excellent | -| Type Safety | Runtime (Pydantic) | Compile-time | - -## Development - -Run tests: - -```bash -cargo test -``` - -Format code: - -```bash -cargo fmt -``` - -Lint code: - -```bash -cargo clippy -``` - -## License - -Same as the original Python implementation. 
diff --git a/src/api_client.rs b/src/api_client.rs index 8c73a10..a6cddc9 100644 --- a/src/api_client.rs +++ b/src/api_client.rs @@ -1,3 +1,5 @@ +//! API client code for interacting with the SRCNet APIs + use crate::models::*; use anyhow::{Context, Result}; use reqwest::blocking::Client; @@ -5,10 +7,31 @@ use reqwest::blocking::Client; const DM_API_BASEURL: &str = "https://data-management.srcnet.skao.int/api/v1"; const SC_API_BASEURL: &str = "https://site-capabilities.srcnet.skao.int/api/v1"; +/// API client for interacting with the Path Finder APIs +/// +/// This trait allows for abstraction and easier testing of API interactions. +/// The `ApiClient` struct provides a concrete implementation. +pub trait PathFinderApiClient { + + /// Checks if the specified namespace is available by querying the DM API. + fn check_namespace_available(&self, namespace: &str) -> Result<()>; + + /// Retrieves a list of all available namespaces from the DM API. + fn get_all_namespaces(&self) -> Result>; + + /// Retrieves a mapping of storage area IDs to their associated node and site information from the SC API. + fn site_storage_areas(&self) -> Result; + + /// Locates the specified data file within the given namespace by querying the DM API. 
+ fn locate_data(&self, namespace: &str, file_name: &str) -> Result; +} + pub struct ApiClient { client: Client, dm_token: String, sc_token: String, + dm_base_url: String, + sc_base_url: String, } impl ApiClient { @@ -17,10 +40,33 @@ impl ApiClient { client: Client::new(), dm_token, sc_token, + dm_base_url: DM_API_BASEURL.to_string(), + sc_base_url: SC_API_BASEURL.to_string(), + } + } + + #[cfg(test)] + pub fn new_with_urls( + dm_token: String, + sc_token: String, + dm_base_url: String, + sc_base_url: String, + ) -> Self { + Self { + client: Client::new(), + dm_token, + sc_token, + dm_base_url, + sc_base_url, } } +} - pub fn check_namespace_available(&self, namespace: &str) -> Result<()> { +/// Implementation of the `PathFinderApiClient` trait for `ApiClient`, providing concrete logic for API interactions. +/// +/// See the trait for method documentation. +impl PathFinderApiClient for ApiClient { + fn check_namespace_available(&self, namespace: &str) -> Result<()> { let namespaces = self.get_all_namespaces()?; if !namespaces.contains(&namespace.to_string()) { anyhow::bail!( @@ -32,8 +78,8 @@ impl ApiClient { Ok(()) } - pub fn get_all_namespaces(&self) -> Result> { - let url = format!("{}/data/list", DM_API_BASEURL); + fn get_all_namespaces(&self) -> Result> { + let url = format!("{}/data/list", self.dm_base_url); let response = self .client .get(&url) @@ -48,8 +94,8 @@ impl ApiClient { .context("Failed to parse namespaces response") } - pub fn site_storage_areas(&self) -> Result { - let url = format!("{}/nodes", SC_API_BASEURL); + fn site_storage_areas(&self) -> Result { + let url = format!("{}/nodes", self.sc_base_url); let response = self .client .get(&url) @@ -61,26 +107,27 @@ impl ApiClient { .error_for_status() .context("SC API request failed")?; - let response_text = response.text() - .context("Failed to read response body")?; + let response_text = response.text().context("Failed to read response body")?; - let nodes: NodesAPIResponse = 
serde_json::from_str(&response_text) - .with_context(|| { - format!( - "Failed to parse nodes response. Response body:\n{}", - if response_text.len() > 1000 { - format!("{}... (truncated)", &response_text[..1000]) - } else { - response_text.clone() - } - ) - })?; + let nodes: NodesAPIResponse = serde_json::from_str(&response_text).with_context(|| { + format!( + "Failed to parse nodes response. Response body:\n{}", + if response_text.len() > 1000 { + format!("{}... (truncated)", &response_text[..1000]) + } else { + response_text.clone() + } + ) + })?; Ok(get_all_node_storage_areas(&nodes)) } - pub fn locate_data(&self, namespace: &str, file_name: &str) -> Result { - let url = format!("{}/data/locate/{}/{}", DM_API_BASEURL, namespace, file_name); + fn locate_data(&self, namespace: &str, file_name: &str) -> Result { + let url = format!( + "{}/data/locate/{}/{}", + self.dm_base_url, namespace, file_name + ); let response = self .client .get(&url) @@ -97,19 +144,262 @@ impl ApiClient { .error_for_status() .context("DM API locate request failed")?; - let response_text = response.text() - .context("Failed to read response body")?; + let response_text = response.text().context("Failed to read response body")?; - serde_json::from_str(&response_text) - .with_context(|| { - format!( - "Failed to parse data locations response. Response body:\n{}", - if response_text.len() > 1000 { - format!("{}... (truncated)", &response_text[..1000]) - } else { - response_text.clone() - } - ) - }) + serde_json::from_str(&response_text).with_context(|| { + format!( + "Failed to parse data locations response. Response body:\n{}", + if response_text.len() > 1000 { + format!("{}... 
(truncated)", &response_text[..1000]) + } else { + response_text.clone() + } + ) + }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use httpmock::prelude::*; + + fn client_for(dm_server: &MockServer, sc_server: &MockServer) -> ApiClient { + ApiClient::new_with_urls( + "dm-token".to_string(), + "sc-token".to_string(), + dm_server.base_url(), + sc_server.base_url(), + ) + } + + // --- get_all_namespaces --- + + #[test] + fn get_all_namespaces_returns_parsed_list() { + let dm = MockServer::start(); + let sc = MockServer::start(); + dm.mock(|when, then| { + when.method(GET).path("/data/list"); + then.status(200).body(r#"["daac","lsst","ska-mid"]"#); + }); + + let namespaces = client_for(&dm, &sc).get_all_namespaces().unwrap(); + assert_eq!(namespaces, vec!["daac", "lsst", "ska-mid"]); + } + + #[test] + fn get_all_namespaces_propagates_401() { + let dm = MockServer::start(); + let sc = MockServer::start(); + dm.mock(|when, then| { + when.method(GET).path("/data/list"); + then.status(401).body("Unauthorized"); + }); + + let err = client_for(&dm, &sc).get_all_namespaces().unwrap_err(); + assert!(err.to_string().contains("DM API request failed"), "{err}"); + } + + #[test] + fn get_all_namespaces_propagates_500() { + let dm = MockServer::start(); + let sc = MockServer::start(); + dm.mock(|when, then| { + when.method(GET).path("/data/list"); + then.status(500).body("Internal Server Error"); + }); + + assert!(client_for(&dm, &sc).get_all_namespaces().is_err()); + } + + // --- check_namespace_available --- + + #[test] + fn check_namespace_available_succeeds_when_present() { + let dm = MockServer::start(); + let sc = MockServer::start(); + dm.mock(|when, then| { + when.method(GET).path("/data/list"); + then.status(200).body(r#"["daac","lsst"]"#); + }); + + assert!(client_for(&dm, &sc) + .check_namespace_available("daac") + .is_ok()); + } + + #[test] + fn check_namespace_available_bails_when_absent() { + let dm = MockServer::start(); + let sc = MockServer::start(); + 
dm.mock(|when, then| { + when.method(GET).path("/data/list"); + then.status(200).body(r#"["lsst"]"#); + }); + + let err = client_for(&dm, &sc) + .check_namespace_available("daac") + .unwrap_err(); + assert!(err.to_string().contains("not found"), "{err}"); + } + + // --- site_storage_areas --- + + #[test] + fn site_storage_areas_empty_nodes_returns_empty_map() { + let dm = MockServer::start(); + let sc = MockServer::start(); + sc.mock(|when, then| { + when.method(GET).path("/nodes"); + then.status(200).body("[]"); + }); + + let map = client_for(&dm, &sc).site_storage_areas().unwrap(); + assert!(map.is_empty()); + } + + #[test] + fn site_storage_areas_parses_node_storage_mapping() { + let dm = MockServer::start(); + let sc = MockServer::start(); + sc.mock(|when, then| { + when.method(GET).path("/nodes"); + then.status(200).body( + r#"[ + { + "name": "uk-node", + "description": "UK Node", + "sites": [{ + "id": "site-1", + "name": "Oxford", + "country": "GB", + "storages": [{ + "id": "storage-1", + "name": "primary", + "areas": [{ + "id": "area-abc", + "name": "fits-store", + "type": "disk", + "relative_path": "/data", + "tier": 1 + }] + }] + }] + } + ]"#, + ); + }); + + let map = client_for(&dm, &sc).site_storage_areas().unwrap(); + assert!(map.contains_key("area-abc")); + let (node, site, area) = map.get("area-abc").unwrap(); + assert_eq!(node, "uk-node"); + assert_eq!(site, "Oxford"); + assert_eq!(area, "fits-store"); + } + + #[test] + fn site_storage_areas_propagates_401() { + let dm = MockServer::start(); + let sc = MockServer::start(); + sc.mock(|when, then| { + when.method(GET).path("/nodes"); + then.status(401); + }); + + let err = client_for(&dm, &sc).site_storage_areas().unwrap_err(); + assert!(err.to_string().contains("SC API request failed"), "{err}"); + } + + #[test] + fn site_storage_areas_errors_on_malformed_json() { + let dm = MockServer::start(); + let sc = MockServer::start(); + sc.mock(|when, then| { + when.method(GET).path("/nodes"); + 
then.status(200).body("not json at all"); + }); + + let err = client_for(&dm, &sc).site_storage_areas().unwrap_err(); + assert!( + err.to_string().contains("Failed to parse nodes response"), + "{err}" + ); + } + + // --- locate_data --- + + #[test] + fn locate_data_returns_parsed_locations() { + let dm = MockServer::start(); + let sc = MockServer::start(); + dm.mock(|when, then| { + when.method(GET).path("/data/locate/daac/file.fits"); + then.status(200).body( + r#"[ + { + "identifier": "loc-1", + "associated_storage_area_id": "area-abc", + "replicas": ["rucio://rse1/daac/2022/file.fits"] + } + ]"#, + ); + }); + + let locations = client_for(&dm, &sc) + .locate_data("daac", "file.fits") + .unwrap(); + assert_eq!(locations.len(), 1); + assert_eq!(locations[0].identifier, "loc-1"); + assert_eq!(locations[0].replicas[0], "rucio://rse1/daac/2022/file.fits"); + } + + #[test] + fn locate_data_returns_empty_list() { + let dm = MockServer::start(); + let sc = MockServer::start(); + dm.mock(|when, then| { + when.method(GET).path("/data/locate/daac/missing.fits"); + then.status(200).body("[]"); + }); + + let locations = client_for(&dm, &sc) + .locate_data("daac", "missing.fits") + .unwrap(); + assert!(locations.is_empty()); + } + + #[test] + fn locate_data_propagates_404() { + let dm = MockServer::start(); + let sc = MockServer::start(); + dm.mock(|when, then| { + when.method(GET).path("/data/locate/daac/file.fits"); + then.status(404); + }); + + assert!(client_for(&dm, &sc) + .locate_data("daac", "file.fits") + .is_err()); + } + + #[test] + fn locate_data_errors_on_malformed_json() { + let dm = MockServer::start(); + let sc = MockServer::start(); + dm.mock(|when, then| { + when.method(GET).path("/data/locate/daac/file.fits"); + then.status(200).body("{bad json}"); + }); + + let err = client_for(&dm, &sc) + .locate_data("daac", "file.fits") + .unwrap_err(); + assert!( + err.to_string() + .contains("Failed to parse data locations response"), + "{err}" + ); } } diff --git 
a/src/main.rs b/src/main.rs index 8932d13..db755c5 100644 --- a/src/main.rs +++ b/src/main.rs @@ -10,7 +10,7 @@ use std::collections::HashSet; use std::env; use std::process::exit; -use api_client::ApiClient; +use api_client::{ApiClient, PathFinderApiClient}; use models::{DataLocation, StorageAreaIDToNodeAndSite}; use oauth2_auth::{authenticate, Tokens}; @@ -42,8 +42,7 @@ fn main() -> Result<()> { // Handle unmount operation (no API calls needed) if args.unmount { - let sudo_user = env::var("SUDO_USER") - .context("SUDO_USER not set")?; + let sudo_user = env::var("SUDO_USER").context("SUDO_USER not set")?; let fits_path = format!("/{}/{}", args.namespace, args.file_name); mount::unmount_operation(&fits_path, &args.namespace, &sudo_user)?; @@ -60,11 +59,7 @@ fn main() -> Result<()> { tokens }; - run( - &args.namespace, - &args.file_name, - &tokens, - ) + run(&args.namespace, &args.file_name, &tokens) } fn check_privileges(args: &Args) -> Result<()> { @@ -76,11 +71,15 @@ fn check_privileges(args: &Args) -> Result<()> { eprintln!("\nError: This tool requires root privileges for mount/unmount operations."); eprintln!("Please re-run with sudo:"); if args.unmount { - eprintln!(" sudo -E path-finder --namespace {} --file_name {} --unmount", - args.namespace, args.file_name); + eprintln!( + " sudo -E path-finder --namespace {} --file_name {} --unmount", + args.namespace, args.file_name + ); } else { - eprintln!(" sudo -E path-finder --namespace {} --file_name {}", - args.namespace, args.file_name); + eprintln!( + " sudo -E path-finder --namespace {} --file_name {}", + args.namespace, args.file_name + ); } anyhow::bail!("Insufficient privileges - sudo required"); } @@ -152,10 +151,16 @@ fn print_data_locations_with_sites( data_locations: &[DataLocation], ) { for location in data_locations { - if let Some((node_name, site_name, area_name)) = site_stores.get(&location.associated_storage_area_id) { + if let Some((node_name, site_name, area_name)) = + 
site_stores.get(&location.associated_storage_area_id) + { println!( "Data location ID: {}, Storage Area: {} ({}), Node: {}, Site: {}", - location.identifier, area_name, location.associated_storage_area_id, node_name, site_name + location.identifier, + area_name, + location.associated_storage_area_id, + node_name, + site_name ); } else { println!( @@ -219,14 +224,19 @@ fn extract_rse_path( } fn mount_data(rse_path: &str, namespace: &str) -> Result<()> { - println!("Mounting data from RSE path: {} in namespace: {}", rse_path, namespace); + println!( + "Mounting data from RSE path: {} in namespace: {}", + rse_path, namespace + ); // Get the original user (already verified in check_privileges()) - let sudo_user = env::var("SUDO_USER") - .context("SUDO_USER not set")?; + let sudo_user = env::var("SUDO_USER").context("SUDO_USER not set")?; mount::mount_operation(rse_path, namespace, &sudo_user)?; - println!("Successfully mounted {} in namespace {}", rse_path, namespace); + println!( + "Successfully mounted {} in namespace {}", + rse_path, namespace + ); Ok(()) } From a8d3a9e97024c96dd6df428512a2a877a8d12842 Mon Sep 17 00:00:00 2001 From: Roger Duthie <343584+rjaduthie@users.noreply.github.com> Date: Mon, 30 Mar 2026 13:44:16 +0100 Subject: [PATCH 17/27] feat(SOG-480): Add github action to run tests on each commit --- .github/workflows/ci.yml | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 .github/workflows/ci.yml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..537294b --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,25 @@ +name: CI + +on: [push, pull_request] + +jobs: + test: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v6 + + - name: Install Rust + uses: dtolnay/rust-toolchain@stable + + - name: Cache dependencies + uses: actions/cache@v5 + with: + path: | + ~/.cargo/registry + ~/.cargo/git + target + key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') 
}} + + - name: Run tests + run: cargo test From e078219008ca274f93e663c817071092cf5d8c49 Mon Sep 17 00:00:00 2001 From: Roger Duthie <343584+rjaduthie@users.noreply.github.com> Date: Mon, 30 Mar 2026 15:54:06 +0100 Subject: [PATCH 18/27] chore(SOG-480): Add tests to API models module --- src/api_client.rs | 3 +- src/models.rs | 325 +++++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 326 insertions(+), 2 deletions(-) diff --git a/src/api_client.rs b/src/api_client.rs index a6cddc9..7deaba9 100644 --- a/src/api_client.rs +++ b/src/api_client.rs @@ -341,7 +341,8 @@ mod tests { { "identifier": "loc-1", "associated_storage_area_id": "area-abc", - "replicas": ["rucio://rse1/daac/2022/file.fits"] + "replicas": ["rucio://rse1/daac/2022/file.fits"], + "is_dataset": false } ]"#, ); diff --git a/src/models.rs b/src/models.rs index 191dfc9..3f28e03 100644 --- a/src/models.rs +++ b/src/models.rs @@ -1,15 +1,45 @@ +//! Data models for the SRCNet APIs + use serde::{Deserialize, Serialize}; use std::collections::HashMap; +/// A single data location returned by the DM API, describing where a file replica or replicas live. +/// +/// Example: +/// +/// { +/// "identifier": "UKSRC-CAM-T0", +/// "associated_storage_area_id": "2a73d212-8793-4011-a687-cad99841c269", +/// "replicas": [ +/// "davs://xrootd01.cam.uksrc.org:1094/skadata/daac/08/06/random10MiB.bin" +/// ], +/// "is_dataset": false +/// } +/// +/// #[derive(Debug, Clone, Serialize, Deserialize)] pub struct DataLocation { pub identifier: String, pub associated_storage_area_id: String, pub replicas: Vec, + pub is_dataset: bool, } +/// The full response from the DM API locate endpoint: a list of [`DataLocation`] entries. pub type DataLocationAPIResponse = Vec; +/// A storage area within a [`Storage`] resource at a site. +/// +/// Default values are required as some fields are not populated in the API response. 
+/// +/// Example: +/// { +/// "id": "ce04d165-4d5f-4380-a674-2a9ae4aba75e", +/// "type": "rse", +/// "relative_path": "/", +/// "name": "UKSRC_RAL_XRD", +/// "tier": 1 +/// } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct StorageArea { pub id: String, @@ -22,6 +52,38 @@ pub struct StorageArea { pub tier: Option, } +/// A storage resource at a site, containing one or more [`StorageArea`]s. +/// +/// Default values are required as some fields are not populated in the API response. +/// +/// Example: +/// { +/// "id": "12345678-90ab-cdef-1234-567890abcdef", +/// "host": "myxrootd.example.com", +/// "base_path": "/base/data/", +/// "srm": "xrd", +/// "device_type": "hdd", +/// "size_in_terabytes": 200, +/// "name": "UKSRC_RAL_XRD", +/// "supported_protocols": [ +/// { +/// "prefix": "https", +/// "port": 1094 +/// } +/// ], +/// "downtime": [ +/// { +/// "id": "12345678-90ab-cdef-1234-567890abcdef", +/// "date_range": "2099-03-15T12:00:00.000Z to 2099-04-01T11:59:59.999Z", +/// "type": "Planned", +/// "reason": "Beware the Ides of March! Don't be a fool!" +/// } +/// ], +/// "areas": [ +/// ... +/// ] +/// } +/// #[derive(Debug, Clone, Serialize, Deserialize)] pub struct Storage { pub id: String, @@ -29,8 +91,30 @@ pub struct Storage { pub name: String, #[serde(default)] pub areas: Vec, + // ... other fields omitted } +/// A physical site belonging to a [`Node`], containing one or more [`Storage`] resources. +/// +/// Default values are required as some fields are not populated in the API response. +/// +/// Example: +/// { +/// "id": "12345678-90ab-cdef-1234-567890abcdef", +/// "name": "UKSRC-RAL", +/// "description": "Rutherford Appleton Laboratory", +/// "country": "GB", +/// "latitude": 51.5707, +/// "longitude": -1.3088, +/// "primary_contact_email": "onna@example.com ", +/// "secondary_contact_email": "otoko@example.com", +/// "storages": [ +/// ... +/// ], +/// "compute": [ +/// ... 
+/// ] +/// } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct Site { pub id: String, @@ -39,9 +123,11 @@ pub struct Site { pub country: String, #[serde(default)] pub storages: Vec, + // ... other fields omitted } impl Site { + /// Returns a flat list of all [`StorageArea`]s across every [`Storage`] at this site. pub fn storage_areas(&self) -> Vec<&StorageArea> { self.storages .iter() @@ -50,6 +136,22 @@ impl Site { } } +/// An SRCNet node, as returned from the SC API /nodes endpoint, grouping one or more [`Site`]s under a common name. +/// +/// Default values are required as some fields are not populated in the API response. +/// +/// Example: +/// { +/// "name": "UKSRC", +/// "description": "UKSRC Node", +/// "sites": [ +/// ... +/// ], +/// "last_updated_at": "2026-03-19T14:24:37.869190", +/// "last_updated_by_username": "ma2223", +/// "version": 50 +/// } +/// #[derive(Debug, Clone, Serialize, Deserialize)] pub struct Node { pub name: String, @@ -57,9 +159,12 @@ pub struct Node { pub description: String, #[serde(default)] pub sites: Vec, -} + // ... other fields omitted + } impl Node { + /// Builds a map from storage area ID to a `(node_name, site_name, area_name)` tuple + /// for every storage area across all sites in this node. pub fn storage_area_id_to_site_name(&self) -> HashMap { let mut mapping = HashMap::new(); for site in &self.sites { @@ -74,9 +179,15 @@ impl Node { } } +/// The full response from the SC API nodes endpoint: a list of [`Node`]s. pub type NodesAPIResponse = Vec; + +/// A map from storage area ID to a `(node_name, site_name, area_name)` tuple, +/// aggregated across all nodes. pub type StorageAreaIDToNodeAndSite = HashMap; +/// Aggregates storage area mappings across all provided nodes into a single +/// [`StorageAreaIDToNodeAndSite`] map. 
pub fn get_all_node_storage_areas(nodes: &[Node]) -> StorageAreaIDToNodeAndSite { let mut storage_area_mapping = HashMap::new(); for node in nodes { @@ -84,3 +195,215 @@ pub fn get_all_node_storage_areas(nodes: &[Node]) -> StorageAreaIDToNodeAndSite } storage_area_mapping } + +#[cfg(test)] +mod tests { + use super::*; + + const RAL_AREA_ID: &str = "ce04d165-4d5f-4380-a674-2a9ae4aba75e"; + const CAM_AREA_ID: &str = "2a73d212-8793-4011-a687-cad99841c269"; + const RAL_SITE_ID: &str = "a1b2c3d4-e5f6-7890-abcd-ef1234567890"; + const CAM_SITE_ID: &str = "b2c3d4e5-f6a7-8901-bcde-f12345678901"; + + // --- helpers --- + + fn make_area(id: &str, name: &str) -> StorageArea { + StorageArea { + id: id.to_string(), + name: name.to_string(), + storage_type: "rse".to_string(), + relative_path: "/".to_string(), + tier: Some(1), + } + } + + fn make_storage(id: &str, name: &str, areas: Vec) -> Storage { + Storage { + id: id.to_string(), + name: name.to_string(), + areas, + } + } + + fn make_site(id: &str, name: &str, storages: Vec) -> Site { + Site { + id: id.to_string(), + name: name.to_string(), + country: "GB".to_string(), + storages, + } + } + + fn make_node(name: &str, sites: Vec) -> Node { + Node { + name: name.to_string(), + description: format!("{} Node", name), + sites, + } + } + + // --- Site::storage_areas --- + + #[test] + fn site_storage_areas_empty_storages_returns_empty() { + let site = make_site(RAL_SITE_ID, "UKSRC-RAL", vec![]); + assert!(site.storage_areas().is_empty()); + } + + #[test] + fn site_storage_areas_flattens_multiple_storages() { + let site = make_site( + RAL_SITE_ID, + "UKSRC-RAL", + vec![ + make_storage("st1", "UKSRC_RAL_XRD", vec![make_area(RAL_AREA_ID, "UKSRC_RAL_XRD")]), + make_storage( + "st2", + "UKSRC_RAL_STORM", + vec![ + make_area(CAM_AREA_ID, "UKSRC_RAL_STORM"), + make_area("c3d4e5f6-a7b8-9012-cdef-123456789012", "UKSRC_RAL_TAPE"), + ], + ), + ], + ); + let areas = site.storage_areas(); + assert_eq!(areas.len(), 3); + let ids: Vec<&str> = 
areas.iter().map(|a| a.id.as_str()).collect(); + assert!(ids.contains(&RAL_AREA_ID)); + assert!(ids.contains(&CAM_AREA_ID)); + assert!(ids.contains(&"c3d4e5f6-a7b8-9012-cdef-123456789012")); + } + + // --- Node::storage_area_id_to_site_name --- + + #[test] + fn storage_area_id_to_site_name_empty_sites_returns_empty() { + let node = make_node("UKSRC", vec![]); + assert!(node.storage_area_id_to_site_name().is_empty()); + } + + #[test] + fn storage_area_id_to_site_name_maps_correctly() { + let node = make_node( + "UKSRC", + vec![make_site( + RAL_SITE_ID, + "UKSRC-RAL", + vec![make_storage("st1", "UKSRC_RAL_XRD", vec![make_area(RAL_AREA_ID, "UKSRC_RAL_XRD")])], + )], + ); + let map = node.storage_area_id_to_site_name(); + assert_eq!(map.len(), 1); + let (node_name, site_name, area_name) = map.get(RAL_AREA_ID).unwrap(); + assert_eq!(node_name, "UKSRC"); + assert_eq!(site_name, "UKSRC-RAL"); + assert_eq!(area_name, "UKSRC_RAL_XRD"); + } + + #[test] + fn storage_area_id_to_site_name_multiple_sites() { + let node = make_node( + "UKSRC", + vec![ + make_site(RAL_SITE_ID, "UKSRC-RAL", vec![make_storage("st1", "UKSRC_RAL_XRD", vec![make_area(RAL_AREA_ID, "UKSRC_RAL_XRD")])]), + make_site(CAM_SITE_ID, "UKSRC-CAM", vec![make_storage("st2", "UKSRC_CAM_XRD", vec![make_area(CAM_AREA_ID, "UKSRC_CAM_XRD")])]), + ], + ); + let map = node.storage_area_id_to_site_name(); + assert_eq!(map.len(), 2); + assert_eq!(map.get(RAL_AREA_ID).unwrap().1, "UKSRC-RAL"); + assert_eq!(map.get(CAM_AREA_ID).unwrap().1, "UKSRC-CAM"); + } + + // --- get_all_node_storage_areas --- + + #[test] + fn get_all_node_storage_areas_empty_nodes_returns_empty() { + let map = get_all_node_storage_areas(&[]); + assert!(map.is_empty()); + } + + #[test] + fn get_all_node_storage_areas_aggregates_across_nodes() { + let aussrc_area_id = "d4e5f6a7-b8c9-0123-defa-234567890123"; + let nodes = vec![ + make_node( + "UKSRC", + vec![make_site(RAL_SITE_ID, "UKSRC-RAL", vec![make_storage("st1", "UKSRC_RAL_XRD", 
vec![make_area(RAL_AREA_ID, "UKSRC_RAL_XRD")])])], + ), + make_node( + "AUSSRC", + vec![make_site("e5f6a7b8-c9d0-1234-efab-345678901234", "AUSSRC-ICRAR", vec![make_storage("st2", "AUSSRC_ICRAR_XRD", vec![make_area(aussrc_area_id, "AUSSRC_ICRAR_XRD")])])], + ), + ]; + let map = get_all_node_storage_areas(&nodes); + assert_eq!(map.len(), 2); + assert_eq!(map.get(RAL_AREA_ID).unwrap().0, "UKSRC"); + assert_eq!(map.get(aussrc_area_id).unwrap().0, "AUSSRC"); + } + + #[test] + fn get_all_node_storage_areas_later_node_wins_on_duplicate_id() { + let nodes = vec![ + make_node( + "UKSRC", + vec![make_site(RAL_SITE_ID, "UKSRC-RAL", vec![make_storage("st1", "UKSRC_RAL_XRD", vec![make_area(RAL_AREA_ID, "UKSRC_RAL_XRD")])])], + ), + make_node( + "AUSSRC", + vec![make_site("e5f6a7b8-c9d0-1234-efab-345678901234", "AUSSRC-ICRAR", vec![make_storage("st2", "AUSSRC_ICRAR_XRD", vec![make_area(RAL_AREA_ID, "AUSSRC_ICRAR_XRD")])])], + ), + ]; + let map = get_all_node_storage_areas(&nodes); + assert_eq!(map.len(), 1); + assert_eq!(map.get(RAL_AREA_ID).unwrap().0, "AUSSRC"); + } + + // --- DataLocation deserialisation --- + + #[test] + fn data_location_deserialises_from_json() { + let json = r#"{ + "identifier": "UKSRC-CAM-T0", + "associated_storage_area_id": "2a73d212-8793-4011-a687-cad99841c269", + "replicas": ["davs://xrootd01.cam.uksrc.org:1094/skadata/daac/08/06/random10MiB.bin"], + "is_dataset": false + }"#; + let loc: DataLocation = serde_json::from_str(json).unwrap(); + assert_eq!(loc.identifier, "UKSRC-CAM-T0"); + assert_eq!(loc.associated_storage_area_id, "2a73d212-8793-4011-a687-cad99841c269"); + assert_eq!(loc.replicas[0], "davs://xrootd01.cam.uksrc.org:1094/skadata/daac/08/06/random10MiB.bin"); + assert!(!loc.is_dataset); + } + + // --- StorageArea deserialisation --- + + #[test] + fn storage_area_deserialises_from_json() { + let json = r#"{ + "id": "ce04d165-4d5f-4380-a674-2a9ae4aba75e", + "type": "rse", + "relative_path": "/", + "name": "UKSRC_RAL_XRD", + "tier": 1 + }"#; + 
let area: StorageArea = serde_json::from_str(json).unwrap(); + assert_eq!(area.id, "ce04d165-4d5f-4380-a674-2a9ae4aba75e"); + assert_eq!(area.storage_type, "rse"); + assert_eq!(area.relative_path, "/"); + assert_eq!(area.name, "UKSRC_RAL_XRD"); + assert_eq!(area.tier, Some(1)); + } + + #[test] + fn storage_area_defaults_missing_optional_fields() { + let json = r#"{"id": "ce04d165-4d5f-4380-a674-2a9ae4aba75e"}"#; + let area: StorageArea = serde_json::from_str(json).unwrap(); + assert_eq!(area.id, "ce04d165-4d5f-4380-a674-2a9ae4aba75e"); + assert_eq!(area.name, ""); + assert_eq!(area.storage_type, ""); + assert_eq!(area.relative_path, ""); + assert!(area.tier.is_none()); + } +} From 744d99ec69b30a880be50eb63fdcd9dfc4988899 Mon Sep 17 00:00:00 2001 From: Roger Duthie <343584+rjaduthie@users.noreply.github.com> Date: Mon, 30 Mar 2026 16:24:42 +0100 Subject: [PATCH 19/27] chore(SOG-480): Add tests for mount module --- Cargo.lock | 1 + Cargo.toml | 1 + src/mount.rs | 402 +++++++++++++++++++++++++++++++++++++++++++++++---- 3 files changed, 377 insertions(+), 27 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5621a85..d84455c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1405,6 +1405,7 @@ dependencies = [ "reqwest", "serde", "serde_json", + "tempfile", "thiserror", "tokio", ] diff --git a/Cargo.toml b/Cargo.toml index 2fba260..263a3e8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -21,3 +21,4 @@ libc = "0.2" [dev-dependencies] httpmock = "0.7" +tempfile = "3" diff --git a/src/mount.rs b/src/mount.rs index 6efcf27..6bfb78f 100644 --- a/src/mount.rs +++ b/src/mount.rs @@ -1,8 +1,38 @@ +//! Mount and unmount operations for making RSE data accessible to users. +//! +//! Uses `bindfs` and `mount --bind` to remap filesystem permissions, exposing a file from the RSE storage +//! at `/skadata` into the user's home directory under `~/projects//`. 
+ use anyhow::{Context, Result}; use std::fs; -use std::path::{Path, PathBuf}; +use std::path::Path; use std::process::Command; +/// Abstraction over system commands, allowing real system calls in production and mock system calls during testing. +trait Runner { + /// Executes an external command, returning an error if it exits non-zero. + /// + /// * `cmd` - The command to run (e.g. `"bindfs"`, `"mount"`, `"chown"`). + /// * `args` - Arguments to pass to the command. + /// * `description` - Human-readable label included in any error message. + fn run_command(&self, cmd: &str, args: &[&str], description: &str) -> Result<()>; + + /// Returns `true` if `path` is an active mount point. + fn is_mountpoint(&self, path: &Path) -> Result; +} + +/// Production [`Runner`] that delegates to the real system commands. +struct SystemRunner; + +impl Runner for SystemRunner { + fn run_command(&self, cmd: &str, args: &[&str], description: &str) -> Result<()> { + run_command(cmd, args, description) + } + fn is_mountpoint(&self, path: &Path) -> Result { + is_mountpoint(path) + } +} + /// Mounts a data file from the RSE storage to the user's home directory using bindfs. /// /// Creates necessary directories and bind mounts to make the data file accessible to the user @@ -30,16 +60,45 @@ use std::process::Command; /// mount_operation("/daac/08/06/2022-01-01_12-00-00.fits", "daac", "jsmith")?; /// ``` pub fn mount_operation(data_path: &str, namespace: &str, sudo_user: &str) -> Result<()> { + mount_operation_impl( + data_path, + namespace, + sudo_user, + Path::new("/skadata"), + Path::new("/home"), + &SystemRunner, + ) +} + +/// Internal implementation of the mount operation, parameterized over the base paths and command runner for testing. 
+fn mount_operation_impl( + data_path: &str, + namespace: &str, + sudo_user: &str, + skadata_base: &Path, + home_base: &Path, + runner: &dyn Runner, +) -> Result<()> { + if !skadata_base.exists() { + anyhow::bail!( + "The RSE mount point {} does not exist on this host. \ + Please ensure the RSE is mounted to the host before using pathFinder.", + skadata_base.display() + ); + } + let data_path = Path::new(data_path); - let data_file = data_path.file_name() + let data_file = data_path + .file_name() .context("Invalid FITS path")? .to_str() .context("Invalid characters in filename that cannot be represented in UTF-8")?; - let data_dir = data_path.parent() + let data_dir = data_path + .parent() .and_then(|p| p.to_str()) .unwrap_or("") - .trim_start_matches('/'); // Strip leading slash for proper path joining + .trim_start_matches('/'); // Strip leading slash for proper path joining // Extract the bind name from the filename (remove extension) let bind_name = data_file @@ -47,21 +106,27 @@ pub fn mount_operation(data_path: &str, namespace: &str, sudo_user: &str) -> Res .map(|(base, _)| base) .unwrap_or(data_file); - let home = PathBuf::from("/home").join(sudo_user); + let home = home_base.join(sudo_user); let bind_dir = home.join(".binds").join(bind_name); let projects_dir = home.join("projects").join(namespace); let projects_file = projects_dir.join(data_file); - // TODO: Read the SKA data base path (default: `/skadata`) from config or env variable instead of hardcoding - check it exists at startup - let skadata_src = PathBuf::from("/skadata").join(data_dir); + let skadata_src = skadata_base.join(data_dir); + let skadata_file = skadata_src.join(data_file); - // TODO: Check if already mounted - if so, check that the file is also mounted to the projects directory; if both true: bail - if is_mountpoint(&bind_dir)? { + if !skadata_file.exists() { anyhow::bail!( - "{} is already mounted.", - bind_dir.display() + "File '{}' not found at {}. 
The RSE may not be mounted at this site, \ + or the specific data may not have been staged here.", + data_file, + skadata_src.display() ); } + // TODO: Check if already mounted - if so, check that the file is also mounted to the projects directory; if both true: bail + if runner.is_mountpoint(&bind_dir)? { + anyhow::bail!("{} is already mounted.", bind_dir.display()); + } + // Create directories fs::create_dir_all(&bind_dir) .with_context(|| format!("Failed to create {}", bind_dir.display()))?; @@ -73,12 +138,17 @@ pub fn mount_operation(data_path: &str, namespace: &str, sudo_user: &str) -> Res .create(true) .write(true) .open(&projects_file) - .with_context(|| format!("Failed to create placeholder file {}", projects_file.display()))?; + .with_context(|| { + format!( + "Failed to create placeholder file {}", + projects_file.display() + ) + })?; // Set ownership and permissions let user_group = format!("{}:{}", sudo_user, sudo_user); - run_command( + runner.run_command( "chown", &["-R", &user_group, home.join(".binds").to_str().unwrap()], "Set ownership of .binds directory", @@ -92,7 +162,7 @@ pub fn mount_operation(data_path: &str, namespace: &str, sudo_user: &str) -> Res fs::set_permissions(&bind_dir, perms)?; } - run_command( + runner.run_command( "chown", &["-R", &user_group, projects_dir.to_str().unwrap()], "Set ownership of projects directory", @@ -107,7 +177,7 @@ pub fn mount_operation(data_path: &str, namespace: &str, sudo_user: &str) -> Res } // Run bindfs - run_command( + runner.run_command( "bindfs", &[ "--perms=0700", @@ -121,7 +191,7 @@ pub fn mount_operation(data_path: &str, namespace: &str, sudo_user: &str) -> Res // Bind mount the file let source_file = bind_dir.join(data_file); - run_command( + runner.run_command( "mount", &[ "--bind", @@ -132,7 +202,7 @@ pub fn mount_operation(data_path: &str, namespace: &str, sudo_user: &str) -> Res )?; // Verify mount - if is_mountpoint(&projects_file)? { + if runner.is_mountpoint(&projects_file)? 
{
         println!(
             "Mount verification successful: {} is mounted at {}",
             data_file,
@@ -149,9 +219,30 @@ pub fn mount_operation(data_path: &str, namespace: &str, sudo_user: &str) -> Res
     Ok(())
 }
 
+/// Unmounts a previously mounted data file and cleans up the associated directories.
+///
+/// Unmounts the bind-mounted file at `~/projects/<namespace>/<file>` and the
+/// bindfs directory at `~/.binds/<bind_name>`, then removes both from the filesystem.
+/// Unmount errors are ignored in case the paths are not currently mounted.
+///
+/// # Arguments
+/// * `data_path` - Full path to the data file on the RSE (used to derive the filename)
+/// * `namespace` - The namespace the file belongs to (e.g. `"daac"`)
+/// * `sudo_user` - The user whose home directory the mounts live under
 pub fn unmount_operation(data_path: &str, namespace: &str, sudo_user: &str) -> Result<()> {
+    unmount_operation_impl(data_path, namespace, sudo_user, Path::new("/home"))
+}
+
+/// Internal implementation of the unmount operation, parameterized over the home base path for testing.
+fn unmount_operation_impl(
+    data_path: &str,
+    namespace: &str,
+    sudo_user: &str,
+    home_base: &Path,
+) -> Result<()> {
     let data_path = Path::new(data_path);
-    let data_file = data_path.file_name()
+    let data_file = data_path
+        .file_name()
         .context("Invalid FITS path")?
.to_str() .context("Invalid UTF-8 in filename")?; @@ -161,14 +252,22 @@ pub fn unmount_operation(data_path: &str, namespace: &str, sudo_user: &str) -> R .map(|(base, _)| base) .unwrap_or(data_file); - let home = PathBuf::from("/home").join(sudo_user); + let home = home_base.join(sudo_user); let bind_dir = home.join(".binds").join(bind_name); let projects_dir = home.join("projects").join(namespace); let projects_file = projects_dir.join(data_file); // Unmount (ignore errors if not mounted) - let _ = run_command("umount", &[projects_file.to_str().unwrap()], "Unmount projects file"); - let _ = run_command("umount", &[bind_dir.to_str().unwrap()], "Unmount bind directory"); + let _ = run_command( + "umount", + &[projects_file.to_str().unwrap()], + "Unmount projects file", + ); + let _ = run_command( + "umount", + &[bind_dir.to_str().unwrap()], + "Unmount bind directory", + ); // Remove directories/files if bind_dir.exists() { @@ -186,6 +285,7 @@ pub fn unmount_operation(data_path: &str, namespace: &str, sudo_user: &str) -> R Ok(()) } +/// Returns `true` if the given path is a mount point, determined using the `mountpoint` command. fn is_mountpoint(path: &Path) -> Result { let output = Command::new("mountpoint") .arg("-q") @@ -196,8 +296,8 @@ fn is_mountpoint(path: &Path) -> Result { Ok(output.status.success()) } +/// Helper function to run a system command and return an error if it fails, including the command's stderr in the error message. 
fn run_command(cmd: &str, args: &[&str], description: &str) -> Result<()> { - let output = Command::new(cmd) .args(args) .output() @@ -205,12 +305,260 @@ fn run_command(cmd: &str, args: &[&str], description: &str) -> Result<()> { if !output.status.success() { let stderr = String::from_utf8_lossy(&output.stderr); - anyhow::bail!( - "{} failed: {}", - description, - stderr.trim() - ); + anyhow::bail!("{} failed: {}", description, stderr.trim()); } Ok(()) } + +#[cfg(test)] +mod tests { + use std::cell::Cell; + + use super::*; + use tempfile::TempDir; + + // Real-world data path as returned by the DM API locate endpoint. + const DATA_PATH: &str = "/daac/08/06/random10MiB.bin"; + const NAMESPACE: &str = "daac"; + const USER: &str = "jsmith"; + + // Populate skadata_base// so the existence check passes. + fn seed_skadata(skadata: &Path) { + let data_dir = skadata.join("daac/08/06"); + fs::create_dir_all(&data_dir).unwrap(); + fs::write(data_dir.join("random10MiB.bin"), b"").unwrap(); + } + + /// Mock runner that accepts all commands as no-ops. + /// + /// `is_mountpoint` returns `false` on the first call (the "already mounted?" guard) + /// and `true` on subsequent calls (the post-mount verification). 
+    struct MockRunner {
+        mountpoint_calls: Cell<u32>,
+    }
+
+    impl MockRunner {
+        fn new() -> Self {
+            Self {
+                mountpoint_calls: Cell::new(0),
+            }
+        }
+    }
+
+    impl Runner for MockRunner {
+        fn run_command(&self, _cmd: &str, _args: &[&str], _description: &str) -> Result<()> {
+            Ok(())
+        }
+        fn is_mountpoint(&self, _path: &Path) -> Result<bool> {
+            let n = self.mountpoint_calls.get();
+            self.mountpoint_calls.set(n + 1);
+            Ok(n > 0)
+        }
+    }
+
+    // --- mount: /skadata mount point ---
+
+    #[test]
+    fn mount_errors_when_skadata_does_not_exist() {
+        let tmp = TempDir::new().unwrap();
+        let skadata = tmp.path().join("skadata"); // intentionally not created
+        let home = tmp.path().join("home");
+
+        let err = mount_operation_impl(DATA_PATH, NAMESPACE, USER, &skadata, &home, &SystemRunner)
+            .unwrap_err();
+        let msg = err.to_string();
+        assert!(msg.contains("does not exist"), "{msg}");
+        assert!(msg.contains("RSE"), "{msg}");
+    }
+
+    // --- mount: file not present in /skadata ---
+
+    #[test]
+    fn mount_errors_when_file_not_staged_to_local_rse() {
+        let tmp = TempDir::new().unwrap();
+        let skadata = tmp.path().join("skadata");
+        fs::create_dir_all(&skadata).unwrap(); // skadata exists but file is absent
+        let home = tmp.path().join("home");
+
+        let err = mount_operation_impl(DATA_PATH, NAMESPACE, USER, &skadata, &home, &SystemRunner)
+            .unwrap_err();
+        let msg = err.to_string();
+        assert!(msg.contains("random10MiB.bin"), "{msg}");
+        assert!(msg.contains("not found") || msg.contains("staged"), "{msg}");
+    }
+
+    #[test]
+    fn mount_errors_when_skadata_dir_exists_but_subdirectory_is_absent() {
+        // skadata exists but the namespace subdirectory (daac/08/06) does not
+        let tmp = TempDir::new().unwrap();
+        let skadata = tmp.path().join("skadata");
+        fs::create_dir_all(&skadata).unwrap();
+        let home = tmp.path().join("home");
+
+        let err = mount_operation_impl(
+            "/daac/08/06/random10MiB.bin",
+            NAMESPACE,
+            USER,
+            &skadata,
+            &home,
+            &SystemRunner,
+        )
+        .unwrap_err();
+        let msg =
err.to_string(); + assert!(msg.contains("random10MiB.bin"), "{msg}"); + } + + // --- mount: path edge cases --- + + #[test] + fn mount_errors_on_path_with_no_filename() { + let tmp = TempDir::new().unwrap(); + let skadata = tmp.path().join("skadata"); + fs::create_dir_all(&skadata).unwrap(); + let home = tmp.path().join("home"); + + // A path of "/" has no file_name component + let err = + mount_operation_impl("/", NAMESPACE, USER, &skadata, &home, &SystemRunner).unwrap_err(); + assert!(err.to_string().to_lowercase().contains("invalid"), "{err}"); + } + + #[test] + fn mount_errors_on_empty_data_path() { + let tmp = TempDir::new().unwrap(); + let skadata = tmp.path().join("skadata"); + fs::create_dir_all(&skadata).unwrap(); + let home = tmp.path().join("home"); + + let err = + mount_operation_impl("", NAMESPACE, USER, &skadata, &home, &SystemRunner).unwrap_err(); + // An empty string has no file_name + assert!(err.to_string().to_lowercase().contains("invalid"), "{err}"); + } + + // --- mount: golden path --- + + #[test] + fn mount_golden_path() { + let tmp = TempDir::new().unwrap(); + let skadata = tmp.path().join("skadata"); + seed_skadata(&skadata); + let home = tmp.path().join("home"); + + mount_operation_impl( + DATA_PATH, + NAMESPACE, + USER, + &skadata, + &home, + &MockRunner::new(), + ) + .unwrap(); + + let bind_dir = home.join(USER).join(".binds").join("random10MiB"); + let projects_file = home + .join(USER) + .join("projects") + .join(NAMESPACE) + .join("random10MiB.bin"); + assert!(bind_dir.exists(), "bind_dir should have been created"); + assert!( + projects_file.exists(), + "projects_file should have been created" + ); + } + + // --- unmount: path edge cases --- + + #[test] + fn unmount_errors_on_path_with_no_filename() { + let tmp = TempDir::new().unwrap(); + let home = tmp.path().join("home"); + + let err = unmount_operation_impl("/", NAMESPACE, USER, &home).unwrap_err(); + assert!(err.to_string().to_lowercase().contains("invalid"), "{err}"); + } + + 
// --- unmount: nothing mounted --- + + #[test] + fn unmount_succeeds_when_nothing_is_mounted() { + let tmp = TempDir::new().unwrap(); + let home = tmp.path().join("home"); + // Neither bind_dir nor projects_file exist — should succeed gracefully. + unmount_operation_impl(DATA_PATH, NAMESPACE, USER, &home).unwrap(); + } + + // --- unmount: cleanup --- + + #[test] + fn unmount_removes_bind_dir_and_projects_file() { + let tmp = TempDir::new().unwrap(); + let home = tmp.path().join("home"); + let bind_dir = home.join(USER).join(".binds").join("random10MiB"); + let projects_dir = home.join(USER).join("projects").join(NAMESPACE); + let projects_file = projects_dir.join("random10MiB.bin"); + + fs::create_dir_all(&bind_dir).unwrap(); + fs::create_dir_all(&projects_dir).unwrap(); + fs::write(&projects_file, b"").unwrap(); + + unmount_operation_impl(DATA_PATH, NAMESPACE, USER, &home).unwrap(); + + assert!(!bind_dir.exists(), "bind_dir should have been removed"); + assert!( + !projects_file.exists(), + "projects_file should have been removed" + ); + } + + #[test] + fn unmount_succeeds_when_only_bind_dir_exists() { + let tmp = TempDir::new().unwrap(); + let home = tmp.path().join("home"); + let bind_dir = home.join(USER).join(".binds").join("random10MiB"); + fs::create_dir_all(&bind_dir).unwrap(); + + unmount_operation_impl(DATA_PATH, NAMESPACE, USER, &home).unwrap(); + + assert!(!bind_dir.exists(), "bind_dir should have been removed"); + } + + #[test] + fn unmount_succeeds_when_only_projects_file_exists() { + let tmp = TempDir::new().unwrap(); + let home = tmp.path().join("home"); + let projects_dir = home.join(USER).join("projects").join(NAMESPACE); + let projects_file = projects_dir.join("random10MiB.bin"); + fs::create_dir_all(&projects_dir).unwrap(); + fs::write(&projects_file, b"").unwrap(); + + unmount_operation_impl(DATA_PATH, NAMESPACE, USER, &home).unwrap(); + + assert!( + !projects_file.exists(), + "projects_file should have been removed" + ); + } + + #[test] + fn 
unmount_leaves_other_files_in_projects_dir_intact() { + let tmp = TempDir::new().unwrap(); + let home = tmp.path().join("home"); + let projects_dir = home.join(USER).join("projects").join(NAMESPACE); + let target_file = projects_dir.join("random10MiB.bin"); + let other_file = projects_dir.join("other_file.bin"); + fs::create_dir_all(&projects_dir).unwrap(); + fs::write(&target_file, b"").unwrap(); + fs::write(&other_file, b"untouched").unwrap(); + + unmount_operation_impl(DATA_PATH, NAMESPACE, USER, &home).unwrap(); + + assert!(!target_file.exists(), "target file should be removed"); + assert!( + other_file.exists(), + "other file in same dir should be untouched" + ); + } +} From b3fe2c59e2e6739af01b00ce049ab32cfb317772 Mon Sep 17 00:00:00 2001 From: Roger Duthie <343584+rjaduthie@users.noreply.github.com> Date: Mon, 30 Mar 2026 16:41:53 +0100 Subject: [PATCH 20/27] chore(SOG-480): Add tests and docs to oauth2 module --- src/main.rs | 4 +- src/oauth2.rs | 699 +++++++++++++++++++++++++++++++++++++++++++++ src/oauth2_auth.rs | 264 ----------------- 3 files changed, 701 insertions(+), 266 deletions(-) create mode 100644 src/oauth2.rs delete mode 100644 src/oauth2_auth.rs diff --git a/src/main.rs b/src/main.rs index db755c5..2c45d34 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,7 +1,7 @@ mod api_client; mod models; mod mount; -mod oauth2_auth; +mod oauth2; use anyhow::{Context, Result}; use clap::Parser; @@ -12,7 +12,7 @@ use std::process::exit; use api_client::{ApiClient, PathFinderApiClient}; use models::{DataLocation, StorageAreaIDToNodeAndSite}; -use oauth2_auth::{authenticate, Tokens}; +use oauth2::{authenticate, Tokens}; #[derive(Parser, Debug)] #[command(name = "path-finder")] diff --git a/src/oauth2.rs b/src/oauth2.rs new file mode 100644 index 0000000..5973b9b --- /dev/null +++ b/src/oauth2.rs @@ -0,0 +1,699 @@ +//! OAuth2 device code flow authentication for the SRCNet APIs. +//! +//! Implements the device authorisation grant. The user is directed to a +//! 
browser URL to authenticate; once approved the resulting OIDC token is exchanged +//! for API-specific access tokens for the Data Management and Site Capabilities APIs. +//! Tokens are cached on disk (mode `0600`) and reused until they expire. + +use anyhow::{Context, Result}; +use reqwest::blocking::Client; +use serde::{Deserialize, Serialize}; +use std::fs; +use std::path::{Path, PathBuf}; +use std::thread; +use std::time::{Duration, SystemTime}; + +const AUTHN_BASE_URL: &str = "https://authn.srcnet.skao.int/api/v1"; +const DATA_MANAGEMENT: &str = "data-management-api"; +const SITE_CAPABILITIES: &str = "site-capabilities-api"; + +/// API access tokens for the Data Management and Site Capabilities APIs. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Tokens { + pub data_management_token: String, + pub site_capabilities_token: String, +} + +/// Response from the device code initiation endpoint (`GET /login/device`). +#[derive(Debug, Deserialize)] +struct DeviceCodeResponse { + /// Opaque code used to poll for the access token. + device_code: String, + /// Short human-readable code shown to the user. + user_code: String, + /// URL the user must visit to complete authentication. + verification_uri: String, + /// Recommended polling interval in seconds (default: 5). + #[serde(default = "default_interval")] + interval: u64, + /// Token lifetime in seconds — present in the response but not used by this client. + #[serde(default)] + _expires_in: Option, +} + +fn default_interval() -> u64 { + 5 +} + +/// Response from the token polling endpoint (`GET /token`) and the token exchange endpoint. +#[derive(Debug, Deserialize)] +struct TokenResponse { + /// Nested token object returned by some API versions. + token: Option, + /// Flat access token returned by other API versions. + access_token: Option, + /// OAuth2 error code (e.g. `"authorization_pending"`, `"slow_down"`). + error: Option, + /// Human-readable elaboration of `error`. 
+    error_description: Option<String>,
+    /// Detail string that may embed a JSON error payload.
+    detail: Option<String>,
+}
+
+/// Nested token object within a [`TokenResponse`].
+#[derive(Debug, Deserialize)]
+struct TokenData {
+    access_token: String,
+}
+
+/// Tokens serialised to the on-disk cache file, together with their expiry timestamp.
+#[derive(Debug, Serialize, Deserialize)]
+struct CachedTokens {
+    tokens: Tokens,
+    /// Unix timestamp (seconds since the epoch) after which the tokens are considered expired.
+    expires_at: u64,
+}
+
+/// Authenticates the user via the OAuth2 device code flow and returns API access tokens.
+///
+/// If `use_cache` is `true` and a valid (non-expired) token set exists on disk, those tokens
+/// are returned immediately without prompting the user. Otherwise the full device-code flow is
+/// performed: the user is directed to a browser URL, and once authenticated the resulting tokens
+/// are cached for subsequent calls.
+pub fn authenticate(use_cache: bool) -> Result<Tokens> {
+    authenticate_impl(use_cache, AUTHN_BASE_URL, None)
+}
+
+/// Inner implementation of [`authenticate`] with injectable base URL and cache path for testing.
+fn authenticate_impl(use_cache: bool, base_url: &str, cache_path: Option<&Path>) -> Result<Tokens> {
+    if use_cache {
+        if let Some(cached) = load_tokens(cache_path)?
{ + return Ok(cached); + } + } + + let client = Client::new(); + + let device_info = initiate_device_code_flow(&client, base_url)?; + display_user_instructions(&device_info); + + let auth_token = poll_for_authentication( + &client, + base_url, + &device_info.device_code, + device_info.interval, + )?; + + let dm_token = exchange_token_for_api_token(&client, base_url, &auth_token, DATA_MANAGEMENT)?; + let sc_token = exchange_token_for_api_token(&client, base_url, &auth_token, SITE_CAPABILITIES)?; + + let tokens = Tokens { + data_management_token: dm_token, + site_capabilities_token: sc_token, + }; + + save_tokens(cache_path, &tokens, 3600)?; + + Ok(tokens) +} + +/// Initiates the device code flow by calling `GET /login/device`. +/// +/// Returns the server's [`DeviceCodeResponse`] containing the `device_code` to poll with +/// and the `verification_uri` + `user_code` to display to the user. +fn initiate_device_code_flow(client: &Client, base_url: &str) -> Result { + let url = format!("{}/login/device", base_url); + let response = client + .get(&url) + .timeout(Duration::from_secs(10)) + .send() + .context("Failed to initiate device code flow")?; + + response + .error_for_status() + .context("Device code flow request failed")? + .json() + .context("Failed to parse device code response") +} + +/// Prints the authentication URL and user code to stdout so the user knows where to go. +fn display_user_instructions(device_info: &DeviceCodeResponse) { + println!("\nACTION REQUIRED:"); + println!( + " Open this URL in a browser and authenticate: {}?user_code={}", + device_info.verification_uri, device_info.user_code + ); + println!("\nWaiting for authentication (timeout: 5 minutes)..."); +} + +/// Polls `GET /token` until the user authorises the device or the 5-minute timeout is reached. 
+/// +/// Handles the following RFC 8628 polling error codes: +/// - `authorization_pending` — keeps polling at the current interval +/// - `slow_down` — backs off by 5 seconds and keeps polling +/// - `expired_token` / `access_denied` — bails immediately with a descriptive error +fn poll_for_authentication( + client: &Client, + base_url: &str, + device_code: &str, + mut interval: u64, +) -> Result { + let timeout = Duration::from_secs(300); + let start = SystemTime::now(); + + loop { + if start.elapsed()? > timeout { + anyhow::bail!("Authorization timeout. Please try again."); + } + + let url = format!("{}/token", base_url); + let response = client + .get(&url) + .query(&[("device_code", device_code)]) + .timeout(Duration::from_secs(10)) + .send() + .context("Failed to poll for authentication")?; + + if response.status().is_success() { + let token_data: TokenResponse = response.json()?; + + if let Some(token) = token_data.token { + return Ok(token.access_token); + } else if let Some(access_token) = token_data.access_token { + return Ok(access_token); + } else { + anyhow::bail!("No access token in response"); + } + } + + let error_data: TokenResponse = response.json()?; + let error = parse_error_response(&error_data); + + match error.as_deref() { + Some("authorization_pending") => { + thread::sleep(Duration::from_secs(interval)); + continue; + } + Some("slow_down") => { + interval += 5; + thread::sleep(Duration::from_secs(interval)); + continue; + } + Some("expired_token") => { + anyhow::bail!("Device code expired. Please try again."); + } + Some("access_denied") => { + anyhow::bail!("User denied authorization."); + } + Some(err) => { + let msg = error_data + .error_description + .map(|d| format!("{}: {}", err, d)) + .unwrap_or_else(|| err.to_string()); + anyhow::bail!("Authorization error: {}", msg); + } + None => { + anyhow::bail!("Unknown authorization error"); + } + } + } +} + +/// Extracts the OAuth2 error code from a [`TokenResponse`]. 
+///
+/// Some API responses embed a JSON error payload inside a `detail` string of the form
+/// `"response: {...}"`. This function attempts to parse that first before falling back
+/// to the top-level `error` field.
+fn parse_error_response(error_data: &TokenResponse) -> Option<String> {
+    if let Some(detail) = &error_data.detail {
+        // Try to extract JSON from "response: {...}" pattern
+        if let Some(start) = detail.find("response:") {
+            let json_part = detail[start + 9..].trim();
+            if let Ok(embedded) = serde_json::from_str::<TokenResponse>(json_part) {
+                if embedded.error.is_some() {
+                    return embedded.error;
+                }
+            }
+        }
+    }
+    error_data.error.clone()
+}
+
+/// Exchanges the OIDC auth token for an API-specific access token via
+/// `GET /token/exchange/<api_name>`.
+fn exchange_token_for_api_token(
+    client: &Client,
+    base_url: &str,
+    auth_token: &str,
+    api_name: &str,
+) -> Result<String> {
+    let url = format!("{}/token/exchange/{}", base_url, api_name);
+    let response = client
+        .get(&url)
+        .header("Content-Type", "application/json")
+        .query(&[
+            ("version", "latest"),
+            ("try_use_cache", "false"),
+            ("access_token", auth_token),
+        ])
+        .timeout(Duration::from_secs(10))
+        .send()
+        .with_context(|| format!("Failed to exchange token for {} API", api_name))?;
+
+    let token_data: TokenResponse = response
+        .error_for_status()
+        .with_context(|| format!("Token exchange failed for {}", api_name))?
+        .json()?;
+
+    if let Some(token) = token_data.token {
+        Ok(token.access_token)
+    } else if let Some(access_token) = token_data.access_token {
+        Ok(access_token)
+    } else {
+        anyhow::bail!("No access token in response for {}", api_name)
+    }
+}
+
+/// Returns the path to the on-disk token cache file, creating intermediate directories if needed.
+fn get_token_cache_path() -> Result<PathBuf> {
+    let config_dir = dirs::config_dir()
+        .context("Failed to find config directory")?
+        .join("path-finder");
+
+    fs::create_dir_all(&config_dir)?;
+    Ok(config_dir.join("tokens.json"))
+}
+
+/// Saves tokens to the cache, using `cache_path` if provided or the default path otherwise.
+fn save_tokens(cache_path: Option<&Path>, tokens: &Tokens, expires_in: u64) -> Result<()> {
+    let default_path;
+    let path = match cache_path {
+        Some(p) => p,
+        None => {
+            default_path = get_token_cache_path()?;
+            &default_path
+        }
+    };
+    save_tokens_to_path(tokens, expires_in, path)
+}
+
+/// Serialises `tokens` to `path` with a Unix timestamp expiry, setting file permissions to `0600`.
+fn save_tokens_to_path(tokens: &Tokens, expires_in: u64, path: &Path) -> Result<()> {
+    let expires_at = SystemTime::now()
+        .duration_since(SystemTime::UNIX_EPOCH)?
+        .as_secs()
+        + expires_in;
+
+    let cached = CachedTokens {
+        tokens: tokens.clone(),
+        expires_at,
+    };
+
+    let json = serde_json::to_string_pretty(&cached)?;
+    fs::write(path, json)?;
+
+    #[cfg(unix)]
+    {
+        use std::os::unix::fs::PermissionsExt;
+        let mut perms = fs::metadata(path)?.permissions();
+        perms.set_mode(0o600);
+        fs::set_permissions(path, perms)?;
+    }
+
+    println!("Tokens cached for {} seconds", expires_in);
+    Ok(())
+}
+
+/// Loads tokens from the cache at `cache_path`, or the default path if `None`.
+///
+/// Returns `None` if the cache file does not exist or the tokens have expired.
+fn load_tokens(cache_path: Option<&Path>) -> Result<Option<Tokens>> {
+    let default_path;
+    let path = match cache_path {
+        Some(p) => p,
+        None => {
+            default_path = get_token_cache_path()?;
+            &default_path
+        }
+    };
+    load_tokens_from_path(path)
+}
+
+/// Reads and deserialises tokens from `path`, returning `None` if absent or expired.
+fn load_tokens_from_path(path: &Path) -> Result> { + if !path.exists() { + return Ok(None); + } + + let contents = fs::read_to_string(path)?; + let cached: CachedTokens = serde_json::from_str(&contents).context("Invalid cache file")?; + + let now = SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH)? + .as_secs(); + + if now >= cached.expires_at { + println!("Cached tokens expired"); + return Ok(None); + } + + println!("Using cached tokens"); + Ok(Some(cached.tokens)) +} + +#[cfg(test)] +mod tests { + use super::*; + use httpmock::prelude::*; + use tempfile::TempDir; + + fn make_tokens() -> Tokens { + Tokens { + data_management_token: "dm-token-abc".to_string(), + site_capabilities_token: "sc-token-xyz".to_string(), + } + } + + // --- parse_error_response --- + + #[test] + fn parse_error_returns_none_when_no_error_fields() { + let resp = TokenResponse { + token: None, + access_token: None, + error: None, + error_description: None, + detail: None, + }; + assert!(parse_error_response(&resp).is_none()); + } + + #[test] + fn parse_error_returns_top_level_error_field() { + let resp = TokenResponse { + token: None, + access_token: None, + error: Some("access_denied".to_string()), + error_description: None, + detail: None, + }; + assert_eq!( + parse_error_response(&resp).as_deref(), + Some("access_denied") + ); + } + + #[test] + fn parse_error_extracts_error_from_embedded_detail_json() { + let resp = TokenResponse { + token: None, + access_token: None, + error: None, + error_description: None, + detail: Some( + r#"response: {"error": "authorization_pending", "error_description": null}"# + .to_string(), + ), + }; + assert_eq!( + parse_error_response(&resp).as_deref(), + Some("authorization_pending") + ); + } + + #[test] + fn parse_error_falls_back_to_top_level_when_detail_json_is_malformed() { + let resp = TokenResponse { + token: None, + access_token: None, + error: Some("slow_down".to_string()), + error_description: None, + detail: Some("response: not valid json 
at all".to_string()), + }; + assert_eq!(parse_error_response(&resp).as_deref(), Some("slow_down")); + } + + #[test] + fn parse_error_prefers_embedded_detail_over_top_level_error() { + let resp = TokenResponse { + token: None, + access_token: None, + error: Some("top_level_error".to_string()), + error_description: None, + detail: Some(r#"response: {"error": "embedded_error"}"#.to_string()), + }; + assert_eq!( + parse_error_response(&resp).as_deref(), + Some("embedded_error") + ); + } + + // --- token cache round-trip --- + + #[test] + fn cache_round_trip_returns_same_tokens() { + let tmp = TempDir::new().unwrap(); + let path = tmp.path().join("tokens.json"); + let tokens = make_tokens(); + + save_tokens_to_path(&tokens, 3600, &path).unwrap(); + let loaded = load_tokens_from_path(&path).unwrap().unwrap(); + + assert_eq!(loaded.data_management_token, tokens.data_management_token); + assert_eq!( + loaded.site_capabilities_token, + tokens.site_capabilities_token + ); + } + + #[test] + fn load_tokens_returns_none_when_file_absent() { + let tmp = TempDir::new().unwrap(); + let path = tmp.path().join("tokens.json"); + + let result = load_tokens_from_path(&path).unwrap(); + assert!(result.is_none()); + } + + #[test] + fn load_tokens_returns_none_when_expired() { + let tmp = TempDir::new().unwrap(); + let path = tmp.path().join("tokens.json"); + + // Write a cache file whose expires_at is already in the past. 
+ let expired = CachedTokens { + tokens: make_tokens(), + expires_at: 1, // 1970 — definitely expired + }; + fs::write(&path, serde_json::to_string(&expired).unwrap()).unwrap(); + + let result = load_tokens_from_path(&path).unwrap(); + assert!(result.is_none()); + } + + #[test] + fn load_tokens_errors_on_malformed_json() { + let tmp = TempDir::new().unwrap(); + let path = tmp.path().join("tokens.json"); + fs::write(&path, "not json").unwrap(); + + let err = load_tokens_from_path(&path).unwrap_err(); + assert!(err.to_string().contains("Invalid cache file"), "{err}"); + } + + // --- initiate_device_code_flow --- + + #[test] + fn initiate_device_code_flow_parses_success_response() { + let server = MockServer::start(); + server.mock(|when, then| { + when.method(GET).path("/login/device"); + then.status(200).body( + r#"{ + "device_code": "dev-code-abc", + "user_code": "ABCD-1234", + "verification_uri": "https://authn.srcnet.skao.int/device", + "interval": 5 + }"#, + ); + }); + + let client = Client::new(); + let resp = initiate_device_code_flow(&client, &server.base_url()).unwrap(); + assert_eq!(resp.device_code, "dev-code-abc"); + assert_eq!(resp.user_code, "ABCD-1234"); + assert_eq!(resp.interval, 5); + } + + #[test] + fn initiate_device_code_flow_uses_default_interval_when_absent() { + let server = MockServer::start(); + server.mock(|when, then| { + when.method(GET).path("/login/device"); + then.status(200).body( + r#"{ + "device_code": "dev-code-abc", + "user_code": "ABCD-1234", + "verification_uri": "https://authn.srcnet.skao.int/device" + }"#, + ); + }); + + let client = Client::new(); + let resp = initiate_device_code_flow(&client, &server.base_url()).unwrap(); + assert_eq!(resp.interval, 5); + } + + #[test] + fn initiate_device_code_flow_propagates_http_error() { + let server = MockServer::start(); + server.mock(|when, then| { + when.method(GET).path("/login/device"); + then.status(500); + }); + + let client = Client::new(); + let err = 
initiate_device_code_flow(&client, &server.base_url()).unwrap_err(); + assert!( + err.to_string().contains("Device code flow request failed"), + "{err}" + ); + } + + // --- poll_for_authentication --- + + #[test] + fn poll_returns_nested_token_on_success() { + let server = MockServer::start(); + server.mock(|when, then| { + when.method(GET).path("/token"); + then.status(200) + .body(r#"{"token": {"access_token": "oidc-token-abc"}}"#); + }); + + let client = Client::new(); + let token = poll_for_authentication(&client, &server.base_url(), "dev-code", 0).unwrap(); + assert_eq!(token, "oidc-token-abc"); + } + + #[test] + fn poll_returns_flat_access_token_on_success() { + let server = MockServer::start(); + server.mock(|when, then| { + when.method(GET).path("/token"); + then.status(200) + .body(r#"{"access_token": "oidc-token-flat"}"#); + }); + + let client = Client::new(); + let token = poll_for_authentication(&client, &server.base_url(), "dev-code", 0).unwrap(); + assert_eq!(token, "oidc-token-flat"); + } + + #[test] + fn poll_errors_on_expired_token() { + let server = MockServer::start(); + server.mock(|when, then| { + when.method(GET).path("/token"); + then.status(400).body(r#"{"error": "expired_token"}"#); + }); + + let client = Client::new(); + let err = poll_for_authentication(&client, &server.base_url(), "dev-code", 0).unwrap_err(); + assert!(err.to_string().contains("expired"), "{err}"); + } + + #[test] + fn poll_errors_on_access_denied() { + let server = MockServer::start(); + server.mock(|when, then| { + when.method(GET).path("/token"); + then.status(400).body(r#"{"error": "access_denied"}"#); + }); + + let client = Client::new(); + let err = poll_for_authentication(&client, &server.base_url(), "dev-code", 0).unwrap_err(); + assert!(err.to_string().contains("denied"), "{err}"); + } + + // --- exchange_token_for_api_token --- + + #[test] + fn exchange_token_returns_nested_token() { + let server = MockServer::start(); + server.mock(|when, then| { + 
when.method(GET).path("/token/exchange/data-management-api"); + then.status(200) + .body(r#"{"token": {"access_token": "dm-token-abc"}}"#); + }); + + let client = Client::new(); + let token = exchange_token_for_api_token( + &client, + &server.base_url(), + "oidc-token", + "data-management-api", + ) + .unwrap(); + assert_eq!(token, "dm-token-abc"); + } + + #[test] + fn exchange_token_returns_flat_access_token() { + let server = MockServer::start(); + server.mock(|when, then| { + when.method(GET) + .path("/token/exchange/site-capabilities-api"); + then.status(200) + .body(r#"{"access_token": "sc-token-flat"}"#); + }); + + let client = Client::new(); + let token = exchange_token_for_api_token( + &client, + &server.base_url(), + "oidc-token", + "site-capabilities-api", + ) + .unwrap(); + assert_eq!(token, "sc-token-flat"); + } + + #[test] + fn exchange_token_propagates_http_error() { + let server = MockServer::start(); + server.mock(|when, then| { + when.method(GET).path("/token/exchange/data-management-api"); + then.status(401); + }); + + let client = Client::new(); + let err = exchange_token_for_api_token( + &client, + &server.base_url(), + "oidc-token", + "data-management-api", + ) + .unwrap_err(); + assert!(err.to_string().contains("Token exchange failed"), "{err}"); + } + + #[test] + fn exchange_token_errors_when_no_token_in_response() { + let server = MockServer::start(); + server.mock(|when, then| { + when.method(GET).path("/token/exchange/data-management-api"); + then.status(200).body(r#"{"detail": "some_other_field"}"#); + }); + + let client = Client::new(); + let err = exchange_token_for_api_token( + &client, + &server.base_url(), + "oidc-token", + "data-management-api", + ) + .unwrap_err(); + assert!(err.to_string().contains("No access token"), "{err}"); + } +} diff --git a/src/oauth2_auth.rs b/src/oauth2_auth.rs deleted file mode 100644 index d25d375..0000000 --- a/src/oauth2_auth.rs +++ /dev/null @@ -1,264 +0,0 @@ -use anyhow::{Context, Result}; -use 
reqwest::blocking::Client; -use serde::{Deserialize, Serialize}; -use std::fs; -use std::path::PathBuf; -use std::thread; -use std::time::{Duration, SystemTime}; - -const AUTHN_BASE_URL: &str = "https://authn.srcnet.skao.int/api/v1"; -const DATA_MANAGEMENT: &str = "data-management-api"; -const SITE_CAPABILITIES: &str = "site-capabilities-api"; - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct Tokens { - pub data_management_token: String, - pub site_capabilities_token: String, -} - -#[derive(Debug, Deserialize)] -struct DeviceCodeResponse { - device_code: String, - user_code: String, - verification_uri: String, - #[serde(default = "default_interval")] - interval: u64, - #[serde(default)] - _expires_in: Option, -} - -fn default_interval() -> u64 { - 5 -} - -#[derive(Debug, Deserialize)] -struct TokenResponse { - token: Option, - access_token: Option, - error: Option, - error_description: Option, - detail: Option, -} - -#[derive(Debug, Deserialize)] -struct TokenData { - access_token: String, -} - -#[derive(Debug, Serialize, Deserialize)] -struct CachedTokens { - tokens: Tokens, - expires_at: u64, -} - -pub fn authenticate(use_cache: bool) -> Result { - if use_cache { - if let Some(cached) = load_tokens_from_cache()? 
{ - return Ok(cached); - } - } - - let client = Client::new(); - - let device_info = initiate_device_code_flow(&client)?; - display_user_instructions(&device_info); - - let auth_token = poll_for_authentication(&client, &device_info.device_code, device_info.interval)?; - - let dm_token = exchange_token_for_api_token(&client, &auth_token, DATA_MANAGEMENT)?; - let sc_token = exchange_token_for_api_token(&client, &auth_token, SITE_CAPABILITIES)?; - - let tokens = Tokens { - data_management_token: dm_token, - site_capabilities_token: sc_token, - }; - - save_tokens_to_cache(&tokens, 3600)?; - - Ok(tokens) -} - -fn initiate_device_code_flow(client: &Client) -> Result { - let url = format!("{}/login/device", AUTHN_BASE_URL); - let response = client - .get(&url) - .timeout(Duration::from_secs(10)) - .send() - .context("Failed to initiate device code flow")?; - - response - .error_for_status() - .context("Device code flow request failed")? - .json() - .context("Failed to parse device code response") -} - -fn display_user_instructions(device_info: &DeviceCodeResponse) { - println!("\nACTION REQUIRED:"); - println!(" Open this URL in a browser and authenticate: {}?user_code={}", - device_info.verification_uri, device_info.user_code); - println!("\nWaiting for authentication (timeout: 5 minutes)..."); -} - -fn poll_for_authentication(client: &Client, device_code: &str, mut interval: u64) -> Result { - let timeout = Duration::from_secs(300); - let start = SystemTime::now(); - - loop { - if start.elapsed()? > timeout { - anyhow::bail!("Authorization timeout. 
Please try again."); - } - - let url = format!("{}/token", AUTHN_BASE_URL); - let response = client - .get(&url) - .query(&[("device_code", device_code)]) - .timeout(Duration::from_secs(10)) - .send() - .context("Failed to poll for authentication")?; - - if response.status().is_success() { - let token_data: TokenResponse = response.json()?; - - if let Some(token) = token_data.token { - return Ok(token.access_token); - } else if let Some(access_token) = token_data.access_token { - return Ok(access_token); - } else { - anyhow::bail!("No access token in response"); - } - } - - let error_data: TokenResponse = response.json()?; - let error = parse_error_response(&error_data); - - match error.as_deref() { - Some("authorization_pending") => { - thread::sleep(Duration::from_secs(interval)); - continue; - } - Some("slow_down") => { - interval += 5; - thread::sleep(Duration::from_secs(interval)); - continue; - } - Some("expired_token") => { - anyhow::bail!("Device code expired. Please try again."); - } - Some("access_denied") => { - anyhow::bail!("User denied authorization."); - } - Some(err) => { - let msg = error_data.error_description - .map(|d| format!("{}: {}", err, d)) - .unwrap_or_else(|| err.to_string()); - anyhow::bail!("Authorization error: {}", msg); - } - None => { - anyhow::bail!("Unknown authorization error"); - } - } - } -} - -fn parse_error_response(error_data: &TokenResponse) -> Option { - if let Some(detail) = &error_data.detail { - // Try to extract JSON from "response: {...}" pattern - if let Some(start) = detail.find("response:") { - let json_part = &detail[start + 9..].trim(); - if let Ok(embedded) = serde_json::from_str::(json_part) { - if embedded.error.is_some() { - return embedded.error; - } - } - } - } - error_data.error.clone() -} - -fn exchange_token_for_api_token(client: &Client, auth_token: &str, api_name: &str) -> Result { - let url = format!("{}/token/exchange/{}", AUTHN_BASE_URL, api_name); - let response = client - .get(&url) - 
.header("Content-Type", "application/json") - .query(&[ - ("version", "latest"), - ("try_use_cache", "false"), - ("access_token", auth_token), - ]) - .timeout(Duration::from_secs(10)) - .send() - .with_context(|| format!("Failed to exchange token for {} API", api_name))?; - - let token_data: TokenResponse = response - .error_for_status() - .with_context(|| format!("Token exchange failed for {}", api_name))? - .json()?; - - if let Some(token) = token_data.token { - Ok(token.access_token) - } else if let Some(access_token) = token_data.access_token { - Ok(access_token) - } else { - anyhow::bail!("No access token in response for {}", api_name) - } -} - -fn get_token_cache_path() -> Result { - let config_dir = dirs::config_dir() - .context("Failed to find config directory")? - .join("path-finder"); - - fs::create_dir_all(&config_dir)?; - Ok(config_dir.join("tokens.json")) -} - -fn save_tokens_to_cache(tokens: &Tokens, expires_in: u64) -> Result<()> { - let cache_path = get_token_cache_path()?; - let expires_at = SystemTime::now() - .duration_since(SystemTime::UNIX_EPOCH)? - .as_secs() + expires_in; - - let cached = CachedTokens { - tokens: tokens.clone(), - expires_at, - }; - - let json = serde_json::to_string_pretty(&cached)?; - fs::write(&cache_path, json)?; - - #[cfg(unix)] - { - use std::os::unix::fs::PermissionsExt; - let mut perms = fs::metadata(&cache_path)?.permissions(); - perms.set_mode(0o600); - fs::set_permissions(&cache_path, perms)?; - } - - println!("Tokens cached for {} seconds", expires_in); - Ok(()) -} - -fn load_tokens_from_cache() -> Result> { - let cache_path = get_token_cache_path()?; - - if !cache_path.exists() { - return Ok(None); - } - - let contents = fs::read_to_string(&cache_path)?; - let cached: CachedTokens = serde_json::from_str(&contents) - .context("Invalid cache file")?; - - let now = SystemTime::now() - .duration_since(SystemTime::UNIX_EPOCH)? 
- .as_secs(); - - if now >= cached.expires_at { - println!("Cached tokens expired"); - return Ok(None); - } - - println!("Using cached tokens"); - Ok(Some(cached.tokens)) -} From 1524d277143dfde620b0d63e55bc3a5587eecb42 Mon Sep 17 00:00:00 2001 From: Roger Duthie <343584+rjaduthie@users.noreply.github.com> Date: Mon, 30 Mar 2026 17:06:06 +0100 Subject: [PATCH 21/27] refactor(SOG-480): Split up the main module into main, cli and path_finder modules --- src/cli.rs | 85 +++++++++++++++++++++ src/main.rs | 183 ++------------------------------------------- src/path_finder.rs | 117 +++++++++++++++++++++++++++++ 3 files changed, 210 insertions(+), 175 deletions(-) create mode 100644 src/cli.rs create mode 100644 src/path_finder.rs diff --git a/src/cli.rs b/src/cli.rs new file mode 100644 index 0000000..549e698 --- /dev/null +++ b/src/cli.rs @@ -0,0 +1,85 @@ +//! CLI argument parsing and environment bootstrapping. + +use anyhow::{Context, Result}; +use clap::Parser; +use std::env; + +use crate::oauth2::Tokens; + +/// Command-line arguments for pathFinder. +#[derive(Parser, Debug)] +#[command(name = "path-finder")] +#[command(about = "A tool for finding SKA data paths for mounting purposes")] +pub struct Args { + /// Namespace of the data + #[arg(long)] + pub namespace: String, + + /// Name of the data file + #[arg(long)] + pub file_name: String, + + /// Do not use OAuth2 for authentication - use environment variables instead + #[arg(long)] + pub no_login: bool, + + /// Unmount previously mounted data instead of mounting + #[arg(long)] + pub unmount: bool, +} + +/// Checks that the process is running as root via `sudo` and that `SUDO_USER` is set. +/// +/// Exits early with a helpful message if not running with sufficient privileges. 
+pub fn check_privileges(args: &Args) -> Result<()> { + // Check for root privileges early to avoid wasting time on API calls + #[cfg(unix)] + { + let euid = unsafe { libc::geteuid() }; + if euid != 0 { + eprintln!("\nError: This tool requires root privileges for mount/unmount operations."); + eprintln!("Please re-run with sudo:"); + if args.unmount { + eprintln!( + " sudo -E path-finder --namespace {} --file_name {} --unmount", + args.namespace, args.file_name + ); + } else { + eprintln!( + " sudo -E path-finder --namespace {} --file_name {}", + args.namespace, args.file_name + ); + } + anyhow::bail!("Insufficient privileges - sudo required"); + } + + // Verify SUDO_USER is set + if env::var("SUDO_USER").is_err() { + eprintln!("\nWarning: SUDO_USER not set. Are you running as root directly?"); + eprintln!("Please use 'sudo' rather than running as root user."); + anyhow::bail!("SUDO_USER environment variable not set"); + } + } + + #[cfg(not(unix))] + { + anyhow::bail!("This tool is only supported on Unix systems"); + } + + Ok(()) +} + +/// Reads API access tokens from the `DATA_MANAGEMENT_ACCESS_TOKEN` and +/// `SITE_CAPABILITIES_ACCESS_TOKEN` environment variables. 
+pub fn get_tokens_from_env() -> Result { + let dm_token = env::var("DATA_MANAGEMENT_ACCESS_TOKEN") + .context("Please set DATA_MANAGEMENT_ACCESS_TOKEN environment variable or omit --no-login to use OAuth2")?; + + let sc_token = env::var("SITE_CAPABILITIES_ACCESS_TOKEN") + .context("Please set SITE_CAPABILITIES_ACCESS_TOKEN environment variable or omit --no-login to use OAuth2")?; + + Ok(Tokens { + data_management_token: dm_token, + site_capabilities_token: sc_token, + }) +} diff --git a/src/main.rs b/src/main.rs index 2c45d34..6b4c931 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,40 +1,19 @@ mod api_client; +mod cli; mod models; mod mount; mod oauth2; +mod path_finder; use anyhow::{Context, Result}; use clap::Parser; -use regex::Regex; -use std::collections::HashSet; use std::env; use std::process::exit; use api_client::{ApiClient, PathFinderApiClient}; -use models::{DataLocation, StorageAreaIDToNodeAndSite}; +use cli::{Args, check_privileges, get_tokens_from_env}; use oauth2::{authenticate, Tokens}; -#[derive(Parser, Debug)] -#[command(name = "path-finder")] -#[command(about = "A tool for finding SKA data paths for mounting purposes")] -struct Args { - /// Namespace of the data - #[arg(long)] - namespace: String, - - /// Name of the data file - #[arg(long)] - file_name: String, - - /// Do not use OAuth2 for authentication - use environment variables instead - #[arg(long)] - no_login: bool, - - /// Unmount previously mounted data instead of mounting - #[arg(long)] - unmount: bool, -} - fn main() -> Result<()> { let args = Args::parse(); @@ -62,57 +41,6 @@ fn main() -> Result<()> { run(&args.namespace, &args.file_name, &tokens) } -fn check_privileges(args: &Args) -> Result<()> { - // Check for root privileges early to avoid wasting time on API calls - #[cfg(unix)] - { - let euid = unsafe { libc::geteuid() }; - if euid != 0 { - eprintln!("\nError: This tool requires root privileges for mount/unmount operations."); - eprintln!("Please re-run with sudo:"); - if 
args.unmount { - eprintln!( - " sudo -E path-finder --namespace {} --file_name {} --unmount", - args.namespace, args.file_name - ); - } else { - eprintln!( - " sudo -E path-finder --namespace {} --file_name {}", - args.namespace, args.file_name - ); - } - anyhow::bail!("Insufficient privileges - sudo required"); - } - - // Verify SUDO_USER is set - if env::var("SUDO_USER").is_err() { - eprintln!("\nWarning: SUDO_USER not set. Are you running as root directly?"); - eprintln!("Please use 'sudo' rather than running as root user."); - anyhow::bail!("SUDO_USER environment variable not set"); - } - } - - #[cfg(not(unix))] - { - anyhow::bail!("This tool is only supported on Unix systems"); - } - - Ok(()) -} - -fn get_tokens_from_env() -> Result { - let dm_token = env::var("DATA_MANAGEMENT_ACCESS_TOKEN") - .context("Please set DATA_MANAGEMENT_ACCESS_TOKEN environment variable or omit --no-login to use OAuth2")?; - - let sc_token = env::var("SITE_CAPABILITIES_ACCESS_TOKEN") - .context("Please set SITE_CAPABILITIES_ACCESS_TOKEN environment variable or omit --no-login to use OAuth2")?; - - Ok(Tokens { - data_management_token: dm_token, - site_capabilities_token: sc_token, - }) -} - fn run(namespace: &str, file_name: &str, tokens: &Tokens) -> Result<()> { let client = ApiClient::new( tokens.data_management_token.clone(), @@ -124,119 +52,24 @@ fn run(namespace: &str, file_name: &str, tokens: &Tokens) -> Result<()> { let site_storages = client.site_storage_areas()?; let data_locations = client.locate_data(namespace, file_name)?; - print_data_locations_with_sites(&site_storages, &data_locations); + path_finder::print_data_locations_with_sites(&site_storages, &data_locations); - let rse_path = extract_rse_path(&data_locations, namespace, file_name)?; + let rse_path = path_finder::extract_rse_path(&data_locations, namespace, file_name)?; println!( "RSE Path for file '{}' in namespace '{}': {}", file_name, namespace, rse_path ); // Check if the file exists locally - if 
!check_local_file_exists(&rse_path) { + if !path_finder::check_local_file_exists(&rse_path) { println!("\n⚠️ File not found locally! ⚠️"); println!("\nThe file is available at the following locations:"); - print_data_locations_with_sites(&site_storages, &data_locations); + path_finder::print_data_locations_with_sites(&site_storages, &data_locations); println!("\nPlease ensure the data has been staged to this local site before mounting."); exit(1); } - mount_data(&rse_path, namespace)?; - - Ok(()) -} - -fn print_data_locations_with_sites( - site_stores: &StorageAreaIDToNodeAndSite, - data_locations: &[DataLocation], -) { - for location in data_locations { - if let Some((node_name, site_name, area_name)) = - site_stores.get(&location.associated_storage_area_id) - { - println!( - "Data location ID: {}, Storage Area: {} ({}), Node: {}, Site: {}", - location.identifier, - area_name, - location.associated_storage_area_id, - node_name, - site_name - ); - } else { - println!( - "Data location ID: {}, Storage Area ID: {}, Node/Site: Not found", - location.identifier, location.associated_storage_area_id - ); - } - } -} - -fn check_local_file_exists(rse_path: &str) -> bool { - use std::path::Path; - let local_path = format!("/skadata{}", rse_path); - Path::new(&local_path).exists() -} - -fn extract_rse_path( - data_locations: &[DataLocation], - namespace: &str, - file_name: &str, -) -> Result { - let pattern = format!(r"/{}/.*$", regex::escape(namespace)); - let rse_path_regex = Regex::new(&pattern)?; - - let mut matched_paths = HashSet::new(); - let mut unmatched_paths = Vec::new(); - - for location in data_locations { - for uri in &location.replicas { - if let Some(captures) = rse_path_regex.find(uri) { - matched_paths.insert(captures.as_str().to_string()); - } else { - unmatched_paths.push(uri.clone()); - } - } - } - - if !unmatched_paths.is_empty() { - println!( - "Warning: {} URIs did not match the expected pattern.", - unmatched_paths.len() - ); - println!("Unmatched 
URIs: {:?}", unmatched_paths); - } - - if matched_paths.is_empty() { - anyhow::bail!( - "No valid paths found for file '{}' in namespace '{}'.", - file_name, - namespace - ); - } - - if matched_paths.len() > 1 { - println!("Warning: Multiple unique paths found: {:?}", matched_paths); - println!("We should check the path for the local RSE - by cross-referencing with site capabilities."); - anyhow::bail!("Handling multiple matched paths is not implemented."); - } - - Ok(matched_paths.into_iter().next().unwrap()) -} - -fn mount_data(rse_path: &str, namespace: &str) -> Result<()> { - println!( - "Mounting data from RSE path: {} in namespace: {}", - rse_path, namespace - ); - - // Get the original user (already verified in check_privileges()) - let sudo_user = env::var("SUDO_USER").context("SUDO_USER not set")?; - - mount::mount_operation(rse_path, namespace, &sudo_user)?; - println!( - "Successfully mounted {} in namespace {}", - rse_path, namespace - ); + path_finder::mount_data(&rse_path, namespace)?; Ok(()) } diff --git a/src/path_finder.rs b/src/path_finder.rs new file mode 100644 index 0000000..4d32a67 --- /dev/null +++ b/src/path_finder.rs @@ -0,0 +1,117 @@ +//! Core path-finding logic: locating replica paths, checking local availability, and mounting. + +use anyhow::{Context, Result}; +use regex::Regex; +use std::collections::HashSet; +use std::env; + +use crate::models::{DataLocation, StorageAreaIDToNodeAndSite}; + +/// Prints each data location enriched with its node, site, and storage area name. +/// +/// Falls back to printing the raw storage area ID if the area is not found in `site_stores`. 
+pub fn print_data_locations_with_sites( + site_stores: &StorageAreaIDToNodeAndSite, + data_locations: &[DataLocation], +) { + for location in data_locations { + if let Some((node_name, site_name, area_name)) = + site_stores.get(&location.associated_storage_area_id) + { + println!( + "Data location ID: {}, Storage Area: {} ({}), Node: {}, Site: {}", + location.identifier, + area_name, + location.associated_storage_area_id, + node_name, + site_name + ); + } else { + println!( + "Data location ID: {}, Storage Area ID: {}, Node/Site: Not found", + location.identifier, location.associated_storage_area_id + ); + } + } +} + +/// Returns `true` if `rse_path` exists under the local `/skadata` mount point. +pub fn check_local_file_exists(rse_path: &str) -> bool { + use std::path::Path; + let local_path = format!("/skadata{}", rse_path); + Path::new(&local_path).exists() +} + +/// Extracts the canonical RSE path from the replica URIs in `data_locations`. +/// +/// Searches each replica URI for a `//...` suffix using a regex. Logs a +/// warning if some URIs do not match. Returns an error if no paths are found or if +/// multiple distinct paths are found (cross-site disambiguation is not yet implemented). 
+pub fn extract_rse_path( + data_locations: &[DataLocation], + namespace: &str, + file_name: &str, +) -> Result { + let pattern = format!(r"/{}/.*$", regex::escape(namespace)); + let rse_path_regex = Regex::new(&pattern)?; + + let mut matched_paths = HashSet::new(); + let mut unmatched_paths = Vec::new(); + + for location in data_locations { + for uri in &location.replicas { + if let Some(captures) = rse_path_regex.find(uri) { + matched_paths.insert(captures.as_str().to_string()); + } else { + unmatched_paths.push(uri.clone()); + } + } + } + + if !unmatched_paths.is_empty() { + println!( + "Warning: {} URIs did not match the expected pattern.", + unmatched_paths.len() + ); + println!("Unmatched URIs: {:?}", unmatched_paths); + } + + if matched_paths.is_empty() { + anyhow::bail!( + "No valid paths found for file '{}' in namespace '{}'.", + file_name, + namespace + ); + } + + if matched_paths.len() > 1 { + println!("Warning: Multiple unique paths found: {:?}", matched_paths); + println!( + "We should check the path for the local RSE - by cross-referencing with site capabilities." + ); + anyhow::bail!("Handling multiple matched paths is not implemented."); + } + + Ok(matched_paths.into_iter().next().unwrap()) +} + +/// Mounts the data file at `rse_path` into the `SUDO_USER`'s home directory via bindfs. +/// +/// Reads `SUDO_USER` from the environment (guaranteed to be set by [`crate::cli::check_privileges`]). 
+pub fn mount_data(rse_path: &str, namespace: &str) -> Result<()> { + println!( + "Mounting data from RSE path: {} in namespace: {}", + rse_path, namespace + ); + + // Get the original user (already verified in check_privileges()) + let sudo_user = env::var("SUDO_USER").context("SUDO_USER not set")?; + + crate::mount::mount_operation(rse_path, namespace, &sudo_user)?; + println!( + "Successfully mounted {} in namespace {}", + rse_path, namespace + ); + + Ok(()) +} From ee4977d912b79ef3b6b417025d82882a3fdc6efa Mon Sep 17 00:00:00 2001 From: Roger Duthie <343584+rjaduthie@users.noreply.github.com> Date: Mon, 30 Mar 2026 17:14:47 +0100 Subject: [PATCH 22/27] chore(SOG-480): Add tests and docs to cli module --- src/cli.rs | 220 +++++++++++++++++++++++++++++++++++++++++++++-------- 1 file changed, 190 insertions(+), 30 deletions(-) diff --git a/src/cli.rs b/src/cli.rs index 549e698..c62a756 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -1,4 +1,15 @@ //! CLI argument parsing and environment bootstrapping. +//! +//! This module owns everything that touches the command line and the process +//! environment before any network calls are made: +//! +//! * [`Args`] — the `clap`-derived struct that models the accepted flags. +//! * [`check_privileges`] — verifies the process is running as root via `sudo` +//! and that `SUDO_USER` is set, bailing out with a user-friendly re-invocation +//! hint otherwise. +//! * [`get_tokens_from_env`] — reads pre-issued API tokens from environment +//! variables, used when the caller wants to skip the OAuth2 device-code flow +//! (`--no-login`). use anyhow::{Context, Result}; use clap::Parser; @@ -7,58 +18,47 @@ use std::env; use crate::oauth2::Tokens; /// Command-line arguments for pathFinder. +/// +/// Parse these with [`clap::Parser::parse`]; the resulting struct is then +/// passed to [`check_privileges`] before any API work begins. 
#[derive(Parser, Debug)] #[command(name = "path-finder")] #[command(about = "A tool for finding SKA data paths for mounting purposes")] pub struct Args { - /// Namespace of the data + /// Namespace of the data (e.g. `"ska:ska-sdp/eb-m001-20240101-00000"`). #[arg(long)] pub namespace: String, - /// Name of the data file + /// Name of the data file within the namespace. #[arg(long)] pub file_name: String, - /// Do not use OAuth2 for authentication - use environment variables instead + /// Skip the OAuth2 device-code flow and read tokens from + /// `DATA_MANAGEMENT_ACCESS_TOKEN` and `SITE_CAPABILITIES_ACCESS_TOKEN` + /// instead. #[arg(long)] pub no_login: bool, - /// Unmount previously mounted data instead of mounting + /// Unmount a previously mounted file instead of mounting it. #[arg(long)] pub unmount: bool, } -/// Checks that the process is running as root via `sudo` and that `SUDO_USER` is set. +/// Checks that the process is running as root via `sudo` and that `SUDO_USER` +/// is set. +/// +/// Both conditions are necessary: the mount/unmount OS calls require root, and +/// `SUDO_USER` is used to build the bind-mount target path inside the invoking +/// user's home directory. Running as the root user directly (without `sudo`) +/// is rejected so that the home-directory expansion is always safe. /// -/// Exits early with a helpful message if not running with sufficient privileges. +/// Prints an actionable re-invocation hint to stderr before bailing. 
pub fn check_privileges(args: &Args) -> Result<()> { - // Check for root privileges early to avoid wasting time on API calls #[cfg(unix)] { let euid = unsafe { libc::geteuid() }; - if euid != 0 { - eprintln!("\nError: This tool requires root privileges for mount/unmount operations."); - eprintln!("Please re-run with sudo:"); - if args.unmount { - eprintln!( - " sudo -E path-finder --namespace {} --file_name {} --unmount", - args.namespace, args.file_name - ); - } else { - eprintln!( - " sudo -E path-finder --namespace {} --file_name {}", - args.namespace, args.file_name - ); - } - anyhow::bail!("Insufficient privileges - sudo required"); - } - - // Verify SUDO_USER is set - if env::var("SUDO_USER").is_err() { - eprintln!("\nWarning: SUDO_USER not set. Are you running as root directly?"); - eprintln!("Please use 'sudo' rather than running as root user."); - anyhow::bail!("SUDO_USER environment variable not set"); - } + let sudo_user = env::var("SUDO_USER").ok(); + check_privileges_impl(euid, sudo_user.as_deref(), args)?; } #[cfg(not(unix))] @@ -69,8 +69,47 @@ pub fn check_privileges(args: &Args) -> Result<()> { Ok(()) } +/// Inner implementation of [`check_privileges`] with injectable `euid` and +/// `sudo_user` values so the privilege logic can be unit-tested without +/// running the test suite as root. +/// +/// * `euid` — effective user-ID of the current process (`0` = root). +/// * `sudo_user` — value of the `SUDO_USER` environment variable, if set. +/// * `args` — parsed CLI flags, used to tailor the re-invocation hint. 
+#[cfg(unix)] +fn check_privileges_impl(euid: u32, sudo_user: Option<&str>, args: &Args) -> Result<()> { + if euid != 0 { + eprintln!("\nError: This tool requires root privileges for mount/unmount operations."); + eprintln!("Please re-run with sudo:"); + if args.unmount { + eprintln!( + " sudo -E path-finder --namespace {} --file_name {} --unmount", + args.namespace, args.file_name + ); + } else { + eprintln!( + " sudo -E path-finder --namespace {} --file_name {}", + args.namespace, args.file_name + ); + } + anyhow::bail!("Insufficient privileges - sudo required"); + } + + if sudo_user.is_none() { + eprintln!("\nWarning: SUDO_USER not set. Are you running as root directly?"); + eprintln!("Please use 'sudo' rather than running as root user."); + anyhow::bail!("SUDO_USER environment variable not set"); + } + + Ok(()) +} + /// Reads API access tokens from the `DATA_MANAGEMENT_ACCESS_TOKEN` and /// `SITE_CAPABILITIES_ACCESS_TOKEN` environment variables. +/// +/// This is the token source used with `--no-login`. Both variables must be +/// present; a descriptive error is returned if either is absent so the user +/// knows exactly which one to export. pub fn get_tokens_from_env() -> Result { let dm_token = env::var("DATA_MANAGEMENT_ACCESS_TOKEN") .context("Please set DATA_MANAGEMENT_ACCESS_TOKEN environment variable or omit --no-login to use OAuth2")?; @@ -83,3 +122,124 @@ pub fn get_tokens_from_env() -> Result { site_capabilities_token: sc_token, }) } + +#[cfg(test)] +mod tests { + use super::*; + use std::sync::Mutex; + + /// Serialise tests that mutate the process environment to avoid races when + /// the test binary runs suites in parallel. 
+ static ENV_LOCK: Mutex<()> = Mutex::new(()); + + fn mount_args() -> Args { + Args { + namespace: "ska:ska-sdp/eb-m001-20240101-00000".into(), + file_name: "data.fits".into(), + no_login: false, + unmount: false, + } + } + + fn unmount_args() -> Args { + Args { + namespace: "ska:ska-sdp/eb-m001-20240101-00000".into(), + file_name: "data.fits".into(), + no_login: false, + unmount: true, + } + } + + // ── check_privileges_impl ──────────────────────────────────────────────── + + #[test] + #[cfg(unix)] + fn check_privileges_fails_when_not_root() { + let err = check_privileges_impl(1000, Some("alice"), &mount_args()).unwrap_err(); + assert!( + err.to_string().contains("sudo required"), + "unexpected error: {err}" + ); + } + + #[test] + #[cfg(unix)] + fn check_privileges_fails_when_not_root_and_unmounting() { + // The bail message is identical; this path exercises the branch that + // includes `--unmount` in the eprintln! hint. + let err = check_privileges_impl(1000, Some("alice"), &unmount_args()).unwrap_err(); + assert!( + err.to_string().contains("sudo required"), + "unexpected error: {err}" + ); + } + + #[test] + #[cfg(unix)] + fn check_privileges_fails_when_sudo_user_absent() { + let err = check_privileges_impl(0, None, &mount_args()).unwrap_err(); + assert!( + err.to_string().contains("SUDO_USER"), + "unexpected error: {err}" + ); + } + + #[test] + #[cfg(unix)] + fn check_privileges_succeeds_when_root_with_sudo_user() { + check_privileges_impl(0, Some("alice"), &mount_args()) + .expect("should succeed when euid == 0 and SUDO_USER is set"); + } + + // ── get_tokens_from_env ────────────────────────────────────────────────── + + #[test] + fn get_tokens_from_env_returns_tokens_when_both_set() { + let _lock = ENV_LOCK.lock().unwrap_or_else(|e| e.into_inner()); + env::set_var("DATA_MANAGEMENT_ACCESS_TOKEN", "dm-test-token"); + env::set_var("SITE_CAPABILITIES_ACCESS_TOKEN", "sc-test-token"); + + let result = get_tokens_from_env(); + + 
env::remove_var("DATA_MANAGEMENT_ACCESS_TOKEN"); + env::remove_var("SITE_CAPABILITIES_ACCESS_TOKEN"); + + let tokens = result.expect("should succeed when both vars are set"); + assert_eq!(tokens.data_management_token, "dm-test-token"); + assert_eq!(tokens.site_capabilities_token, "sc-test-token"); + } + + #[test] + fn get_tokens_from_env_errors_when_dm_token_absent() { + let _lock = ENV_LOCK.lock().unwrap_or_else(|e| e.into_inner()); + env::remove_var("DATA_MANAGEMENT_ACCESS_TOKEN"); + env::set_var("SITE_CAPABILITIES_ACCESS_TOKEN", "sc-test-token"); + + let result = get_tokens_from_env(); + + env::remove_var("SITE_CAPABILITIES_ACCESS_TOKEN"); + + let err = result.unwrap_err(); + assert!( + err.to_string().contains("DATA_MANAGEMENT_ACCESS_TOKEN"), + "unexpected error: {err}" + ); + } + + #[test] + fn get_tokens_from_env_errors_when_sc_token_absent() { + let _lock = ENV_LOCK.lock().unwrap_or_else(|e| e.into_inner()); + env::set_var("DATA_MANAGEMENT_ACCESS_TOKEN", "dm-test-token"); + env::remove_var("SITE_CAPABILITIES_ACCESS_TOKEN"); + + let result = get_tokens_from_env(); + + env::remove_var("DATA_MANAGEMENT_ACCESS_TOKEN"); + + let err = result.unwrap_err(); + assert!( + err.to_string().contains("SITE_CAPABILITIES_ACCESS_TOKEN"), + "unexpected error: {err}" + ); + } +} From 0a4a8b4b11dd4e533a4cc2cbc9eb9e2303d07d70 Mon Sep 17 00:00:00 2001 From: Roger Duthie <343584+rjaduthie@users.noreply.github.com> Date: Tue, 31 Mar 2026 11:34:37 +0100 Subject: [PATCH 23/27] chore(SOG-480): Add test coverage to path_finders module --- src/path_finder.rs | 301 +++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 290 insertions(+), 11 deletions(-) diff --git a/src/path_finder.rs b/src/path_finder.rs index 4d32a67..75e9614 100644 --- a/src/path_finder.rs +++ b/src/path_finder.rs @@ -1,15 +1,27 @@ //! Core path-finding logic: locating replica paths, checking local availability, and mounting. +//! +//! 
This module bridges the API layer ([`crate::api_client`]) and the OS layer +//! ([`crate::mount`]). The four public functions implement the logical +//! steps in the mount workflow: +//! use anyhow::{Context, Result}; use regex::Regex; use std::collections::HashSet; use std::env; +use std::path::Path; use crate::models::{DataLocation, StorageAreaIDToNodeAndSite}; /// Prints each data location enriched with its node, site, and storage area name. /// -/// Falls back to printing the raw storage area ID if the area is not found in `site_stores`. +/// `site_stores` is the [`StorageAreaIDToNodeAndSite`] map produced by +/// [`crate::api_client::PathFinderApiClient::site_storage_areas`]; it maps +/// storage-area UUIDs to `(node_name, site_name, area_name)` tuples. +/// +/// When a location's `associated_storage_area_id` is not found in the map the +/// function falls back to printing the raw UUID so the caller still has +/// something actionable. pub fn print_data_locations_with_sites( site_stores: &StorageAreaIDToNodeAndSite, data_locations: &[DataLocation], @@ -37,16 +49,34 @@ pub fn print_data_locations_with_sites( /// Returns `true` if `rse_path` exists under the local `/skadata` mount point. pub fn check_local_file_exists(rse_path: &str) -> bool { - use std::path::Path; - let local_path = format!("/skadata{}", rse_path); + check_local_file_exists_impl(rse_path, "/skadata") +} + +/// Inner implementation of [`check_local_file_exists`] with an injectable +/// `base` directory, allowing unit tests to probe a test directory instead +/// of `/skadata`. +fn check_local_file_exists_impl(rse_path: &str, base: &str) -> bool { + let local_path = format!("{}{}", base, rse_path); Path::new(&local_path).exists() } /// Extracts the canonical RSE path from the replica URIs in `data_locations`. /// -/// Searches each replica URI for a `//...` suffix using a regex. Logs a -/// warning if some URIs do not match. 
Returns an error if no paths are found or if -/// multiple distinct paths are found (cross-site disambiguation is not yet implemented). +/// Each replica URI (e.g. +/// `"davs://xrootd01.cam.uksrc.org:1094/skadata/ska:ska-sdp/eb-m001/data.fits"`) +/// is searched for a `//…` suffix. The suffix becomes the RSE +/// path that is later passed to [`mount_data`]. +/// +/// **Error conditions:** +/// - No URIs match the pattern +/// - Two or more *distinct* paths are found across all replicas +/// +/// Duplicate URIs pointing to the same path (i.e. the same file staged to +/// multiple replicas of the same RSE) are de-duplicated silently; only +/// *distinct* paths trigger the multiple-paths error. +/// +/// A warning is printed for each URI that does not match, but this is not +/// treated as fatal pub fn extract_rse_path( data_locations: &[DataLocation], namespace: &str, @@ -95,19 +125,40 @@ pub fn extract_rse_path( Ok(matched_paths.into_iter().next().unwrap()) } -/// Mounts the data file at `rse_path` into the `SUDO_USER`'s home directory via bindfs. +/// Mounts the data file at `rse_path` into the invoking user's home directory. +/// +/// Reads `SUDO_USER` from the environment (set by `sudo`; guaranteed to be +/// present after [`crate::cli::check_privileges`] succeeds) and delegates to +/// [`crate::mount::mount_operation`]. /// -/// Reads `SUDO_USER` from the environment (guaranteed to be set by [`crate::cli::check_privileges`]). +/// Prints progress messages to stdout before and after the mount syscall. pub fn mount_data(rse_path: &str, namespace: &str) -> Result<()> { + let sudo_user = env::var("SUDO_USER").context("SUDO_USER not set")?; + mount_data_impl(rse_path, namespace, &sudo_user, crate::mount::mount_operation) +} + +/// Inner implementation of [`mount_data`] with an injectable `mount_fn` and +/// `sudo_user`, so the code can be tested without performing a +/// real OS mount. +/// +/// * `rse_path` — the `//…` path on the RSE. 
+/// * `namespace` — the data namespace (used for bind-mount target naming). +/// * `sudo_user` — the original (non-root) user on whose behalf to mount. +/// * `mount_fn` — called as `mount_fn(rse_path, namespace, sudo_user)`; in +/// production this is [`crate::mount::mount_operation`]. +fn mount_data_impl( + rse_path: &str, + namespace: &str, + sudo_user: &str, + mount_fn: impl Fn(&str, &str, &str) -> Result<()>, +) -> Result<()> { println!( "Mounting data from RSE path: {} in namespace: {}", rse_path, namespace ); - // Get the original user (already verified in check_privileges()) - let sudo_user = env::var("SUDO_USER").context("SUDO_USER not set")?; + mount_fn(rse_path, namespace, sudo_user)?; - crate::mount::mount_operation(rse_path, namespace, &sudo_user)?; println!( "Successfully mounted {} in namespace {}", rse_path, namespace @@ -115,3 +166,231 @@ pub fn mount_data(rse_path: &str, namespace: &str) -> Result<()> { Ok(()) } + +#[cfg(test)] +mod tests { + use super::*; + use std::collections::HashMap; + use std::sync::Mutex; + use tempfile::TempDir; + + /// Serialise tests that mutate the process environment. 
+ static ENV_LOCK: Mutex<()> = Mutex::new(()); + + // ── helpers ────────────────────────────────────────────────────────────── + + fn make_location(id: &str, area_id: &str, replicas: &[&str]) -> DataLocation { + DataLocation { + identifier: id.to_string(), + associated_storage_area_id: area_id.to_string(), + replicas: replicas.iter().map(|s| s.to_string()).collect(), + is_dataset: false, + } + } + + const OLYMPUSMONS_AREA_ID: &str = "12345678-90ab-cdef-1234-567890abcdef"; + + fn make_site_stores() -> StorageAreaIDToNodeAndSite { + let mut m = HashMap::new(); + m.insert( + OLYMPUSMONS_AREA_ID.to_string(), + ( + "MARSSRC".to_string(), + "MARSSRC-OLYMPUSMONS".to_string(), + "MARSSRC_OLYMPUSMONS_XRD".to_string(), + ), + ); + m + } + + // ── print_data_locations_with_sites ────────────────────────────────────── + + #[test] + fn print_locations_does_not_panic() { + let stores = make_site_stores(); + let locations = vec![make_location( + "MARSSRC-OLYMPUSMONS-T0", + OLYMPUSMONS_AREA_ID, + &["davs://xrootd01.example.org:1094/skadata/ska:ns/data.fits"], + )]; + // We exercise the enriched branch; the test passes if no panic occurs. + print_data_locations_with_sites(&stores, &locations); + } + + #[test] + fn print_locations_does_not_panic_with_empty_slice() { + let stores = make_site_stores(); + print_data_locations_with_sites(&stores, &[]); + } + + // ── check_local_file_exists_impl ───────────────────────────────────────── + + #[test] + fn check_local_file_exists_returns_true_when_file_present() { + let test_dir = TempDir::new().unwrap(); + let base_path = test_dir.path().to_str().unwrap(); + // Create /skadata/ska:ns/data.fits by writing a file at the path. 
+ let rse_path = "/ska:ns/data.fits"; + let full = test_dir.path().join("ska:ns").join("data.fits"); + std::fs::create_dir_all(full.parent().unwrap()).unwrap(); + std::fs::write(&full, b"").unwrap(); + + assert!(check_local_file_exists_impl( + rse_path, + base_path + )); + } + + #[test] + fn check_local_file_exists_returns_false_when_file_absent() { + let test_dir = TempDir::new().unwrap(); + let base_path = test_dir.path().to_str().unwrap(); + assert!(!check_local_file_exists_impl( + "/ska:ns/missing.fits", + base_path + )); + } + + // ── extract_rse_path ───────────────────────────────────────────────────── + + #[test] + fn extract_rse_path_returns_path_for_single_match() { + let ns = "ska:ska-sdp/eb-m001-20240101-00000"; + let locations = vec![make_location( + "MARSSRC-OLYMPUSMONS-T0", + OLYMPUSMONS_AREA_ID, + &[&format!( + "davs://xrootd01.example.org:1094/skadata/{ns}/data.fits" + )], + )]; + + let result = extract_rse_path(&locations, ns, "data.fits").unwrap(); + assert_eq!(result, format!("/{ns}/data.fits")); + } + + #[test] + fn extract_rse_path_deduplicates_identical_paths_across_replicas() { + let ns = "ska:ska-sdp/eb-m001-20240101-00000"; + let uri = format!("davs://xrootd01.example.org:1094/skadata/{ns}/data.fits"); + // Same logical path served from two replica URIs → should succeed. 
+ let locations = vec![ + make_location("MARSSRC-OLYMPUSMONS-T0", OLYMPUSMONS_AREA_ID, &[uri.as_str()]), + make_location("MARSSRC-OLYMPUSMONS-T1", OLYMPUSMONS_AREA_ID, &[uri.as_str()]), + ]; + + let result = extract_rse_path(&locations, ns, "data.fits").unwrap(); + assert_eq!(result, format!("/{ns}/data.fits")); + } + + #[test] + fn extract_rse_path_errors_when_no_locations() { + let err = extract_rse_path(&[], "ska:ns", "data.fits").unwrap_err(); + assert!( + err.to_string().contains("No valid paths found"), + "unexpected error: {err}" + ); + } + + #[test] + fn extract_rse_path_errors_when_no_replicas_match() { + let locations = vec![make_location( + "MARSSRC-OLYMPUSMONS-T0", + OLYMPUSMONS_AREA_ID, + &["davs://xrootd01.example.org:1094/unrelated/path/data.fits"], + )]; + let err = extract_rse_path(&locations, "ska:ns", "data.fits").unwrap_err(); + assert!( + err.to_string().contains("No valid paths found"), + "unexpected error: {err}" + ); + } + + #[test] + fn extract_rse_path_errors_when_multiple_distinct_paths() { + let ns = "ska:ns"; + let locations = vec![ + make_location( + "MARSSRC-OLYMPUSMONS-T0", + OLYMPUSMONS_AREA_ID, + &[&format!( + "davs://xrootd01.example.org:1094/skadata/{ns}/v1/data.fits" + )], + ), + make_location( + "MARSSRC-OLYMPUSMONS-T1", + OLYMPUSMONS_AREA_ID, + &[&format!( + "davs://xrootd01.example.org:1094/skadata/{ns}/v2/data.fits" + )], + ), + ]; + + let err = extract_rse_path(&locations, ns, "data.fits").unwrap_err(); + assert!( + err.to_string().contains("not implemented"), + "unexpected error: {err}" + ); + } + + #[test] + fn extract_rse_path_escapes_special_chars_in_namespace() { + // Namespaces contain ':', '/', and '-' which carry meaning in regex + // without escaping. Verify the regex still matches correctly. 
+ let ns = "ska:ska-sdp/eb-m001"; + let locations = vec![make_location( + "MARSSRC-OLYMPUSMONS-T0", + OLYMPUSMONS_AREA_ID, + &[&format!( + "davs://xrootd01.example.org:1094/skadata/{ns}/data.fits" + )], + )]; + + let result = extract_rse_path(&locations, ns, "data.fits").unwrap(); + assert_eq!(result, format!("/{ns}/data.fits")); + } + + // ── mount_data_impl ────────────────────────────────────────────────────── + + #[test] + fn mount_data_impl_calls_mount_fn_with_correct_args() { + use std::cell::Cell; + + let called = Cell::new(false); + let mock_mount = |rse: &str, ns: &str, user: &str| -> Result<()> { + assert_eq!(rse, "/ska:ns/data.fits"); + assert_eq!(ns, "ska:ns"); + assert_eq!(user, "alice"); + called.set(true); + Ok(()) + }; + + mount_data_impl("/ska:ns/data.fits", "ska:ns", "alice", mock_mount).unwrap(); + assert!(called.get(), "mount_fn was never called"); + } + + #[test] + fn mount_data_impl_propagates_mount_fn_error() { + let failing_mount = |_: &str, _: &str, _: &str| -> Result<()> { + anyhow::bail!("bindfs failed"); + }; + + let err = mount_data_impl("/ska:ns/data.fits", "ska:ns", "alice", failing_mount) + .unwrap_err(); + assert!( + err.to_string().contains("bindfs failed"), + "unexpected error: {err}" + ); + } + + #[test] + fn mount_data_errors_when_sudo_user_not_set() { + let _lock = ENV_LOCK.lock().unwrap_or_else(|e| e.into_inner()); + env::remove_var("SUDO_USER"); + + let err = mount_data("/ska:ns/data.fits", "ska:ns").unwrap_err(); + assert!( + err.to_string().contains("SUDO_USER"), + "unexpected error: {err}" + ); + } +} From a5be085a2f96f88a5e0d281c9fb81ed6cb320791 Mon Sep 17 00:00:00 2001 From: Roger Duthie <343584+rjaduthie@users.noreply.github.com> Date: Tue, 31 Mar 2026 12:29:15 +0100 Subject: [PATCH 24/27] chore(SOG-480): Add tests to the main module --- src/main.rs | 389 ++++++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 380 insertions(+), 9 deletions(-) diff --git a/src/main.rs b/src/main.rs index 6b4c931..da3821e 
100644 --- a/src/main.rs +++ b/src/main.rs @@ -8,10 +8,10 @@ mod path_finder; use anyhow::{Context, Result}; use clap::Parser; use std::env; -use std::process::exit; use api_client::{ApiClient, PathFinderApiClient}; -use cli::{Args, check_privileges, get_tokens_from_env}; +use cli::{check_privileges, get_tokens_from_env, Args}; +use models::{DataLocation, StorageAreaIDToNodeAndSite}; use oauth2::{authenticate, Tokens}; fn main() -> Result<()> { @@ -41,35 +41,406 @@ fn main() -> Result<()> { run(&args.namespace, &args.file_name, &tokens) } +/// Production wrapper: constructs an [`ApiClient`] from the supplied tokens and +/// delegates to [`run_impl`] with the real path-finder helpers and [`do_exit`]. fn run(namespace: &str, file_name: &str, tokens: &Tokens) -> Result<()> { let client = ApiClient::new( tokens.data_management_token.clone(), tokens.site_capabilities_token.clone(), ); + run_impl( + namespace, + file_name, + &client, + path_finder::print_data_locations_with_sites, + path_finder::extract_rse_path, + path_finder::check_local_file_exists, + path_finder::mount_data, + do_exit, + ) +} + +/// Wraps [`std::process::exit`] so that [`run_impl`] can accept an injectable +/// `Fn(i32)` rather than calling `process::exit` directly, keeping the +/// orchestration logic unit-testable without spawning a subprocess. +fn do_exit(code: i32) { + std::process::exit(code); +} +/// Core code for the mount workflow. +/// +/// All external dependencies are injected so the function can be exercised in +/// unit tests without live API endpoints, a real `/skadata` tree, or root +/// privileges. +/// +/// # Parameters +/// * `namespace` — data namespace passed on the command line. +/// * `file_name` — file name passed on the command line. +/// * `client` — SRCNet API client; see [`PathFinderApiClient`]. +/// * `print_locations` — displays the replica list enriched with site names. 
+/// Called once on the happy path and a second time when +/// the file has not yet been staged locally. +/// * `extract_path` — extracts the `//…` RSE path from replica URIs. +/// * `file_exists` — returns `true` when the file is present under `/skadata`. +/// * `mount` — performs the OS-level bind mount. +/// * `exit_fn` — called with `1` when the file is not locally staged. +/// In production this is [`do_exit`], which does not return. +fn run_impl( + namespace: &str, + file_name: &str, + client: &dyn PathFinderApiClient, + print_locations: impl Fn(&StorageAreaIDToNodeAndSite, &[DataLocation]), + extract_path: impl Fn(&[DataLocation], &str, &str) -> Result, + file_exists: impl Fn(&str) -> bool, + mount: impl Fn(&str, &str) -> Result<()>, + exit_fn: impl Fn(i32), +) -> Result<()> { client.check_namespace_available(namespace)?; let site_storages = client.site_storage_areas()?; let data_locations = client.locate_data(namespace, file_name)?; - path_finder::print_data_locations_with_sites(&site_storages, &data_locations); + print_locations(&site_storages, &data_locations); - let rse_path = path_finder::extract_rse_path(&data_locations, namespace, file_name)?; + let rse_path = extract_path(&data_locations, namespace, file_name)?; println!( "RSE Path for file '{}' in namespace '{}': {}", file_name, namespace, rse_path ); - // Check if the file exists locally - if !path_finder::check_local_file_exists(&rse_path) { + if !file_exists(&rse_path) { println!("\n⚠️ File not found locally! 
⚠️"); println!("\nThe file is available at the following locations:"); - path_finder::print_data_locations_with_sites(&site_storages, &data_locations); + print_locations(&site_storages, &data_locations); println!("\nPlease ensure the data has been staged to this local site before mounting."); - exit(1); + exit_fn(1); + return Ok(()); // unreachable in production (do_exit never returns) } - path_finder::mount_data(&rse_path, namespace)?; + mount(&rse_path, namespace)?; Ok(()) } + +#[cfg(test)] +mod tests { + use super::*; + use models::DataLocationAPIResponse; + use std::cell::{Cell, RefCell}; + use std::collections::HashMap; + + // ── constants ──────────────────────────────────────────────────────────── + + const NS: &str = "ska:ska-sdp/eb-m001-20240101-00000"; + const FILE: &str = "data.fits"; + const RSE_PATH: &str = "/ska:ska-sdp/eb-m001-20240101-00000/data.fits"; + const OLYMPUSMONS_AREA_ID: &str = "2a73d212-8793-4011-a687-cad99841c269"; + + // ── helpers ────────────────────────────────────────────────────────────── + + fn make_location() -> DataLocation { + DataLocation { + identifier: "MARSSRC-OLYMPUSMONS-T0".into(), + associated_storage_area_id: OLYMPUSMONS_AREA_ID.into(), + replicas: vec![format!( + "davs://xrootd01.example.org:1094/skadata{RSE_PATH}" + )], + is_dataset: false, + } + } + + fn make_site_storages() -> StorageAreaIDToNodeAndSite { + let mut m = HashMap::new(); + m.insert( + OLYMPUSMONS_AREA_ID.to_string(), + ( + "MARSSRC".to_string(), + "MARSSRC-OLYMPUSMONS".to_string(), + "MARSSRC_OLYMPUSMONS_XRD".to_string(), + ), + ); + m + } + + // ── MockApiClient ──────────────────────────────────────────────────────── + + struct MockApiClient { + namespace_ok: bool, + site_storages_ok: bool, + locate_data_ok: bool, + // call recording + check_namespace_called_with: RefCell>, + site_storages_called: Cell, + locate_data_called_with: RefCell>, + } + + impl MockApiClient { + fn new_golden() -> Self { + Self { + namespace_ok: true, + site_storages_ok: 
true, + locate_data_ok: true, + check_namespace_called_with: RefCell::new(None), + site_storages_called: Cell::new(false), + locate_data_called_with: RefCell::new(None), + } + } + } + + impl PathFinderApiClient for MockApiClient { + fn check_namespace_available(&self, namespace: &str) -> Result<()> { + *self.check_namespace_called_with.borrow_mut() = Some(namespace.to_string()); + if self.namespace_ok { + Ok(()) + } else { + anyhow::bail!("namespace '{}' not available", namespace) + } + } + + fn get_all_namespaces(&self) -> Result> { + Ok(vec![NS.to_string()]) + } + + fn site_storage_areas(&self) -> Result { + self.site_storages_called.set(true); + if self.site_storages_ok { + Ok(make_site_storages()) + } else { + anyhow::bail!("site_storage_areas failed") + } + } + + fn locate_data(&self, namespace: &str, file_name: &str) -> Result { + *self.locate_data_called_with.borrow_mut() = + Some((namespace.to_string(), file_name.to_string())); + if self.locate_data_ok { + Ok(vec![make_location()]) + } else { + anyhow::bail!("locate_data API error") + } + } + } + + // ── run_impl tests ─────────────────────────────────────────────────────── + + #[test] + fn run_impl_golden_path_calls_all_with_correct_args() { + let client = MockApiClient::new_golden(); + let print_count = Cell::new(0u32); + let extract_called_with: RefCell> = RefCell::new(None); + let file_exists_called_with: RefCell> = RefCell::new(None); + let mount_called_with: RefCell> = RefCell::new(None); + let exit_called = Cell::new(false); + + run_impl( + NS, + FILE, + &client, + |_, _| { + print_count.set(print_count.get() + 1); + }, + |_locs, ns, file| { + *extract_called_with.borrow_mut() = Some((ns.to_string(), file.to_string())); + Ok(RSE_PATH.to_string()) + }, + |rse| { + *file_exists_called_with.borrow_mut() = Some(rse.to_string()); + true + }, + |rse, ns| { + *mount_called_with.borrow_mut() = Some((rse.to_string(), ns.to_string())); + Ok(()) + }, + |_| exit_called.set(true), + ) + .unwrap(); + + // API 
client called with the right args + assert_eq!( + client.check_namespace_called_with.borrow().as_deref(), + Some(NS), + "check_namespace_available arg" + ); + assert!( + client.site_storages_called.get(), + "site_storage_areas called" + ); + assert_eq!( + *client.locate_data_called_with.borrow(), + Some((NS.to_string(), FILE.to_string())), + "locate_data args" + ); + + // path-finder helpers called with the right args + assert_eq!(print_count.get(), 1, "print_locations called exactly once"); + assert_eq!( + *extract_called_with.borrow(), + Some((NS.to_string(), FILE.to_string())), + "extract_path args" + ); + assert_eq!( + file_exists_called_with.borrow().as_deref(), + Some(RSE_PATH), + "file_exists arg" + ); + assert_eq!( + *mount_called_with.borrow(), + Some((RSE_PATH.to_string(), NS.to_string())), + "mount args" + ); + assert!( + !exit_called.get(), + "exit_fn must not be called on golden path" + ); + } + + #[test] + fn run_impl_calls_exit_and_skips_mount_when_file_not_staged() { + let client = MockApiClient::new_golden(); + let exit_called = Cell::new(false); + let mount_called = Cell::new(false); + + run_impl( + NS, + FILE, + &client, + |_, _| {}, + |_, _, _| Ok(RSE_PATH.to_string()), + |_| false, // file not present locally + |_, _| { + mount_called.set(true); + Ok(()) + }, + |_| exit_called.set(true), + ) + .unwrap(); + + assert!(exit_called.get(), "exit_fn should be called"); + assert!( + !mount_called.get(), + "mount must not be called when file not staged" + ); + } + + #[test] + fn run_impl_prints_locations_twice_when_file_not_staged() { + let client = MockApiClient::new_golden(); + let print_count = Cell::new(0u32); + + run_impl( + NS, + FILE, + &client, + |_, _| print_count.set(print_count.get() + 1), + |_, _, _| Ok(RSE_PATH.to_string()), + |_| false, + |_, _| Ok(()), + |_| {}, + ) + .unwrap(); + + assert_eq!( + print_count.get(), + 2, + "print_locations should be called twice when file not staged" + ); + } + + #[test] + fn 
run_impl_propagates_namespace_not_available_error() { + let client = MockApiClient { + namespace_ok: false, + ..MockApiClient::new_golden() + }; + + let err = run_impl( + NS, + FILE, + &client, + |_, _| {}, + |_, _, _| unreachable!("extract_path must not be called"), + |_| unreachable!("file_exists must not be called"), + |_, _| unreachable!("mount must not be called"), + |_| unreachable!("exit_fn must not be called"), + ) + .unwrap_err(); + + assert!( + err.to_string().contains("not available"), + "unexpected error: {err}" + ); + assert_eq!( + client.check_namespace_called_with.borrow().as_deref(), + Some(NS) + ); + } + + #[test] + fn run_impl_propagates_locate_data_error() { + let client = MockApiClient { + locate_data_ok: false, + ..MockApiClient::new_golden() + }; + + let err = run_impl( + NS, + FILE, + &client, + |_, _| {}, + |_, _, _| unreachable!("extract_path must not be called"), + |_| unreachable!("file_exists must not be called"), + |_, _| unreachable!("mount must not be called"), + |_| unreachable!("exit_fn must not be called"), + ) + .unwrap_err(); + + assert!( + err.to_string().contains("locate_data API error"), + "unexpected error: {err}" + ); + } + + #[test] + fn run_impl_propagates_extract_path_error() { + let client = MockApiClient::new_golden(); + + let err = run_impl( + NS, + FILE, + &client, + |_, _| {}, + |_, _, _| anyhow::bail!("no matching replica paths"), + |_| unreachable!("file_exists must not be called"), + |_, _| unreachable!("mount must not be called"), + |_| unreachable!("exit_fn must not be called"), + ) + .unwrap_err(); + + assert!( + err.to_string().contains("no matching replica paths"), + "unexpected error: {err}" + ); + } + + #[test] + fn run_impl_propagates_mount_error() { + let client = MockApiClient::new_golden(); + + let err = run_impl( + NS, + FILE, + &client, + |_, _| {}, + |_, _, _| Ok(RSE_PATH.to_string()), + |_| true, // file exists + |_, _| anyhow::bail!("bindfs: permission denied"), + |_| unreachable!("exit_fn must not 
be called"), + ) + .unwrap_err(); + + assert!( + err.to_string().contains("bindfs: permission denied"), + "unexpected error: {err}" + ); + } +} From f791274acc8bb779ad047cd6b6bc82071cd46c60 Mon Sep 17 00:00:00 2001 From: Roger Duthie <343584+rjaduthie@users.noreply.github.com> Date: Tue, 31 Mar 2026 12:29:45 +0100 Subject: [PATCH 25/27] chore(SOG-480): Remove references to RAL and CAM UKSRC --- src/main.rs | 2 +- src/models.rs | 186 +++++++++++++++++++++++++++++++++------------ src/path_finder.rs | 44 ++++++----- 3 files changed, 164 insertions(+), 68 deletions(-) diff --git a/src/main.rs b/src/main.rs index da3821e..6b75ead 100644 --- a/src/main.rs +++ b/src/main.rs @@ -143,7 +143,7 @@ mod tests { identifier: "MARSSRC-OLYMPUSMONS-T0".into(), associated_storage_area_id: OLYMPUSMONS_AREA_ID.into(), replicas: vec![format!( - "davs://xrootd01.example.org:1094/skadata{RSE_PATH}" + "davs://xrootd01.olympusmons.marssrc.org:1094/skadata{RSE_PATH}" )], is_dataset: false, } diff --git a/src/models.rs b/src/models.rs index 3f28e03..23a0118 100644 --- a/src/models.rs +++ b/src/models.rs @@ -8,10 +8,10 @@ use std::collections::HashMap; /// Example: /// /// { -/// "identifier": "UKSRC-CAM-T0", +/// "identifier": "MARSSRC-OLYMPUSMONS-T0", /// "associated_storage_area_id": "2a73d212-8793-4011-a687-cad99841c269", /// "replicas": [ -/// "davs://xrootd01.cam.uksrc.org:1094/skadata/daac/08/06/random10MiB.bin" +/// "davs://xrootd01.olympusmons.marssrc.org:1094/skadata/daac/08/06/random10MiB.bin" /// ], /// "is_dataset": false /// } @@ -37,7 +37,7 @@ pub type DataLocationAPIResponse = Vec; /// "id": "ce04d165-4d5f-4380-a674-2a9ae4aba75e", /// "type": "rse", /// "relative_path": "/", -/// "name": "UKSRC_RAL_XRD", +/// "name": "MARSSRC_VALLESMARINERIS_XRD", /// "tier": 1 /// } #[derive(Debug, Clone, Serialize, Deserialize)] @@ -64,7 +64,7 @@ pub struct StorageArea { /// "srm": "xrd", /// "device_type": "hdd", /// "size_in_terabytes": 200, -/// "name": "UKSRC_RAL_XRD", +/// "name": 
"MARSSRC_VALLESMARINERIS_XRD", /// "supported_protocols": [ /// { /// "prefix": "https", @@ -101,7 +101,7 @@ pub struct Storage { /// Example: /// { /// "id": "12345678-90ab-cdef-1234-567890abcdef", -/// "name": "UKSRC-RAL", +/// "name": "MARSSRC-VALLESMARINERIS", /// "description": "Rutherford Appleton Laboratory", /// "country": "GB", /// "latitude": 51.5707, @@ -142,8 +142,8 @@ impl Site { /// /// Example: /// { -/// "name": "UKSRC", -/// "description": "UKSRC Node", +/// "name": "MARSSRC", +/// "description": "MARSSRC Node", /// "sites": [ /// ... /// ], @@ -160,7 +160,7 @@ pub struct Node { #[serde(default)] pub sites: Vec, // ... other fields omitted - } +} impl Node { /// Builds a map from storage area ID to a `(node_name, site_name, area_name)` tuple @@ -200,10 +200,10 @@ pub fn get_all_node_storage_areas(nodes: &[Node]) -> StorageAreaIDToNodeAndSite mod tests { use super::*; - const RAL_AREA_ID: &str = "ce04d165-4d5f-4380-a674-2a9ae4aba75e"; - const CAM_AREA_ID: &str = "2a73d212-8793-4011-a687-cad99841c269"; - const RAL_SITE_ID: &str = "a1b2c3d4-e5f6-7890-abcd-ef1234567890"; - const CAM_SITE_ID: &str = "b2c3d4e5-f6a7-8901-bcde-f12345678901"; + const VALLESMARINERIS_AREA_ID: &str = "ce04d165-4d5f-4380-a674-2a9ae4aba75e"; + const OLYMPUSMONS_AREA_ID: &str = "2a73d212-8793-4011-a687-cad99841c269"; + const VALLESMARINERIS_SITE_ID: &str = "a1b2c3d4-e5f6-7890-abcd-ef1234567890"; + const OLYMPUSMONS_SITE_ID: &str = "b2c3d4e5-f6a7-8901-bcde-f12345678901"; // --- helpers --- @@ -246,23 +246,33 @@ mod tests { #[test] fn site_storage_areas_empty_storages_returns_empty() { - let site = make_site(RAL_SITE_ID, "UKSRC-RAL", vec![]); + let site = make_site(VALLESMARINERIS_SITE_ID, "MARSSRC-VALLESMARINERIS", vec![]); assert!(site.storage_areas().is_empty()); } #[test] fn site_storage_areas_flattens_multiple_storages() { let site = make_site( - RAL_SITE_ID, - "UKSRC-RAL", + VALLESMARINERIS_SITE_ID, + "MARSSRC-VALLESMARINERIS", vec![ - make_storage("st1", "UKSRC_RAL_XRD", 
vec![make_area(RAL_AREA_ID, "UKSRC_RAL_XRD")]), + make_storage( + "st1", + "MARSSRC_VALLESMARINERIS_XRD", + vec![make_area( + VALLESMARINERIS_AREA_ID, + "MARSSRC_VALLESMARINERIS_XRD", + )], + ), make_storage( "st2", - "UKSRC_RAL_STORM", + "MARSSRC_VALLESMARINERIS_STORM", vec![ - make_area(CAM_AREA_ID, "UKSRC_RAL_STORM"), - make_area("c3d4e5f6-a7b8-9012-cdef-123456789012", "UKSRC_RAL_TAPE"), + make_area(OLYMPUSMONS_AREA_ID, "MARSSRC_VALLESMARINERIS_STORM"), + make_area( + "c3d4e5f6-a7b8-9012-cdef-123456789012", + "MARSSRC_VALLESMARINERIS_TAPE", + ), ], ), ], @@ -270,8 +280,8 @@ mod tests { let areas = site.storage_areas(); assert_eq!(areas.len(), 3); let ids: Vec<&str> = areas.iter().map(|a| a.id.as_str()).collect(); - assert!(ids.contains(&RAL_AREA_ID)); - assert!(ids.contains(&CAM_AREA_ID)); + assert!(ids.contains(&VALLESMARINERIS_AREA_ID)); + assert!(ids.contains(&OLYMPUSMONS_AREA_ID)); assert!(ids.contains(&"c3d4e5f6-a7b8-9012-cdef-123456789012")); } @@ -279,41 +289,73 @@ mod tests { #[test] fn storage_area_id_to_site_name_empty_sites_returns_empty() { - let node = make_node("UKSRC", vec![]); + let node = make_node("MARSSRC", vec![]); assert!(node.storage_area_id_to_site_name().is_empty()); } #[test] fn storage_area_id_to_site_name_maps_correctly() { let node = make_node( - "UKSRC", + "MARSSRC", vec![make_site( - RAL_SITE_ID, - "UKSRC-RAL", - vec![make_storage("st1", "UKSRC_RAL_XRD", vec![make_area(RAL_AREA_ID, "UKSRC_RAL_XRD")])], + VALLESMARINERIS_SITE_ID, + "MARSSRC-VALLESMARINERIS", + vec![make_storage( + "st1", + "MARSSRC_VALLESMARINERIS_XRD", + vec![make_area( + VALLESMARINERIS_AREA_ID, + "MARSSRC_VALLESMARINERIS_XRD", + )], + )], )], ); let map = node.storage_area_id_to_site_name(); assert_eq!(map.len(), 1); - let (node_name, site_name, area_name) = map.get(RAL_AREA_ID).unwrap(); - assert_eq!(node_name, "UKSRC"); - assert_eq!(site_name, "UKSRC-RAL"); - assert_eq!(area_name, "UKSRC_RAL_XRD"); + let (node_name, site_name, area_name) = 
map.get(VALLESMARINERIS_AREA_ID).unwrap(); + assert_eq!(node_name, "MARSSRC"); + assert_eq!(site_name, "MARSSRC-VALLESMARINERIS"); + assert_eq!(area_name, "MARSSRC_VALLESMARINERIS_XRD"); } #[test] fn storage_area_id_to_site_name_multiple_sites() { let node = make_node( - "UKSRC", + "MARSSRC", vec![ - make_site(RAL_SITE_ID, "UKSRC-RAL", vec![make_storage("st1", "UKSRC_RAL_XRD", vec![make_area(RAL_AREA_ID, "UKSRC_RAL_XRD")])]), - make_site(CAM_SITE_ID, "UKSRC-CAM", vec![make_storage("st2", "UKSRC_CAM_XRD", vec![make_area(CAM_AREA_ID, "UKSRC_CAM_XRD")])]), + make_site( + VALLESMARINERIS_SITE_ID, + "MARSSRC-VALLESMARINERIS", + vec![make_storage( + "st1", + "MARSSRC_VALLESMARINERIS_XRD", + vec![make_area( + VALLESMARINERIS_AREA_ID, + "MARSSRC_VALLESMARINERIS_XRD", + )], + )], + ), + make_site( + OLYMPUSMONS_SITE_ID, + "MARSSRC-OLYMPUSMONS", + vec![make_storage( + "st2", + "MARSSRC_OLYMPUSMONS_XRD", + vec![make_area(OLYMPUSMONS_AREA_ID, "MARSSRC_OLYMPUSMONS_XRD")], + )], + ), ], ); let map = node.storage_area_id_to_site_name(); assert_eq!(map.len(), 2); - assert_eq!(map.get(RAL_AREA_ID).unwrap().1, "UKSRC-RAL"); - assert_eq!(map.get(CAM_AREA_ID).unwrap().1, "UKSRC-CAM"); + assert_eq!( + map.get(VALLESMARINERIS_AREA_ID).unwrap().1, + "MARSSRC-VALLESMARINERIS" + ); + assert_eq!( + map.get(OLYMPUSMONS_AREA_ID).unwrap().1, + "MARSSRC-OLYMPUSMONS" + ); } // --- get_all_node_storage_areas --- @@ -329,17 +371,36 @@ mod tests { let aussrc_area_id = "d4e5f6a7-b8c9-0123-defa-234567890123"; let nodes = vec![ make_node( - "UKSRC", - vec![make_site(RAL_SITE_ID, "UKSRC-RAL", vec![make_storage("st1", "UKSRC_RAL_XRD", vec![make_area(RAL_AREA_ID, "UKSRC_RAL_XRD")])])], + "MARSSRC", + vec![make_site( + VALLESMARINERIS_SITE_ID, + "MARSSRC-VALLESMARINERIS", + vec![make_storage( + "st1", + "MARSSRC_VALLESMARINERIS_XRD", + vec![make_area( + VALLESMARINERIS_AREA_ID, + "MARSSRC_VALLESMARINERIS_XRD", + )], + )], + )], ), make_node( "AUSSRC", - 
vec![make_site("e5f6a7b8-c9d0-1234-efab-345678901234", "AUSSRC-ICRAR", vec![make_storage("st2", "AUSSRC_ICRAR_XRD", vec![make_area(aussrc_area_id, "AUSSRC_ICRAR_XRD")])])], + vec![make_site( + "e5f6a7b8-c9d0-1234-efab-345678901234", + "AUSSRC-ICRAR", + vec![make_storage( + "st2", + "AUSSRC_ICRAR_XRD", + vec![make_area(aussrc_area_id, "AUSSRC_ICRAR_XRD")], + )], + )], ), ]; let map = get_all_node_storage_areas(&nodes); assert_eq!(map.len(), 2); - assert_eq!(map.get(RAL_AREA_ID).unwrap().0, "UKSRC"); + assert_eq!(map.get(VALLESMARINERIS_AREA_ID).unwrap().0, "MARSSRC"); assert_eq!(map.get(aussrc_area_id).unwrap().0, "AUSSRC"); } @@ -347,17 +408,36 @@ mod tests { fn get_all_node_storage_areas_later_node_wins_on_duplicate_id() { let nodes = vec![ make_node( - "UKSRC", - vec![make_site(RAL_SITE_ID, "UKSRC-RAL", vec![make_storage("st1", "UKSRC_RAL_XRD", vec![make_area(RAL_AREA_ID, "UKSRC_RAL_XRD")])])], + "MARSSRC", + vec![make_site( + VALLESMARINERIS_SITE_ID, + "MARSSRC-VALLESMARINERIS", + vec![make_storage( + "st1", + "MARSSRC_VALLESMARINERIS_XRD", + vec![make_area( + VALLESMARINERIS_AREA_ID, + "MARSSRC_VALLESMARINERIS_XRD", + )], + )], + )], ), make_node( "AUSSRC", - vec![make_site("e5f6a7b8-c9d0-1234-efab-345678901234", "AUSSRC-ICRAR", vec![make_storage("st2", "AUSSRC_ICRAR_XRD", vec![make_area(RAL_AREA_ID, "AUSSRC_ICRAR_XRD")])])], + vec![make_site( + "e5f6a7b8-c9d0-1234-efab-345678901234", + "AUSSRC-ICRAR", + vec![make_storage( + "st2", + "AUSSRC_ICRAR_XRD", + vec![make_area(VALLESMARINERIS_AREA_ID, "AUSSRC_ICRAR_XRD")], + )], + )], ), ]; let map = get_all_node_storage_areas(&nodes); assert_eq!(map.len(), 1); - assert_eq!(map.get(RAL_AREA_ID).unwrap().0, "AUSSRC"); + assert_eq!(map.get(VALLESMARINERIS_AREA_ID).unwrap().0, "AUSSRC"); } // --- DataLocation deserialisation --- @@ -365,15 +445,21 @@ mod tests { #[test] fn data_location_deserialises_from_json() { let json = r#"{ - "identifier": "UKSRC-CAM-T0", + "identifier": "MARSSRC-OLYMPUSMONS-T0", 
"associated_storage_area_id": "2a73d212-8793-4011-a687-cad99841c269", - "replicas": ["davs://xrootd01.cam.uksrc.org:1094/skadata/daac/08/06/random10MiB.bin"], + "replicas": ["davs://xrootd01.olympusmons.marssrc.org:1094/skadata/daac/08/06/random10MiB.bin"], "is_dataset": false }"#; let loc: DataLocation = serde_json::from_str(json).unwrap(); - assert_eq!(loc.identifier, "UKSRC-CAM-T0"); - assert_eq!(loc.associated_storage_area_id, "2a73d212-8793-4011-a687-cad99841c269"); - assert_eq!(loc.replicas[0], "davs://xrootd01.cam.uksrc.org:1094/skadata/daac/08/06/random10MiB.bin"); + assert_eq!(loc.identifier, "MARSSRC-OLYMPUSMONS-T0"); + assert_eq!( + loc.associated_storage_area_id, + "2a73d212-8793-4011-a687-cad99841c269" + ); + assert_eq!( + loc.replicas[0], + "davs://xrootd01.olympusmons.marssrc.org:1094/skadata/daac/08/06/random10MiB.bin" + ); assert!(!loc.is_dataset); } @@ -385,14 +471,14 @@ mod tests { "id": "ce04d165-4d5f-4380-a674-2a9ae4aba75e", "type": "rse", "relative_path": "/", - "name": "UKSRC_RAL_XRD", + "name": "MARSSRC_VALLESMARINERIS_XRD", "tier": 1 }"#; let area: StorageArea = serde_json::from_str(json).unwrap(); assert_eq!(area.id, "ce04d165-4d5f-4380-a674-2a9ae4aba75e"); assert_eq!(area.storage_type, "rse"); assert_eq!(area.relative_path, "/"); - assert_eq!(area.name, "UKSRC_RAL_XRD"); + assert_eq!(area.name, "MARSSRC_VALLESMARINERIS_XRD"); assert_eq!(area.tier, Some(1)); } diff --git a/src/path_finder.rs b/src/path_finder.rs index 75e9614..6a2f550 100644 --- a/src/path_finder.rs +++ b/src/path_finder.rs @@ -63,7 +63,7 @@ fn check_local_file_exists_impl(rse_path: &str, base: &str) -> bool { /// Extracts the canonical RSE path from the replica URIs in `data_locations`. /// /// Each replica URI (e.g. -/// `"davs://xrootd01.cam.uksrc.org:1094/skadata/ska:ska-sdp/eb-m001/data.fits"`) +/// `"davs://xrootd01.olympusmons.marssrc.org:1094/skadata/ska:ska-sdp/eb-m001/data.fits"`) /// is searched for a `//…` suffix. 
The suffix becomes the RSE /// path that is later passed to [`mount_data`]. /// @@ -134,7 +134,12 @@ pub fn extract_rse_path( /// Prints progress messages to stdout before and after the mount syscall. pub fn mount_data(rse_path: &str, namespace: &str) -> Result<()> { let sudo_user = env::var("SUDO_USER").context("SUDO_USER not set")?; - mount_data_impl(rse_path, namespace, &sudo_user, crate::mount::mount_operation) + mount_data_impl( + rse_path, + namespace, + &sudo_user, + crate::mount::mount_operation, + ) } /// Inner implementation of [`mount_data`] with an injectable `mount_fn` and @@ -211,7 +216,7 @@ mod tests { let locations = vec![make_location( "MARSSRC-OLYMPUSMONS-T0", OLYMPUSMONS_AREA_ID, - &["davs://xrootd01.example.org:1094/skadata/ska:ns/data.fits"], + &["davs://xrootd01.olympusmons.marssrc.org:1094/skadata/ska:ns/data.fits"], )]; // We exercise the enriched branch; the test passes if no panic occurs. print_data_locations_with_sites(&stores, &locations); @@ -235,10 +240,7 @@ mod tests { std::fs::create_dir_all(full.parent().unwrap()).unwrap(); std::fs::write(&full, b"").unwrap(); - assert!(check_local_file_exists_impl( - rse_path, - base_path - )); + assert!(check_local_file_exists_impl(rse_path, base_path)); } #[test] @@ -260,7 +262,7 @@ mod tests { "MARSSRC-OLYMPUSMONS-T0", OLYMPUSMONS_AREA_ID, &[&format!( - "davs://xrootd01.example.org:1094/skadata/{ns}/data.fits" + "davs://xrootd01.olympusmons.marssrc.org:1094/skadata/{ns}/data.fits" )], )]; @@ -271,11 +273,19 @@ mod tests { #[test] fn extract_rse_path_deduplicates_identical_paths_across_replicas() { let ns = "ska:ska-sdp/eb-m001-20240101-00000"; - let uri = format!("davs://xrootd01.example.org:1094/skadata/{ns}/data.fits"); + let uri = format!("davs://xrootd01.olympusmons.marssrc.org:1094/skadata/{ns}/data.fits"); // Same logical path served from two replica URIs → should succeed. 
let locations = vec![ - make_location("MARSSRC-OLYMPUSMONS-T0", OLYMPUSMONS_AREA_ID, &[uri.as_str()]), - make_location("MARSSRC-OLYMPUSMONS-T1", OLYMPUSMONS_AREA_ID, &[uri.as_str()]), + make_location( + "MARSSRC-OLYMPUSMONS-T0", + OLYMPUSMONS_AREA_ID, + &[uri.as_str()], + ), + make_location( + "MARSSRC-OLYMPUSMONS-T1", + OLYMPUSMONS_AREA_ID, + &[uri.as_str()], + ), ]; let result = extract_rse_path(&locations, ns, "data.fits").unwrap(); @@ -296,7 +306,7 @@ mod tests { let locations = vec![make_location( "MARSSRC-OLYMPUSMONS-T0", OLYMPUSMONS_AREA_ID, - &["davs://xrootd01.example.org:1094/unrelated/path/data.fits"], + &["davs://xrootd01.olympusmons.marssrc.org:1094/unrelated/path/data.fits"], )]; let err = extract_rse_path(&locations, "ska:ns", "data.fits").unwrap_err(); assert!( @@ -313,14 +323,14 @@ mod tests { "MARSSRC-OLYMPUSMONS-T0", OLYMPUSMONS_AREA_ID, &[&format!( - "davs://xrootd01.example.org:1094/skadata/{ns}/v1/data.fits" + "davs://xrootd01.olympusmons.marssrc.org:1094/skadata/{ns}/v1/data.fits" )], ), make_location( "MARSSRC-OLYMPUSMONS-T1", OLYMPUSMONS_AREA_ID, &[&format!( - "davs://xrootd01.example.org:1094/skadata/{ns}/v2/data.fits" + "davs://xrootd01.olympusmons.marssrc.org:1094/skadata/{ns}/v2/data.fits" )], ), ]; @@ -341,7 +351,7 @@ mod tests { "MARSSRC-OLYMPUSMONS-T0", OLYMPUSMONS_AREA_ID, &[&format!( - "davs://xrootd01.example.org:1094/skadata/{ns}/data.fits" + "davs://xrootd01.olympusmons.marssrc.org:1094/skadata/{ns}/data.fits" )], )]; @@ -374,8 +384,8 @@ mod tests { anyhow::bail!("bindfs failed"); }; - let err = mount_data_impl("/ska:ns/data.fits", "ska:ns", "alice", failing_mount) - .unwrap_err(); + let err = + mount_data_impl("/ska:ns/data.fits", "ska:ns", "alice", failing_mount).unwrap_err(); assert!( err.to_string().contains("bindfs failed"), "unexpected error: {err}" From 0f6be7a0b2816affaa152ad155cdb825a28427f6 Mon Sep 17 00:00:00 2001 From: Roger Duthie <343584+rjaduthie@users.noreply.github.com> Date: Tue, 31 Mar 2026 12:53:54 +0100 
Subject: [PATCH 26/27] chore(SOG-480): Make changes to remove unwanted logging --- src/main.rs | 31 +++++++++++++++---------------- src/path_finder.rs | 14 +------------- 2 files changed, 16 insertions(+), 29 deletions(-) diff --git a/src/main.rs b/src/main.rs index 6b75ead..459eefc 100644 --- a/src/main.rs +++ b/src/main.rs @@ -97,12 +97,9 @@ fn run_impl( ) -> Result<()> { client.check_namespace_available(namespace)?; - let site_storages = client.site_storage_areas()?; let data_locations = client.locate_data(namespace, file_name)?; - - print_locations(&site_storages, &data_locations); - let rse_path = extract_path(&data_locations, namespace, file_name)?; + println!( "RSE Path for file '{}' in namespace '{}': {}", file_name, namespace, rse_path @@ -110,11 +107,13 @@ fn run_impl( if !file_exists(&rse_path) { println!("\n⚠️ File not found locally! ⚠️"); + println!("Checking available storage areas at this site..."); + let site_storages = client.site_storage_areas()?; println!("\nThe file is available at the following locations:"); print_locations(&site_storages, &data_locations); println!("\nPlease ensure the data has been staged to this local site before mounting."); exit_fn(1); - return Ok(()); // unreachable in production (do_exit never returns) + return Ok(()); // unreachable in production (used for testing when exit_fn is mocked) } mount(&rse_path, namespace)?; @@ -259,34 +258,34 @@ mod tests { assert_eq!( client.check_namespace_called_with.borrow().as_deref(), Some(NS), - "check_namespace_available arg" + "check_namespace_available should be called with the provided namespace" ); assert!( - client.site_storages_called.get(), - "site_storage_areas called" + !client.site_storages_called.get(), + "site_storage_areas function should not be called in golden path" ); assert_eq!( *client.locate_data_called_with.borrow(), Some((NS.to_string(), FILE.to_string())), - "locate_data args" + "locate_data should be called with the provided namespace and file" ); // 
path-finder helpers called with the right args - assert_eq!(print_count.get(), 1, "print_locations called exactly once"); + assert_eq!(print_count.get(), 0, "print_locations not called"); assert_eq!( *extract_called_with.borrow(), Some((NS.to_string(), FILE.to_string())), - "extract_path args" + "extract_path should be called with the provided namespace and file" ); assert_eq!( file_exists_called_with.borrow().as_deref(), Some(RSE_PATH), - "file_exists arg" + "file_exists should be called with the extracted RSE path" ); assert_eq!( *mount_called_with.borrow(), Some((RSE_PATH.to_string(), NS.to_string())), - "mount args" + "mount should be called with the extracted RSE path and namespace" ); assert!( !exit_called.get(), @@ -323,7 +322,7 @@ mod tests { } #[test] - fn run_impl_prints_locations_twice_when_file_not_staged() { + fn run_impl_prints_locations_when_file_not_staged() { let client = MockApiClient::new_golden(); let print_count = Cell::new(0u32); @@ -341,8 +340,8 @@ mod tests { assert_eq!( print_count.get(), - 2, - "print_locations should be called twice when file not staged" + 1, + "print_locations should be called when file not staged" ); } diff --git a/src/path_finder.rs b/src/path_finder.rs index 6a2f550..8dfd84b 100644 --- a/src/path_finder.rs +++ b/src/path_finder.rs @@ -157,19 +157,7 @@ fn mount_data_impl( sudo_user: &str, mount_fn: impl Fn(&str, &str, &str) -> Result<()>, ) -> Result<()> { - println!( - "Mounting data from RSE path: {} in namespace: {}", - rse_path, namespace - ); - - mount_fn(rse_path, namespace, sudo_user)?; - - println!( - "Successfully mounted {} in namespace {}", - rse_path, namespace - ); - - Ok(()) + mount_fn(rse_path, namespace, sudo_user) } #[cfg(test)] From 9b3e3145e23a57c2999cc1e45d4b70fd3df9d8f2 Mon Sep 17 00:00:00 2001 From: Roger Duthie <343584+rjaduthie@users.noreply.github.com> Date: Tue, 31 Mar 2026 13:21:26 +0100 Subject: [PATCH 27/27] chore(SOG-480): Add CHANGELOG --- CHANGELOG.md | 23 +++++++++++++++++++++++ 1 file 
changed, 23 insertions(+) create mode 100644 CHANGELOG.md diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..d17abde --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,23 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [Unreleased] + +### Added + +- Test coverage and a GitHub Action to run the tests + +### Changed + +- Removed unwanted logging of RSE paths +- Only make the site capabilities API call when the file isn't found in the local RSE mount + +## v1.0.0 + +### Added + +- Initial Rust implementation