diff --git a/api_server_main.py b/api_server_main.py
index 18562c6..1cd23a8 100644
--- a/api_server_main.py
+++ b/api_server_main.py
@@ -5,6 +5,8 @@
 from cloud_pipelines_backend import api_router
 from cloud_pipelines_backend import database_ops
+from cloud_pipelines_backend.instrumentation import api_tracing
+from cloud_pipelines_backend.instrumentation import contextual_logging
 
 
 app = fastapi.FastAPI(
     title="Cloud Pipelines API",
@@ -12,14 +14,22 @@
     separate_input_output_schemas=False,
 )
 
+# Add request context middleware for automatic request_id generation
+app.add_middleware(api_tracing.RequestContextMiddleware)
+
 
 @app.exception_handler(Exception)
 def handle_error(request: fastapi.Request, exc: BaseException):
     exception_str = traceback.format_exception(type(exc), exc, exc.__traceback__)
-    return fastapi.responses.JSONResponse(
+    response = fastapi.responses.JSONResponse(
         status_code=503,
         content={"exception": exception_str},
     )
+    # Add request_id to error responses for traceability
+    request_id = contextual_logging.get_context_metadata("request_id")
+    if request_id:
+        response.headers["x-tangle-request-id"] = request_id
+    return response
 
 
 DEFAULT_DATABASE_URI = "sqlite:///db.sqlite"
diff --git a/cloud_pipelines_backend/api_router.py b/cloud_pipelines_backend/api_router.py
index 6652637..07f14cc 100644
--- a/cloud_pipelines_backend/api_router.py
+++ b/cloud_pipelines_backend/api_router.py
@@ -15,6 +15,7 @@
 from . import component_library_api_server as components_api
 from . import database_ops
 from . import errors
+from .instrumentation import contextual_logging
 
 if typing.TYPE_CHECKING:
     from .launchers import interfaces as launcher_interfaces
@@ -95,17 +96,27 @@ def _setup_routes_internal(
 
     @app.exception_handler(errors.ItemNotFoundError)
    def handle_not_found_error(request: fastapi.Request, exc: errors.ItemNotFoundError):
-        return fastapi.responses.JSONResponse(
+        response = fastapi.responses.JSONResponse(
             status_code=404,
             content={"message": str(exc)},
         )
+        # Add request_id to error responses for traceability
+        request_id = contextual_logging.get_context_metadata("request_id")
+        if request_id:
+            response.headers["x-tangle-request-id"] = request_id
+        return response
 
     @app.exception_handler(errors.PermissionError)
     def handle_permission_error(request: fastapi.Request, exc: errors.PermissionError):
-        return fastapi.responses.JSONResponse(
+        response = fastapi.responses.JSONResponse(
             status_code=403,
             content={"message": str(exc)},
         )
+        # Add request_id to error responses for traceability
+        request_id = contextual_logging.get_context_metadata("request_id")
+        if request_id:
+            response.headers["x-tangle-request-id"] = request_id
+        return response
 
 
     get_user_details_dependency = fastapi.Depends(user_details_getter)
diff --git a/cloud_pipelines_backend/instrumentation/api_tracing.py b/cloud_pipelines_backend/instrumentation/api_tracing.py
new file mode 100644
index 0000000..1dc4d65
--- /dev/null
+++ b/cloud_pipelines_backend/instrumentation/api_tracing.py
@@ -0,0 +1,64 @@
+"""Request context middleware for FastAPI applications.
+
+This middleware automatically generates a request_id for each incoming HTTP request,
+sets it in the logging context for the duration of the request, and includes it in
+the response headers.
+"""
+
+import logging
+import secrets
+
+from starlette.middleware.base import BaseHTTPMiddleware
+from starlette.requests import Request
+from starlette.responses import Response
+
+from . import contextual_logging
+
+logger = logging.getLogger(__name__)
+
+
+def generate_request_id() -> str:
+    """Generate a new request ID compatible with OpenTelemetry format.
+
+    OpenTelemetry trace IDs are 16-byte (128-bit) values represented as
+    32 hexadecimal characters (lowercase). We use the same format for
+    request IDs to maintain compatibility.
+
+    Returns:
+        A 32-character hexadecimal string representing the request ID
+    """
+    return secrets.token_hex(16)
+
+
+class RequestContextMiddleware(BaseHTTPMiddleware):
+    """Middleware to manage request_id for each request.
+
+    For each incoming request:
+    1. Generates a new request_id (32-character hex string)
+    2. Sets it in the logging context (as 'request_id' key)
+    3. Adds it to the response headers as 'x-tangle-request-id'
+    4. Clears it after the request completes
+
+    This ensures all logs during the request processing include the same request_id.
+    """
+
+    async def dispatch(self, request: Request, call_next) -> Response:
+        """Process each request with a new request_id.
+
+        Args:
+            request: The incoming HTTP request
+            call_next: The next middleware or route handler
+
+        Returns:
+            The HTTP response with request_id in headers
+        """
+        # Generate a new request_id for this request
+        request_id = generate_request_id()
+
+        # Use generic logging_context to set request_id
+        with contextual_logging.logging_context(request_id=request_id):
+            # Process the request
+            response = await call_next(request)
+            # Add request_id to response headers for client reference
+            response.headers["x-tangle-request-id"] = request_id
+            return response
diff --git a/cloud_pipelines_backend/instrumentation/contextual_logging.py b/cloud_pipelines_backend/instrumentation/contextual_logging.py
new file mode 100644
index 0000000..d9414ab
--- /dev/null
+++ b/cloud_pipelines_backend/instrumentation/contextual_logging.py
@@ -0,0 +1,127 @@
+"""Logging context management for distributed tracing and execution tracking.
+
+This module provides utilities for managing arbitrary metadata in the logging context.
+This metadata is automatically added to all log records for better filtering and correlation.
+
+Common metadata keys:
+- request_id: From API requests - groups all logs from a single API call
+- pipeline_run_id: From PipelineRun.id - tracks the entire pipeline run
+- execution_id: From ExecutionNode.id - tracks individual execution nodes
+- container_execution_id: From ContainerExecution.id - tracks running containers
+- user_id: User who initiated the operation
+- Any other metadata you want to track in logs
+
+Usage:
+    # Set metadata in context
+    with logging_context(request_id="abc123", user_id="user@example.com"):
+        logger.info("Processing")  # Both fields in logs
+
+    # Or use individual functions
+    set_context_metadata("request_id", "abc123")
+    delete_context_metadata("request_id")  # Remove a specific key
+"""
+
+import contextvars
+from contextlib import contextmanager
+from typing import Any, Optional
+
+# Single context variable to store all metadata as a dictionary
+_context_metadata: contextvars.ContextVar[dict[str, Any]] = contextvars.ContextVar(
+    "context_metadata", default={}
+)
+
+
+def set_context_metadata(key: str, value: Any) -> None:
+    """Set a metadata value in the current context.
+
+    Args:
+        key: The metadata key (e.g., 'execution_id', 'request_id', 'user_id')
+        value: The value to set
+    """
+    metadata = _context_metadata.get().copy()
+    metadata[key] = value
+    _context_metadata.set(metadata)
+
+
+def delete_context_metadata(key: str) -> None:
+    """Delete a metadata key from the current context.
+
+    Similar to dict.pop() but doesn't return a value. If the key doesn't exist,
+    this is a no-op (no error is raised).
+
+    Args:
+        key: The metadata key to delete (e.g., 'execution_id', 'request_id')
+    """
+    metadata = _context_metadata.get().copy()
+    metadata.pop(key, None)  # Use None as default to avoid KeyError
+    _context_metadata.set(metadata)
+
+
+def get_context_metadata(key: str) -> Optional[Any]:
+    """Get a metadata value from the current context.
+
+    Args:
+        key: The metadata key to retrieve
+
+    Returns:
+        The metadata value or None if not set
+    """
+    return _context_metadata.get().get(key)
+
+
+def get_all_context_metadata() -> dict[str, Any]:
+    """Get all metadata from the current context.
+
+    Returns:
+        Dictionary of all context metadata
+    """
+    return _context_metadata.get().copy()
+
+
+def clear_context_metadata() -> None:
+    """Clear all metadata from the current context."""
+    _context_metadata.set({})
+
+
+@contextmanager
+def logging_context(**metadata: Any):
+    """Context manager for setting arbitrary metadata that is automatically cleared.
+
+    This is the recommended way to set logging context. It ensures metadata is
+    always cleaned up, even if an exception occurs.
+
+    You can pass any keyword arguments, and they will be available in log records.
+    Common keys include: request_id, pipeline_run_id, execution_id, container_execution_id, user_id
+
+    Args:
+        **metadata: Arbitrary keyword arguments to add to the context
+
+    Example with IDs:
+        >>> with logging_context(pipeline_run_id="run123", execution_id="exec456"):
+        ...     logger.info("Processing execution")  # Will include both IDs
+
+    Example with custom metadata:
+        >>> with logging_context(
+        ...     execution_id="exec456",
+        ...     user_id="user@example.com",
+        ...     operation="reprocessing"
+        ... ):
+        ...     logger.info("Custom operation")  # All metadata in logs
+
+    Example for API requests:
+        >>> request_id = generate_request_id()
+        >>> with logging_context(request_id=request_id):
+        ...     logger.info("Handling API request")
+    """
+    # Store previous metadata to restore nested contexts
+    prev_metadata = get_all_context_metadata()
+
+    try:
+        # Set all provided metadata
+        for key, value in metadata.items():
+            if value is not None:  # Only set non-None values
+                set_context_metadata(key, value)
+        yield
+    finally:
+        # Restore previous metadata
+        _context_metadata.set(prev_metadata)
diff --git a/cloud_pipelines_backend/instrumentation/structured_logging.py b/cloud_pipelines_backend/instrumentation/structured_logging.py
new file mode 100644
index 0000000..572d53e
--- /dev/null
+++ b/cloud_pipelines_backend/instrumentation/structured_logging.py
@@ -0,0 +1,50 @@
+"""Structured logging components for context-aware log formatting.
+
+This module provides logging filters and formatters that integrate with the
+contextual_logging module to automatically include context metadata in log records.
+"""
+
+import logging
+
+from . import contextual_logging
+
+
+class LoggingContextFilter(logging.Filter):
+    """Logging filter that adds contextual metadata to log records.
+
+    This filter automatically adds metadata like execution_id and container_execution_id
+    to log records, making it easier to trace logs for specific executions.
+    """
+
+    def filter(self, record: logging.LogRecord) -> bool:
+        """Add contextual metadata to the log record."""
+        for key, value in contextual_logging.get_all_context_metadata().items():
+            if value is not None:
+                setattr(record, key, value)
+        return True
+
+
+class ContextAwareFormatter(logging.Formatter):
+    """Formatter that dynamically includes context fields only when they're set."""
+
+    def format(self, record: logging.LogRecord) -> str:
+        """Format log record with dynamic context fields."""
+        # Base format
+        base_format = "%(asctime)s [%(levelname)s] %(name)s"
+
+        # Collect context fields that are present
+        context_parts = []
+        context_metadata = contextual_logging.get_all_context_metadata()
+        for key, value in context_metadata.items():
+            if value is not None and hasattr(record, key):
+                context_parts.append(f"{key}={value}")
+
+        # Add context to format if any exists
+        if context_parts:
+            base_format += " [" + " ".join(context_parts) + "]"
+
+        base_format += ": %(message)s"
+
+        # Create formatter with the dynamic format
+        formatter = logging.Formatter(base_format)
+        return formatter.format(record)
diff --git a/cloud_pipelines_backend/orchestrator_sql.py b/cloud_pipelines_backend/orchestrator_sql.py
index e81f09a..3d5c211 100644
--- a/cloud_pipelines_backend/orchestrator_sql.py
+++ b/cloud_pipelines_backend/orchestrator_sql.py
@@ -21,6 +21,8 @@
 from .launchers import common_annotations
 from .launchers import interfaces as launcher_interfaces
 
+# Import contextual_logging for execution ID tracking in logs
+from .instrumentation import contextual_logging
 
 _logger = logging.getLogger(__name__)
 
@@ -94,25 +96,26 @@ def internal_process_queued_executions_queue(self, session: orm.Session):
         queued_execution = session.scalar(query)
         if queued_execution:
             self._queued_executions_queue_idle = False
-            start_timestamp = time.monotonic_ns()
-            _logger.info(f"Before processing {queued_execution.id=}")
-            try:
-                self.internal_process_one_queued_execution(
-                    session=session, execution=queued_execution
-                )
-            except Exception as ex:
-                _logger.exception(f"Error processing {queued_execution.id=}")
-                session.rollback()
-                queued_execution.container_execution_status = (
-                    bts.ContainerExecutionStatus.SYSTEM_ERROR
-                )
-                record_system_error_exception(execution=queued_execution, exception=ex)
-                session.commit()
-            finally:
-                duration_ms = int((time.monotonic_ns() - start_timestamp) / 1_000_000)
-                _logger.info(
-                    f"After processing {queued_execution.id=}. 
Duration={duration_ms}ms" - ) + + # Set execution context for logging + with contextual_logging.logging_context(execution_id=queued_execution.id): + _logger.info("Before processing queued execution") + try: + self.internal_process_one_queued_execution( + session=session, execution=queued_execution + ) + except Exception as ex: + _logger.exception("Error processing queued execution") + session.rollback() + queued_execution.container_execution_status = ( + bts.ContainerExecutionStatus.SYSTEM_ERROR + ) + record_system_error_exception( + execution=queued_execution, exception=ex + ) + session.commit() + _logger.info("After processing queued execution") + return True else: if not self._queued_executions_queue_idle: @@ -137,42 +140,46 @@ def internal_process_running_executions_queue(self, session: orm.Session): running_container_execution = session.scalar(query) if running_container_execution: self._running_executions_queue_idle = False - start_timestamp = time.monotonic_ns() - _logger.info(f"Before processing {running_container_execution.id=}") - try: - self.internal_process_one_running_execution( - session=session, container_execution=running_container_execution - ) - except Exception as ex: - _logger.exception(f"Error processing {running_container_execution.id=}") - session.rollback() - running_container_execution.status = ( - bts.ContainerExecutionStatus.SYSTEM_ERROR - ) - # Doing an intermediate commit here because it's most important to mark the problematic execution as SYSTEM_ERROR. - session.commit() - # Mark our ExecutionNode as SYSTEM_ERROR - execution_nodes = running_container_execution.execution_nodes - for execution_node in execution_nodes: - execution_node.container_execution_status = ( - bts.ContainerExecutionStatus.SYSTEM_ERROR - ) - record_system_error_exception( - execution=execution_node, exception=ex + + # Set execution context for logging (includes container_execution_id) + # Get first execution_node_id for context (there may be multiple nodes using same container) + execution_nodes = running_container_execution.execution_nodes + execution_node_id = execution_nodes[0].id if execution_nodes else None + + with contextual_logging.logging_context( + execution_node_id=execution_node_id, + container_execution_id=running_container_execution.id, + ): + _logger.info("Before processing running container execution") + try: + self.internal_process_one_running_execution( + session=session, container_execution=running_container_execution ) - # Doing an intermediate commit here because it's most important to mark the problematic node as SYSTEM_ERROR. - session.commit() - # Skip downstream executions - for execution_node in execution_nodes: - _mark_all_downstream_executions_as_skipped( - session=session, execution=execution_node + except Exception as ex: + _logger.exception("Error processing running container execution") + session.rollback() + running_container_execution.status = ( + bts.ContainerExecutionStatus.SYSTEM_ERROR ) - session.commit() - finally: - duration_ms = int((time.monotonic_ns() - start_timestamp) / 1_000_000) - _logger.info( - f"After processing {running_container_execution.id=}. Duration={duration_ms}ms" - ) + # Doing an intermediate commit here because it's most important to mark the problematic execution as SYSTEM_ERROR. 
+ session.commit() + # Mark our ExecutionNode as SYSTEM_ERROR + for execution_node in execution_nodes: + execution_node.container_execution_status = ( + bts.ContainerExecutionStatus.SYSTEM_ERROR + ) + record_system_error_exception( + execution=execution_node, exception=ex + ) + # Doing an intermediate commit here because it's most important to mark the problematic node as SYSTEM_ERROR. + session.commit() + # Skip downstream executions + for execution_node in execution_nodes: + _mark_all_downstream_executions_as_skipped( + session=session, execution=execution_node + ) + session.commit() + _logger.info("After processing running container execution") return True else: if not self._running_executions_queue_idle: @@ -286,7 +293,7 @@ def internal_process_one_queued_execution( # There must be at least one SUCCEEDED/RUNNING/PENDING since non_purged_candidates is non-empty. old_execution = non_purged_candidates[-1] _logger.info( - f"Execution {execution.id=} will reuse the {old_execution.id=} with " + f"Reusing cached execution node {old_execution.id} with " f"{old_execution.container_execution_id=}, {old_execution.container_execution_status=}" ) # Reusing the execution: @@ -589,19 +596,15 @@ def internal_process_one_running_execution( terminated = False if votes_to_not_terminate: _logger.info( - f"Not terminating container execution {container_execution.id=} since some other executions ({[execution_node.id for execution_node in votes_to_not_terminate]}) are still using it." + f"Not terminating container execution since some other executions ({[execution_node.id for execution_node in votes_to_not_terminate]}) are still using it." ) else: - _logger.info( - f"Terminating container execution {container_execution.id}." - ) + _logger.info("Terminating container execution.") # We should preserve the logs before terminating/deleting the container try: _retry(lambda: launched_container.upload_log()) except: - _logger.exception( - f"Error uploading logs for {container_execution.id=} before termination." - ) + _logger.exception("Error uploading logs before termination.") # Requesting container termination. # Termination might not happen immediately (e.g. Kubernetes has grace period). launched_container.terminate() @@ -613,7 +616,7 @@ def internal_process_one_running_execution( # Mark the execution nodes as cancelled only after the launched container is successfully terminated (if needed) for execution_node in votes_to_terminate: _logger.info( - f"Cancelling execution {execution_node.id} ({container_execution.id=}) and skipping all downstream executions." + f"Cancelling execution {execution_node.id} and skipping all downstream executions." ) execution_node.container_execution_status = ( bts.ContainerExecutionStatus.CANCELLED @@ -641,24 +644,22 @@ def internal_process_one_running_execution( reloaded_launched_container.status ) if new_status == previous_status: - _logger.info( - f"Container execution {container_execution.id} remains in {new_status} state." - ) + _logger.info(f"Container execution remains in {new_status} state.") return _logger.info( - f"Container execution {container_execution.id} is now in state {new_status} (was {previous_status})." + f"Container execution is now in state {new_status} (was {previous_status})." ) session.rollback() container_execution.updated_at = current_time execution_nodes = container_execution.execution_nodes if not execution_nodes: raise OrchestratorError( - f"Could not find ExecutionNode associated with ContainerExecution. 
{container_execution.id=}" + f"Could not find ExecutionNode associated with ContainerExecution." ) if len(execution_nodes) > 1: execution_node_ids = [execution.id for execution in execution_nodes] _logger.warning( - f"ContainerExecution is associated with multiple ExecutionNodes: {container_execution.id=}, {execution_node_ids=}" + f"ContainerExecution is associated with multiple ExecutionNodes: {execution_node_ids=}" ) if new_status == launcher_interfaces.ContainerStatus.RUNNING: @@ -727,7 +728,7 @@ def _maybe_preload_value( if missing_output_names: # Marking the container execution as FAILED (even though the program itself has completed successfully) container_execution.status = bts.ContainerExecutionStatus.FAILED - orchestration_error_message = f"Container execution {container_execution.id} is marked as FAILED due to missing outputs: {missing_output_names}." + orchestration_error_message = f"Container execution is marked as FAILED due to missing outputs: {missing_output_names}." _logger.error(orchestration_error_message) _record_orchestration_error_message( container_execution=container_execution, @@ -828,7 +829,7 @@ def _maybe_preload_value( ) else: _logger.error( - f"Container execution {container_execution.id} is now in unexpected state {new_status}. System error. {container_execution=}" + f"Container execution is now in unexpected state {new_status}. System error. {container_execution=}" ) # This SYSTEM_ERROR will be handled by the outer exception handler raise OrchestratorError( diff --git a/pyproject.toml b/pyproject.toml index 5f68f78..75ae905 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,6 +14,7 @@ dependencies = [ [dependency-groups] dev = [ + "black>=26.1.0", "pytest>=8.4.2", ] huggingface = [ diff --git a/start_local.py b/start_local.py index e98ae69..844ec30 100644 --- a/start_local.py +++ b/start_local.py @@ -78,19 +78,29 @@ def get_user_details(request: fastapi.Request): # region: Logging configuration import logging.config +from cloud_pipelines_backend.instrumentation import structured_logging LOGGING_CONFIG = { "version": 1, "disable_existing_loggers": True, "formatters": { "standard": {"format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s"}, + "with_context": { + "()": structured_logging.ContextAwareFormatter, + }, + }, + "filters": { + "context_filter": { + "()": structured_logging.LoggingContextFilter, + }, }, "handlers": { "default": { "level": "INFO", - "formatter": "standard", + "formatter": "with_context", "class": "logging.StreamHandler", "stream": "ext://sys.stderr", + "filters": ["context_filter"], }, }, "loggers": { @@ -205,6 +215,8 @@ def run_orchestrator( from cloud_pipelines_backend import api_router from cloud_pipelines_backend import database_ops +from cloud_pipelines_backend.instrumentation import api_tracing +from cloud_pipelines_backend.instrumentation import contextual_logging @contextlib.asynccontextmanager @@ -230,14 +242,22 @@ async def lifespan(app: fastapi.FastAPI): lifespan=lifespan, ) +# Add request context middleware for automatic request_id generation +app.add_middleware(api_tracing.RequestContextMiddleware) + @app.exception_handler(Exception) def handle_error(request: fastapi.Request, exc: BaseException): exception_str = traceback.format_exception(type(exc), exc, exc.__traceback__) - return fastapi.responses.JSONResponse( + response = fastapi.responses.JSONResponse( status_code=503, content={"exception": exception_str}, ) + # Add request_id to error responses for traceability + request_id = 
contextual_logging.get_context_metadata("request_id") + if request_id: + response.headers["x-tangle-request-id"] = request_id + return response api_router.setup_routes( diff --git a/tests/test_context_aware_formatter_exception.py b/tests/test_context_aware_formatter_exception.py new file mode 100644 index 0000000..75e7375 --- /dev/null +++ b/tests/test_context_aware_formatter_exception.py @@ -0,0 +1,187 @@ +""" +Test to demonstrate how logger.exception() behaves with ContextAwareFormatter. + +This test shows that the ContextAwareFormatter properly handles exception logging +including full tracebacks. +""" + +import logging +import logging.config +import io +import sys +import os + +# Add the parent directory to the path so we can import the module +sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +from cloud_pipelines_backend.instrumentation import contextual_logging +from cloud_pipelines_backend.instrumentation.structured_logging import ( + ContextAwareFormatter, + LoggingContextFilter, +) + + +def test_logger_exception_with_context_aware_formatter(): + """Test that logger.exception() properly includes tracebacks with ContextAwareFormatter.""" + + # Create a string buffer to capture log output + log_buffer = io.StringIO() + + # Configure logging with ContextAwareFormatter + LOGGING_CONFIG = { + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "with_context": { + "()": ContextAwareFormatter, + }, + }, + "filters": { + "context_filter": { + "()": LoggingContextFilter, + }, + }, + "handlers": { + "test_handler": { + "level": "DEBUG", + "formatter": "with_context", + "class": "logging.StreamHandler", + "stream": log_buffer, + "filters": ["context_filter"], + }, + }, + "loggers": { + "test_logger": { + "level": "DEBUG", + "handlers": ["test_handler"], + "propagate": False, + }, + }, + } + + logging.config.dictConfig(LOGGING_CONFIG) + test_logger = logging.getLogger("test_logger") + + print("\n" + "=" * 80) + print("TEST 1: logger.exception() WITHOUT context") + print("=" * 80) + + try: + # Simulate an error + result = 1 / 0 + except ZeroDivisionError: + test_logger.exception("An error occurred while dividing") + + output1 = log_buffer.getvalue() + print(output1) + + # Verify the traceback is present + assert "Traceback" in output1, "Traceback should be present in exception logs" + assert "ZeroDivisionError" in output1, "Exception type should be in logs" + assert "division by zero" in output1, "Exception message should be in logs" + + # Clear buffer for next test + log_buffer.truncate(0) + log_buffer.seek(0) + + print("\n" + "=" * 80) + print("TEST 2: logger.exception() WITH context metadata") + print("=" * 80) + + # Add some context metadata + with contextual_logging.logging_context( + execution_id="exec-12345", + container_execution_id="container-67890", + pipeline_run_id="run-abc123", + ): + try: + # Simulate another error with nested function calls + def inner_function(): + return {"key": "value"}[NonExistentKey] # noqa: F821 + + def outer_function(): + inner_function() + + outer_function() + except Exception: + test_logger.exception("Failed to process pipeline") + + output2 = log_buffer.getvalue() + print(output2) + + # Verify context is included + assert "execution_id=exec-12345" in output2, "execution_id should be in logs" + assert ( + "container_execution_id=container-67890" in output2 + ), "container_execution_id should be in logs" + assert "pipeline_run_id=run-abc123" in output2, "pipeline_run_id should be in logs" + + # Verify 
the traceback is still present with context + assert "Traceback" in output2, "Traceback should be present even with context" + assert "NameError" in output2, "Exception type should be in logs" + assert ( + "inner_function" in output2 + ), "Function names from traceback should be present" + assert ( + "outer_function" in output2 + ), "Function names from traceback should be present" + + # Clear buffer for next test + log_buffer.truncate(0) + log_buffer.seek(0) + + print("\n" + "=" * 80) + print("TEST 3: logger.error() vs logger.exception() comparison") + print("=" * 80) + + with contextual_logging.logging_context(execution_id="exec-99999"): + try: + raise ValueError("This is a test error") + except ValueError: + test_logger.error("Error logged with .error()") + + output3 = log_buffer.getvalue() + print("logger.error() output:") + print(output3) + print() + + # Verify .error() does NOT include traceback + assert "Traceback" not in output3, "logger.error() should NOT include traceback" + assert ( + "ValueError" not in output3 + ), "logger.error() should NOT include exception details" + + # Clear buffer + log_buffer.truncate(0) + log_buffer.seek(0) + + with contextual_logging.logging_context(execution_id="exec-99999"): + try: + raise ValueError("This is a test error") + except ValueError: + test_logger.exception("Error logged with .exception()") + + output4 = log_buffer.getvalue() + print("logger.exception() output:") + print(output4) + + # Verify .exception() DOES include traceback + assert "Traceback" in output4, "logger.exception() SHOULD include traceback" + assert ( + "ValueError" in output4 + ), "logger.exception() SHOULD include exception details" + + print("\n" + "=" * 80) + print("SUMMARY") + print("=" * 80) + print("✅ ContextAwareFormatter properly handles logger.exception()") + print("✅ Full tracebacks are included in exception logs") + print("✅ Context metadata is preserved alongside tracebacks") + print( + "✅ The formatter delegates to logging.Formatter.format() which handles exc_info" + ) + print("=" * 80) + + +if __name__ == "__main__": + test_logger_exception_with_context_aware_formatter() diff --git a/tests/test_instrumentation_logging_context.py b/tests/test_instrumentation_logging_context.py new file mode 100644 index 0000000..dfd5959 --- /dev/null +++ b/tests/test_instrumentation_logging_context.py @@ -0,0 +1,175 @@ +"""Tests for the logging_context module in instrumentation.""" + +import pytest +from cloud_pipelines_backend.instrumentation import contextual_logging +from cloud_pipelines_backend.instrumentation.api_tracing import generate_request_id + + +class TestLoggingContext: + """Tests for logging context management.""" + + def setup_method(self): + """Clear any existing context before each test.""" + contextual_logging.clear_context_metadata() + + def teardown_method(self): + """Clear context after each test.""" + contextual_logging.clear_context_metadata() + + def test_set_and_get_context_metadata(self): + """Test setting and getting context metadata.""" + test_id = "abc123def456abc123def456abc12345" + + contextual_logging.set_context_metadata("request_id", test_id) + + assert contextual_logging.get_context_metadata("request_id") == test_id + + def test_get_context_metadata_returns_none_when_not_set(self): + """Test that get_context_metadata returns None when key is not set.""" + assert contextual_logging.get_context_metadata("request_id") is None + + def test_clear_context_metadata(self): + """Test clearing all context metadata.""" + 
contextual_logging.set_context_metadata("request_id", "test123") + contextual_logging.set_context_metadata("execution_id", "exec456") + contextual_logging.clear_context_metadata() + + assert contextual_logging.get_context_metadata("request_id") is None + assert contextual_logging.get_context_metadata("execution_id") is None + + def test_overwrite_context_metadata(self): + """Test that setting a new value overwrites the old one.""" + contextual_logging.set_context_metadata("request_id", "first_id") + contextual_logging.set_context_metadata("request_id", "second_id") + + assert contextual_logging.get_context_metadata("request_id") == "second_id" + + def test_get_all_context_metadata(self): + """Test getting all context metadata at once.""" + contextual_logging.set_context_metadata("request_id", "req123") + contextual_logging.set_context_metadata("execution_id", "exec456") + contextual_logging.set_context_metadata("custom_field", "value789") + + all_metadata = contextual_logging.get_all_context_metadata() + + assert all_metadata["request_id"] == "req123" + assert all_metadata["execution_id"] == "exec456" + assert all_metadata["custom_field"] == "value789" + + +class TestLoggingContextManager: + """Tests for the logging_context context manager.""" + + def setup_method(self): + """Clear any existing context before each test.""" + contextual_logging.clear_context_metadata() + + def teardown_method(self): + """Clear context after each test.""" + contextual_logging.clear_context_metadata() + + def test_context_manager_sets_and_restores_metadata(self): + """Test that context manager sets metadata on enter and restores on exit.""" + test_id = "context_test_123" + + assert contextual_logging.get_context_metadata("request_id") is None + + with contextual_logging.logging_context(request_id=test_id): + assert contextual_logging.get_context_metadata("request_id") == test_id + + assert contextual_logging.get_context_metadata("request_id") is None + + def test_context_manager_with_multiple_keys(self): + """Test that context manager handles multiple metadata keys.""" + with contextual_logging.logging_context( + request_id="req123", execution_id="exec456", pipeline_run_id="run789" + ): + assert contextual_logging.get_context_metadata("request_id") == "req123" + assert contextual_logging.get_context_metadata("execution_id") == "exec456" + assert ( + contextual_logging.get_context_metadata("pipeline_run_id") == "run789" + ) + + assert contextual_logging.get_context_metadata("request_id") is None + assert contextual_logging.get_context_metadata("execution_id") is None + assert contextual_logging.get_context_metadata("pipeline_run_id") is None + + def test_context_manager_with_none_values(self): + """Test that context manager skips None values.""" + with contextual_logging.logging_context(request_id="req123", execution_id=None): + assert contextual_logging.get_context_metadata("request_id") == "req123" + assert contextual_logging.get_context_metadata("execution_id") is None + + assert contextual_logging.get_context_metadata("request_id") is None + + def test_context_manager_clears_on_exception(self): + """Test that context manager restores metadata even when exception occurs.""" + test_id = "exception_test" + + with pytest.raises(ValueError): + with contextual_logging.logging_context(request_id=test_id): + assert contextual_logging.get_context_metadata("request_id") == test_id + raise ValueError("Test exception") + + # Metadata should be cleared even after exception + assert 
contextual_logging.get_context_metadata("request_id") is None + + def test_context_manager_nested(self): + """Test nested context managers.""" + outer_id = "outer_id" + inner_id = "inner_id" + + with contextual_logging.logging_context(request_id=outer_id): + assert contextual_logging.get_context_metadata("request_id") == outer_id + + with contextual_logging.logging_context(request_id=inner_id): + assert contextual_logging.get_context_metadata("request_id") == inner_id + + # After inner context exits, outer context is restored + assert contextual_logging.get_context_metadata("request_id") == outer_id + + assert contextual_logging.get_context_metadata("request_id") is None + + def test_context_manager_with_generated_request_id(self): + """Test using context manager with a generated request_id.""" + generated_id = generate_request_id() + + with contextual_logging.logging_context(request_id=generated_id): + assert contextual_logging.get_context_metadata("request_id") == generated_id + assert len(contextual_logging.get_context_metadata("request_id")) == 32 + + assert contextual_logging.get_context_metadata("request_id") is None + + def test_context_manager_multiple_sequential_uses(self): + """Test using context manager multiple times sequentially.""" + ids = ["id1", "id2", "id3"] + + for test_id in ids: + with contextual_logging.logging_context(request_id=test_id): + assert contextual_logging.get_context_metadata("request_id") == test_id + assert contextual_logging.get_context_metadata("request_id") is None + + def test_context_manager_preserves_existing_metadata(self): + """Test that nested context preserves existing metadata not being overwritten.""" + with contextual_logging.logging_context( + request_id="req123", execution_id="exec456" + ): + assert contextual_logging.get_context_metadata("request_id") == "req123" + assert contextual_logging.get_context_metadata("execution_id") == "exec456" + + # Inner context only sets pipeline_run_id + with contextual_logging.logging_context(pipeline_run_id="run789"): + # Previous values should still be accessible + assert contextual_logging.get_context_metadata("request_id") == "req123" + assert ( + contextual_logging.get_context_metadata("execution_id") == "exec456" + ) + assert ( + contextual_logging.get_context_metadata("pipeline_run_id") + == "run789" + ) + + # After inner exits, pipeline_run_id is gone but others remain + assert contextual_logging.get_context_metadata("request_id") == "req123" + assert contextual_logging.get_context_metadata("execution_id") == "exec456" + assert contextual_logging.get_context_metadata("pipeline_run_id") is None diff --git a/tests/test_instrumentation_request_middleware.py b/tests/test_instrumentation_request_middleware.py new file mode 100644 index 0000000..70dfeb3 --- /dev/null +++ b/tests/test_instrumentation_request_middleware.py @@ -0,0 +1,327 @@ +"""Tests for the request_middleware module in instrumentation.""" + +import pytest +from unittest.mock import AsyncMock, MagicMock +from starlette.requests import Request +from starlette.responses import Response +from starlette.applications import Starlette +from starlette.testclient import TestClient + +from cloud_pipelines_backend.instrumentation import contextual_logging +from cloud_pipelines_backend.instrumentation.api_tracing import ( + RequestContextMiddleware, + generate_request_id, +) + + +class TestRequestIdGeneration: + """Tests for request_id generation.""" + + def test_generate_request_id_returns_32_char_hex(self): + """Test that generated request_id is 32 
hexadecimal characters.""" + request_id = generate_request_id() + + assert len(request_id) == 32 + assert all(c in "0123456789abcdef" for c in request_id) + + def test_generate_request_id_is_unique(self): + """Test that each generated request_id is unique.""" + request_ids = {generate_request_id() for _ in range(100)} + + # All 100 should be unique + assert len(request_ids) == 100 + + def test_generate_request_id_is_lowercase(self): + """Test that generated request_id uses lowercase hex.""" + request_id = generate_request_id() + + assert request_id == request_id.lower() + + +class TestRequestIdFormatting: + """Tests for request_id format validation.""" + + def test_generated_request_id_format(self): + """Test that generated request_id matches expected format.""" + request_id = generate_request_id() + + # Should be 32 characters + assert len(request_id) == 32 + + # Should be valid hex + try: + int(request_id, 16) + except ValueError: + pytest.fail("request_id is not valid hexadecimal") + + # Should be lowercase + assert request_id.islower() + + def test_request_id_is_128_bits(self): + """Test that request_id represents 128 bits (16 bytes).""" + request_id = generate_request_id() + + # 32 hex characters = 16 bytes = 128 bits + assert len(bytes.fromhex(request_id)) == 16 + + +class TestRequestContextMiddleware: + """Tests for RequestContextMiddleware.""" + + def setup_method(self): + """Clear any existing context before each test.""" + contextual_logging.clear_context_metadata() + + def teardown_method(self): + """Clear context after each test.""" + contextual_logging.clear_context_metadata() + + def test_middleware_generates_request_id(self): + """Test that middleware generates a request_id for each request.""" + app = Starlette() + app.add_middleware(RequestContextMiddleware) + + request_ids_seen = [] + + @app.route("/test") + def test_route(request): + # Capture the request_id during request processing + request_ids_seen.append( + contextual_logging.get_context_metadata("request_id") + ) + return Response("ok") + + client = TestClient(app) + response = client.get("/test") + + assert response.status_code == 200 + assert len(request_ids_seen) == 1 + assert request_ids_seen[0] is not None + assert len(request_ids_seen[0]) == 32 + + def test_middleware_adds_request_id_to_response_headers(self): + """Test that middleware adds request_id to response headers.""" + app = Starlette() + app.add_middleware(RequestContextMiddleware) + + @app.route("/test") + def test_route(request): + return Response("ok") + + client = TestClient(app) + response = client.get("/test") + + assert "x-tangle-request-id" in response.headers + request_id = response.headers["x-tangle-request-id"] + assert len(request_id) == 32 + assert all(c in "0123456789abcdef" for c in request_id) + + def test_middleware_clears_request_id_after_request(self): + """Test that middleware clears request_id after request completes.""" + app = Starlette() + app.add_middleware(RequestContextMiddleware) + + @app.route("/test") + def test_route(request): + assert contextual_logging.get_context_metadata("request_id") is not None + return Response("ok") + + client = TestClient(app) + + # Before request + assert contextual_logging.get_context_metadata("request_id") is None + + # Make request + response = client.get("/test") + assert response.status_code == 200 + + # After request - Note: in test client, context might not be cleared + # the same way as in production, but the middleware's context manager ensures it + + def 
test_middleware_generates_unique_request_ids(self): + """Test that middleware generates unique request_ids for each request.""" + app = Starlette() + app.add_middleware(RequestContextMiddleware) + + @app.route("/test") + def test_route(request): + return Response("ok") + + client = TestClient(app) + + # Make multiple requests + request_ids = set() + for _ in range(10): + response = client.get("/test") + request_ids.add(response.headers["x-tangle-request-id"]) + + # All request_ids should be unique + assert len(request_ids) == 10 + + def test_middleware_request_id_available_in_route(self): + """Test that request_id set by middleware is available in route handler.""" + app = Starlette() + app.add_middleware(RequestContextMiddleware) + + captured_request_id = None + + @app.route("/test") + def test_route(request): + nonlocal captured_request_id + captured_request_id = contextual_logging.get_context_metadata("request_id") + return Response(f"request_id: {captured_request_id}") + + client = TestClient(app) + response = client.get("/test") + + assert captured_request_id is not None + assert captured_request_id == response.headers["x-tangle-request-id"] + assert captured_request_id in response.text + + def test_middleware_handles_exception_in_route(self): + """Test that middleware clears request_id even when route raises exception.""" + app = Starlette() + app.add_middleware(RequestContextMiddleware) + + @app.route("/test") + def test_route(request): + request_id_during_exception = contextual_logging.get_context_metadata( + "request_id" + ) + assert request_id_during_exception is not None + raise ValueError("Test exception") + + client = TestClient(app, raise_server_exceptions=False) + response = client.get("/test") + + # Even though route raised exception, response should have request_id header + # (middleware's context manager ensures cleanup) + assert response.status_code == 500 + + def test_middleware_with_multiple_routes(self): + """Test middleware works correctly with multiple routes.""" + app = Starlette() + app.add_middleware(RequestContextMiddleware) + + request_ids_by_route = {} + + @app.route("/route1") + def route1(request): + request_ids_by_route["route1"] = contextual_logging.get_context_metadata( + "request_id" + ) + return Response("route1") + + @app.route("/route2") + def route2(request): + request_ids_by_route["route2"] = contextual_logging.get_context_metadata( + "request_id" + ) + return Response("route2") + + client = TestClient(app) + + response1 = client.get("/route1") + response2 = client.get("/route2") + + # Each route should have gotten a request_id + assert request_ids_by_route["route1"] is not None + assert request_ids_by_route["route2"] is not None + + # They should be different + assert request_ids_by_route["route1"] != request_ids_by_route["route2"] + + # Response headers should match + assert ( + response1.headers["x-tangle-request-id"] == request_ids_by_route["route1"] + ) + assert ( + response2.headers["x-tangle-request-id"] == request_ids_by_route["route2"] + ) + + +class TestRequestContextMiddlewareIntegration: + """Integration tests for RequestContextMiddleware with logging.""" + + def setup_method(self): + """Clear any existing context before each test.""" + contextual_logging.clear_context_metadata() + + def teardown_method(self): + """Clear context after each test.""" + contextual_logging.clear_context_metadata() + + def test_middleware_enables_request_id_in_logs(self): + """Test that middleware enables request_id to be used in logging.""" + import logging 
+ + app = Starlette() + app.add_middleware(RequestContextMiddleware) + + logged_request_ids = [] + + # Create a custom handler to capture log records + class TestHandler(logging.Handler): + def emit(self, record): + # In real usage, LoggingContextFilter would add request_id to logs + current_request_id = contextual_logging.get_context_metadata( + "request_id" + ) + if current_request_id: + logged_request_ids.append(current_request_id) + + logger = logging.getLogger("test_logger") + handler = TestHandler() + logger.addHandler(handler) + logger.setLevel(logging.INFO) + + @app.route("/test") + def test_route(request): + logger.info("Processing request") + return Response("ok") + + client = TestClient(app) + response = client.get("/test") + + # The request_id logged should match the response header + assert len(logged_request_ids) > 0 + assert response.headers["x-tangle-request-id"] in logged_request_ids + + # Cleanup + logger.removeHandler(handler) + + def test_middleware_request_id_persists_across_function_calls(self): + """Test that request_id persists across function calls within a request.""" + app = Starlette() + app.add_middleware(RequestContextMiddleware) + + request_ids_collected = [] + + def helper_function(): + """Helper function that accesses request_id.""" + request_ids_collected.append( + contextual_logging.get_context_metadata("request_id") + ) + + @app.route("/test") + def test_route(request): + request_ids_collected.append( + contextual_logging.get_context_metadata("request_id") + ) + helper_function() + request_ids_collected.append( + contextual_logging.get_context_metadata("request_id") + ) + return Response("ok") + + client = TestClient(app) + response = client.get("/test") + + # All three captures should have the same request_id + assert len(request_ids_collected) == 3 + assert ( + request_ids_collected[0] + == request_ids_collected[1] + == request_ids_collected[2] + ) + assert request_ids_collected[0] == response.headers["x-tangle-request-id"] diff --git a/tests/test_request_id_concurrency.py b/tests/test_request_id_concurrency.py new file mode 100644 index 0000000..97dce6c --- /dev/null +++ b/tests/test_request_id_concurrency.py @@ -0,0 +1,252 @@ +"""Test that request_id works correctly with concurrent requests.""" + +import asyncio +import pytest +from starlette.applications import Starlette +from starlette.responses import JSONResponse +from starlette.testclient import TestClient + +from cloud_pipelines_backend.instrumentation import contextual_logging +from cloud_pipelines_backend.instrumentation.api_tracing import RequestContextMiddleware + + +def test_request_id_isolation_with_concurrent_requests(): + """Test that each concurrent request gets its own isolated request_id.""" + app = Starlette() + app.add_middleware(RequestContextMiddleware) + + # Store request_ids seen by each endpoint + request_ids_seen = { + "endpoint1": [], + "endpoint2": [], + } + + @app.route("/endpoint1") + async def endpoint1(request): + request_id = contextual_logging.get_context_metadata("request_id") + request_ids_seen["endpoint1"].append(request_id) + # Simulate some work + await asyncio.sleep(0.1) + # Verify request_id is still the same after async work + assert contextual_logging.get_context_metadata("request_id") == request_id + return JSONResponse({"request_id": request_id}) + + @app.route("/endpoint2") + async def endpoint2(request): + request_id = contextual_logging.get_context_metadata("request_id") + request_ids_seen["endpoint2"].append(request_id) + # Simulate some work + await 
asyncio.sleep(0.1) + # Verify request_id is still the same after async work + assert contextual_logging.get_context_metadata("request_id") == request_id + return JSONResponse({"request_id": request_id}) + + client = TestClient(app) + + # Make concurrent requests + response1 = client.get("/endpoint1") + response2 = client.get("/endpoint2") + response3 = client.get("/endpoint1") + + # All requests should succeed + assert response1.status_code == 200 + assert response2.status_code == 200 + assert response3.status_code == 200 + + # Each request should have gotten a unique request_id + request_id_1 = response1.headers["x-tangle-request-id"] + request_id_2 = response2.headers["x-tangle-request-id"] + request_id_3 = response3.headers["x-tangle-request-id"] + + # All request_ids should be unique + assert request_id_1 != request_id_2 + assert request_id_1 != request_id_3 + assert request_id_2 != request_id_3 + + # Verify endpoints saw the correct request_ids + assert request_ids_seen["endpoint1"][0] == request_id_1 + assert request_ids_seen["endpoint2"][0] == request_id_2 + assert request_ids_seen["endpoint1"][1] == request_id_3 + + +def test_request_id_isolation_with_nested_async_calls(): + """Test that request_id persists correctly through nested async function calls.""" + app = Starlette() + app.add_middleware(RequestContextMiddleware) + + request_ids_collected = [] + + async def helper_function_1(): + """First level helper.""" + request_ids_collected.append( + ("helper1", contextual_logging.get_context_metadata("request_id")) + ) + await asyncio.sleep(0.01) + await helper_function_2() + request_ids_collected.append( + ("helper1_after", contextual_logging.get_context_metadata("request_id")) + ) + + async def helper_function_2(): + """Second level helper.""" + request_ids_collected.append( + ("helper2", contextual_logging.get_context_metadata("request_id")) + ) + await asyncio.sleep(0.01) + request_ids_collected.append( + ("helper2_after", contextual_logging.get_context_metadata("request_id")) + ) + + @app.route("/test") + async def test_route(request): + request_ids_collected.append( + ("start", contextual_logging.get_context_metadata("request_id")) + ) + await helper_function_1() + request_ids_collected.append( + ("end", contextual_logging.get_context_metadata("request_id")) + ) + return JSONResponse({"ok": True}) + + client = TestClient(app) + response = client.get("/test") + + assert response.status_code == 200 + request_id = response.headers["x-tangle-request-id"] + + # All captured request_ids should be the same + for label, captured_request_id in request_ids_collected: + assert captured_request_id == request_id, f"Mismatch at {label}" + + # Should have captured 6 request_ids total + assert len(request_ids_collected) == 6 + + +def test_request_id_does_not_leak_between_requests(): + """Test that request_id from one request doesn't leak into another.""" + app = Starlette() + app.add_middleware(RequestContextMiddleware) + + request_ids_per_request = [] + + @app.route("/test") + async def test_route(request): + # Capture request_id at start + start_request_id = contextual_logging.get_context_metadata("request_id") + request_ids_per_request.append(start_request_id) + + # Do some async work + await asyncio.sleep(0.05) + + # Verify it hasn't changed + end_request_id = contextual_logging.get_context_metadata("request_id") + assert start_request_id == end_request_id + + return JSONResponse({"request_id": end_request_id}) + + client = TestClient(app) + + # Make multiple sequential requests + 
responses = [client.get("/test") for _ in range(5)] + + # All should succeed + assert all(r.status_code == 200 for r in responses) + + # Extract request_ids from responses + response_request_ids = [r.headers["x-tangle-request-id"] for r in responses] + + # All should be unique + assert len(set(response_request_ids)) == 5 + + # Should match what we captured inside the handler + assert response_request_ids == request_ids_per_request + + +@pytest.mark.asyncio +async def test_contextvars_isolation_across_async_tasks(): + """Direct test of contextvars isolation without HTTP layer.""" + + async def task_with_request_id(task_id: str, expected_request_id: str): + """Simulates a task with its own request_id context.""" + # Set request_id for this task + contextual_logging.set_context_metadata("request_id", expected_request_id) + + # Verify it's set correctly + assert ( + contextual_logging.get_context_metadata("request_id") == expected_request_id + ) + + # Simulate some work + await asyncio.sleep(0.01) + + # Verify request_id is still correct after async work + assert ( + contextual_logging.get_context_metadata("request_id") == expected_request_id + ) + + # More work + await asyncio.sleep(0.01) + + # Still correct + assert ( + contextual_logging.get_context_metadata("request_id") == expected_request_id + ) + + # Clean up + contextual_logging.clear_context_metadata() + + return task_id + + # Run multiple tasks concurrently with different request_ids + tasks = [ + task_with_request_id("task1", "request_aaa111"), + task_with_request_id("task2", "request_bbb222"), + task_with_request_id("task3", "request_ccc333"), + task_with_request_id("task4", "request_ddd444"), + ] + + results = await asyncio.gather(*tasks) + + # All tasks should complete successfully + assert results == ["task1", "task2", "task3", "task4"] + + # After all tasks complete, there should be no request_id in this context + assert contextual_logging.get_context_metadata("request_id") is None + + +def test_request_id_with_context_manager_is_thread_safe(): + """Test that the logging_context context manager works with concurrent access.""" + + collected_request_ids = [] + + def simulate_request_processing(request_id: str): + """Simulates processing with a request_id.""" + with contextual_logging.logging_context(request_id=request_id): + # Verify request_id is set + current = contextual_logging.get_context_metadata("request_id") + collected_request_ids.append((request_id, current)) + assert current == request_id + + # After context exits, should be cleared in this context + # (though in threads, contexts are separate anyway) + + import threading + + # Create threads that will process with different request_ids + threads = [ + threading.Thread(target=simulate_request_processing, args=(f"request_{i:03d}",)) + for i in range(10) + ] + + # Start all threads + for thread in threads: + thread.start() + + # Wait for all to complete + for thread in threads: + thread.join() + + # All threads should have seen their correct request_id + assert len(collected_request_ids) == 10 + for expected, actual in collected_request_ids: + assert expected == actual diff --git a/uv.lock b/uv.lock index 1f106c8..17e9f07 100644 --- a/uv.lock +++ b/uv.lock @@ -38,6 +38,50 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f8/aa/5082412d1ee302e9e7d80b6949bc4d2a8fa1149aaab610c5fc24709605d6/authlib-1.6.5-py2.py3-none-any.whl", hash = "sha256:3e0e0507807f842b02175507bdee8957a1d5707fd4afb17c32fb43fee90b6e3a", size = 243608, upload-time = 
"2025-10-02T13:36:07.637Z" }, ] +[[package]] +name = "black" +version = "26.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "mypy-extensions" }, + { name = "packaging" }, + { name = "pathspec" }, + { name = "platformdirs" }, + { name = "pytokens" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/13/88/560b11e521c522440af991d46848a2bde64b5f7202ec14e1f46f9509d328/black-26.1.0.tar.gz", hash = "sha256:d294ac3340eef9c9eb5d29288e96dc719ff269a88e27b396340459dd85da4c58", size = 658785, upload-time = "2026-01-18T04:50:11.993Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/1b/523329e713f965ad0ea2b7a047eeb003007792a0353622ac7a8cb2ee6fef/black-26.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ca699710dece84e3ebf6e92ee15f5b8f72870ef984bf944a57a777a48357c168", size = 1849661, upload-time = "2026-01-18T04:59:12.425Z" }, + { url = "https://files.pythonhosted.org/packages/14/82/94c0640f7285fa71c2f32879f23e609dd2aa39ba2641f395487f24a578e7/black-26.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5e8e75dabb6eb83d064b0db46392b25cabb6e784ea624219736e8985a6b3675d", size = 1689065, upload-time = "2026-01-18T04:59:13.993Z" }, + { url = "https://files.pythonhosted.org/packages/f0/78/474373cbd798f9291ed8f7107056e343fd39fef42de4a51c7fd0d360840c/black-26.1.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eb07665d9a907a1a645ee41a0df8a25ffac8ad9c26cdb557b7b88eeeeec934e0", size = 1751502, upload-time = "2026-01-18T04:59:15.971Z" }, + { url = "https://files.pythonhosted.org/packages/29/89/59d0e350123f97bc32c27c4d79563432d7f3530dca2bff64d855c178af8b/black-26.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:7ed300200918147c963c87700ccf9966dceaefbbb7277450a8d646fc5646bf24", size = 1400102, upload-time = "2026-01-18T04:59:17.8Z" }, + { url = "https://files.pythonhosted.org/packages/e1/bc/5d866c7ae1c9d67d308f83af5462ca7046760158bbf142502bad8f22b3a1/black-26.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:c5b7713daea9bf943f79f8c3b46f361cc5229e0e604dcef6a8bb6d1c37d9df89", size = 1207038, upload-time = "2026-01-18T04:59:19.543Z" }, + { url = "https://files.pythonhosted.org/packages/30/83/f05f22ff13756e1a8ce7891db517dbc06200796a16326258268f4658a745/black-26.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3cee1487a9e4c640dc7467aaa543d6c0097c391dc8ac74eb313f2fbf9d7a7cb5", size = 1831956, upload-time = "2026-01-18T04:59:21.38Z" }, + { url = "https://files.pythonhosted.org/packages/7d/f2/b2c570550e39bedc157715e43927360312d6dd677eed2cc149a802577491/black-26.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d62d14ca31c92adf561ebb2e5f2741bf8dea28aef6deb400d49cca011d186c68", size = 1672499, upload-time = "2026-01-18T04:59:23.257Z" }, + { url = "https://files.pythonhosted.org/packages/7a/d7/990d6a94dc9e169f61374b1c3d4f4dd3037e93c2cc12b6f3b12bc663aa7b/black-26.1.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fb1dafbbaa3b1ee8b4550a84425aac8874e5f390200f5502cf3aee4a2acb2f14", size = 1735431, upload-time = "2026-01-18T04:59:24.729Z" }, + { url = "https://files.pythonhosted.org/packages/36/1c/cbd7bae7dd3cb315dfe6eeca802bb56662cc92b89af272e014d98c1f2286/black-26.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:101540cb2a77c680f4f80e628ae98bd2bd8812fb9d72ade4f8995c5ff019e82c", size = 1400468, 
upload-time = "2026-01-18T04:59:27.381Z" }, + { url = "https://files.pythonhosted.org/packages/59/b1/9fe6132bb2d0d1f7094613320b56297a108ae19ecf3041d9678aec381b37/black-26.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:6f3977a16e347f1b115662be07daa93137259c711e526402aa444d7a88fdc9d4", size = 1207332, upload-time = "2026-01-18T04:59:28.711Z" }, + { url = "https://files.pythonhosted.org/packages/f5/13/710298938a61f0f54cdb4d1c0baeb672c01ff0358712eddaf29f76d32a0b/black-26.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6eeca41e70b5f5c84f2f913af857cf2ce17410847e1d54642e658e078da6544f", size = 1878189, upload-time = "2026-01-18T04:59:30.682Z" }, + { url = "https://files.pythonhosted.org/packages/79/a6/5179beaa57e5dbd2ec9f1c64016214057b4265647c62125aa6aeffb05392/black-26.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dd39eef053e58e60204f2cdf059e2442e2eb08f15989eefe259870f89614c8b6", size = 1700178, upload-time = "2026-01-18T04:59:32.387Z" }, + { url = "https://files.pythonhosted.org/packages/8c/04/c96f79d7b93e8f09d9298b333ca0d31cd9b2ee6c46c274fd0f531de9dc61/black-26.1.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9459ad0d6cd483eacad4c6566b0f8e42af5e8b583cee917d90ffaa3778420a0a", size = 1777029, upload-time = "2026-01-18T04:59:33.767Z" }, + { url = "https://files.pythonhosted.org/packages/49/f9/71c161c4c7aa18bdda3776b66ac2dc07aed62053c7c0ff8bbda8c2624fe2/black-26.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:a19915ec61f3a8746e8b10adbac4a577c6ba9851fa4a9e9fbfbcf319887a5791", size = 1406466, upload-time = "2026-01-18T04:59:35.177Z" }, + { url = "https://files.pythonhosted.org/packages/4a/8b/a7b0f974e473b159d0ac1b6bcefffeb6bec465898a516ee5cc989503cbc7/black-26.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:643d27fb5facc167c0b1b59d0315f2674a6e950341aed0fc05cf307d22bf4954", size = 1216393, upload-time = "2026-01-18T04:59:37.18Z" }, + { url = "https://files.pythonhosted.org/packages/79/04/fa2f4784f7237279332aa735cdfd5ae2e7730db0072fb2041dadda9ae551/black-26.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ba1d768fbfb6930fc93b0ecc32a43d8861ded16f47a40f14afa9bb04ab93d304", size = 1877781, upload-time = "2026-01-18T04:59:39.054Z" }, + { url = "https://files.pythonhosted.org/packages/cf/ad/5a131b01acc0e5336740a039628c0ab69d60cf09a2c87a4ec49f5826acda/black-26.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2b807c240b64609cb0e80d2200a35b23c7df82259f80bef1b2c96eb422b4aac9", size = 1699670, upload-time = "2026-01-18T04:59:41.005Z" }, + { url = "https://files.pythonhosted.org/packages/da/7c/b05f22964316a52ab6b4265bcd52c0ad2c30d7ca6bd3d0637e438fc32d6e/black-26.1.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1de0f7d01cc894066a1153b738145b194414cc6eeaad8ef4397ac9abacf40f6b", size = 1775212, upload-time = "2026-01-18T04:59:42.545Z" }, + { url = "https://files.pythonhosted.org/packages/a6/a3/e8d1526bea0446e040193185353920a9506eab60a7d8beb062029129c7d2/black-26.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:91a68ae46bf07868963671e4d05611b179c2313301bd756a89ad4e3b3db2325b", size = 1409953, upload-time = "2026-01-18T04:59:44.357Z" }, + { url = "https://files.pythonhosted.org/packages/c7/5a/d62ebf4d8f5e3a1daa54adaab94c107b57be1b1a2f115a0249b41931e188/black-26.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:be5e2fe860b9bd9edbf676d5b60a9282994c03fbbd40fe8f5e75d194f96064ca", size = 1217707, upload-time = "2026-01-18T04:59:45.719Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/83/be35a175aacfce4b05584ac415fd317dd6c24e93a0af2dcedce0f686f5d8/black-26.1.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:9dc8c71656a79ca49b8d3e2ce8103210c9481c57798b48deeb3a8bb02db5f115", size = 1871864, upload-time = "2026-01-18T04:59:47.586Z" }, + { url = "https://files.pythonhosted.org/packages/a5/f5/d33696c099450b1274d925a42b7a030cd3ea1f56d72e5ca8bbed5f52759c/black-26.1.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b22b3810451abe359a964cc88121d57f7bce482b53a066de0f1584988ca36e79", size = 1701009, upload-time = "2026-01-18T04:59:49.443Z" }, + { url = "https://files.pythonhosted.org/packages/1b/87/670dd888c537acb53a863bc15abbd85b22b429237d9de1b77c0ed6b79c42/black-26.1.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:53c62883b3f999f14e5d30b5a79bd437236658ad45b2f853906c7cbe79de00af", size = 1767806, upload-time = "2026-01-18T04:59:50.769Z" }, + { url = "https://files.pythonhosted.org/packages/fe/9c/cd3deb79bfec5bcf30f9d2100ffeec63eecce826eb63e3961708b9431ff1/black-26.1.0-cp314-cp314-win_amd64.whl", hash = "sha256:f016baaadc423dc960cdddf9acae679e71ee02c4c341f78f3179d7e4819c095f", size = 1433217, upload-time = "2026-01-18T04:59:52.218Z" }, + { url = "https://files.pythonhosted.org/packages/4e/29/f3be41a1cf502a283506f40f5d27203249d181f7a1a2abce1c6ce188035a/black-26.1.0-cp314-cp314-win_arm64.whl", hash = "sha256:66912475200b67ef5a0ab665011964bf924745103f51977a78b4fb92a9fc1bf0", size = 1245773, upload-time = "2026-01-18T04:59:54.457Z" }, + { url = "https://files.pythonhosted.org/packages/e4/3d/51bdb3ecbfadfaf825ec0c75e1de6077422b4afa2091c6c9ba34fbfc0c2d/black-26.1.0-py3-none-any.whl", hash = "sha256:1054e8e47ebd686e078c0bb0eaf31e6ce69c966058d122f2c0c950311f9f3ede", size = 204010, upload-time = "2026-01-18T04:50:09.978Z" }, +] + [[package]] name = "cachetools" version = "5.5.2" @@ -242,6 +286,7 @@ dependencies = [ [package.dev-dependencies] dev = [ + { name = "black" }, { name = "pytest" }, ] huggingface = [ @@ -258,7 +303,10 @@ requires-dist = [ ] [package.metadata.requires-dev] -dev = [{ name = "pytest", specifier = ">=8.4.2" }] +dev = [ + { name = "black", specifier = ">=26.1.0" }, + { name = "pytest", specifier = ">=8.4.2" }, +] huggingface = [{ name = "huggingface-hub", extras = ["oauth"], specifier = ">=0.35.3" }] [[package]] @@ -796,6 +844,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, ] +[[package]] +name = "mypy-extensions" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, +] + [[package]] name = "oauthlib" version = "3.2.2" @@ -814,6 +871,24 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, ] +[[package]] +name = "pathspec" +version = "1.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/b2/bb8e495d5262bfec41ab5cb18f522f1012933347fb5d9e62452d446baca2/pathspec-1.0.3.tar.gz", hash = "sha256:bac5cf97ae2c2876e2d25ebb15078eb04d76e4b98921ee31c6f85ade8b59444d", size = 130841, upload-time = "2026-01-09T15:46:46.009Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/2b/121e912bd60eebd623f873fd090de0e84f322972ab25a7f9044c056804ed/pathspec-1.0.3-py3-none-any.whl", hash = "sha256:e80767021c1cc524aa3fb14bedda9c34406591343cc42797b386ce7b9354fb6c", size = 55021, upload-time = "2026-01-09T15:46:44.652Z" }, +] + +[[package]] +name = "platformdirs" +version = "4.5.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cf/86/0248f086a84f01b37aaec0fa567b397df1a119f73c16f6c7a9aac73ea309/platformdirs-4.5.1.tar.gz", hash = "sha256:61d5cdcc6065745cdd94f0f878977f8de9437be93de97c1c12f853c9c0cdcbda", size = 21715, upload-time = "2025-12-05T13:52:58.638Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/28/3bfe2fa5a7b9c46fe7e13c97bda14c895fb10fa2ebf1d0abb90e0cea7ee1/platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31", size = 18731, upload-time = "2025-12-05T13:52:56.823Z" }, +] + [[package]] name = "pluggy" version = "1.6.0" @@ -1012,6 +1087,45 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" }, ] +[[package]] +name = "pytokens" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e5/16/4b9cfd90d55e66ffdb277d7ebe3bc25250c2311336ec3fc73b2673c794d5/pytokens-0.4.0.tar.gz", hash = "sha256:6b0b03e6ea7c9f9d47c5c61164b69ad30f4f0d70a5d9fe7eac4d19f24f77af2d", size = 15039, upload-time = "2026-01-19T07:59:50.623Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3f/c5/c20818fef16c4ab5f9fd7bad699268ba21bf24f655711df4e33bb7a9ab47/pytokens-0.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:af0c3166aea367a9e755a283171befb92dd3043858b94ae9b3b7efbe9def26a3", size = 160682, upload-time = "2026-01-19T07:58:51.583Z" }, + { url = "https://files.pythonhosted.org/packages/46/c4/ad03e4abe05c6af57c4d7f8f031fafe80f0074796d09ab5a73bf2fac895f/pytokens-0.4.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:daae524ed14ca459932cbf51d74325bea643701ba8a8b0cc2d10f7cd4b3e2b63", size = 245748, upload-time = "2026-01-19T07:58:53.944Z" }, + { url = "https://files.pythonhosted.org/packages/6b/b9/4a7ee0a692603b16d8fdfbc5c44e0f6910d45eec6b2c2188daa4670f179d/pytokens-0.4.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e95cb158c44d642ed62f555bf8136bbe780dbd64d2fb0b9169e11ffb944664c3", size = 258671, upload-time = "2026-01-19T07:58:55.667Z" }, + { url = 
"https://files.pythonhosted.org/packages/fa/a3/02bb29dc4985fb8d759d9c96f189c3a828e74f0879fdb843e9fb7a1db637/pytokens-0.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:df58d44630eaf25f587540e94bdf1fc50b4e6d5f212c786de0fb024bfcb8753a", size = 261749, upload-time = "2026-01-19T07:58:57.442Z" }, + { url = "https://files.pythonhosted.org/packages/10/f2/9a8bdcc5444d85d4dba4aa1b530d81af3edc4a9ab76bf1d53ea8bfe8479d/pytokens-0.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55efcc36f9a2e0e930cfba0ce7f83445306b02f8326745585ed5551864eba73a", size = 102805, upload-time = "2026-01-19T07:58:59.068Z" }, + { url = "https://files.pythonhosted.org/packages/b4/05/3196399a353dd4cd99138a88f662810979ee2f1a1cdb0b417cb2f4507836/pytokens-0.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:92eb3ef88f27c22dc9dbab966ace4d61f6826e02ba04dac8e2d65ea31df56c8e", size = 160075, upload-time = "2026-01-19T07:59:00.316Z" }, + { url = "https://files.pythonhosted.org/packages/28/1d/c8fc4ed0a1c4f660391b201cda00b1d5bbcc00e2998e8bcd48b15eefd708/pytokens-0.4.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f4b77858a680635ee9904306f54b0ee4781effb89e211ba0a773d76539537165", size = 247318, upload-time = "2026-01-19T07:59:01.636Z" }, + { url = "https://files.pythonhosted.org/packages/8e/0e/53e55ba01f3e858d229cd84b02481542f42ba59050483a78bf2447ee1af7/pytokens-0.4.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:25cacc20c2ad90acb56f3739d87905473c54ca1fa5967ffcd675463fe965865e", size = 259752, upload-time = "2026-01-19T07:59:04.229Z" }, + { url = "https://files.pythonhosted.org/packages/dc/56/2d930d7f899e3f21868ca6e8ec739ac31e8fc532f66e09cbe45d3df0a84f/pytokens-0.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:628fab535ebc9079e4db35cd63cb401901c7ce8720a9834f9ad44b9eb4e0f1d4", size = 262842, upload-time = "2026-01-19T07:59:06.14Z" }, + { url = "https://files.pythonhosted.org/packages/42/dd/4e7e6920d23deffaf66e6f40d45f7610dcbc132ca5d90ab4faccef22f624/pytokens-0.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:4d0f568d7e82b7e96be56d03b5081de40e43c904eb6492bf09aaca47cd55f35b", size = 102620, upload-time = "2026-01-19T07:59:07.839Z" }, + { url = "https://files.pythonhosted.org/packages/3d/65/65460ebbfefd0bc1b160457904370d44f269e6e4582e0a9b6cba7c267b04/pytokens-0.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cd8da894e5a29ba6b6da8be06a4f7589d7220c099b5e363cb0643234b9b38c2a", size = 159864, upload-time = "2026-01-19T07:59:08.908Z" }, + { url = "https://files.pythonhosted.org/packages/25/70/a46669ec55876c392036b4da9808b5c3b1c5870bbca3d4cc923bf68bdbc1/pytokens-0.4.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:237ba7cfb677dbd3b01b09860810aceb448871150566b93cd24501d5734a04b1", size = 254448, upload-time = "2026-01-19T07:59:10.594Z" }, + { url = "https://files.pythonhosted.org/packages/62/0b/c486fc61299c2fc3b7f88ee4e115d4c8b6ffd1a7f88dc94b398b5b1bc4b8/pytokens-0.4.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01d1a61e36812e4e971cfe2c0e4c1f2d66d8311031dac8bf168af8a249fa04dd", size = 268863, upload-time = "2026-01-19T07:59:12.31Z" }, + { url = "https://files.pythonhosted.org/packages/79/92/b036af846707d25feaff7cafbd5280f1bd6a1034c16bb06a7c910209c1ab/pytokens-0.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e47e2ef3ec6ee86909e520d79f965f9b23389fda47460303cf715d510a6fe544", size = 267181, upload-time = 
"2026-01-19T07:59:13.856Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c0/6d011fc00fefa74ce34816c84a923d2dd7c46b8dbc6ee52d13419786834c/pytokens-0.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:3d36954aba4557fd5a418a03cf595ecbb1cdcce119f91a49b19ef09d691a22ae", size = 102814, upload-time = "2026-01-19T07:59:15.288Z" }, + { url = "https://files.pythonhosted.org/packages/98/63/627b7e71d557383da5a97f473ad50f8d9c2c1f55c7d3c2531a120c796f6e/pytokens-0.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:73eff3bdd8ad08da679867992782568db0529b887bed4c85694f84cdf35eafc6", size = 159744, upload-time = "2026-01-19T07:59:16.88Z" }, + { url = "https://files.pythonhosted.org/packages/28/d7/16f434c37ec3824eba6bcb6e798e5381a8dc83af7a1eda0f95c16fe3ade5/pytokens-0.4.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d97cc1f91b1a8e8ebccf31c367f28225699bea26592df27141deade771ed0afb", size = 253207, upload-time = "2026-01-19T07:59:18.069Z" }, + { url = "https://files.pythonhosted.org/packages/ab/96/04102856b9527701ae57d74a6393d1aca5bad18a1b1ca48ccffb3c93b392/pytokens-0.4.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a2c8952c537cb73a1a74369501a83b7f9d208c3cf92c41dd88a17814e68d48ce", size = 267452, upload-time = "2026-01-19T07:59:19.328Z" }, + { url = "https://files.pythonhosted.org/packages/0e/ef/0936eb472b89ab2d2c2c24bb81c50417e803fa89c731930d9fb01176fe9f/pytokens-0.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5dbf56f3c748aed9310b310d5b8b14e2c96d3ad682ad5a943f381bdbbdddf753", size = 265965, upload-time = "2026-01-19T07:59:20.613Z" }, + { url = "https://files.pythonhosted.org/packages/ae/f5/64f3d6f7df4a9e92ebda35ee85061f6260e16eac82df9396020eebbca775/pytokens-0.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:e131804513597f2dff2b18f9911d9b6276e21ef3699abeffc1c087c65a3d975e", size = 102813, upload-time = "2026-01-19T07:59:22.012Z" }, + { url = "https://files.pythonhosted.org/packages/5f/f1/d07e6209f18ef378fc2ae9dee8d1dfe91fd2447c2e2dbfa32867b6dd30cf/pytokens-0.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0d7374c917197106d3c4761374718bc55ea2e9ac0fb94171588ef5840ee1f016", size = 159968, upload-time = "2026-01-19T07:59:23.07Z" }, + { url = "https://files.pythonhosted.org/packages/0a/73/0eb111400abd382a04f253b269819db9fcc748aa40748441cebdcb6d068f/pytokens-0.4.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cd3fa1caf9e47a72ee134a29ca6b5bea84712724bba165d6628baa190c6ea5b", size = 253373, upload-time = "2026-01-19T07:59:24.381Z" }, + { url = "https://files.pythonhosted.org/packages/bd/8d/9e4e2fdb5bcaba679e54afcc304e9f13f488eb4d626e6b613f9553e03dbd/pytokens-0.4.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c6986576b7b07fe9791854caa5347923005a80b079d45b63b0be70d50cce5f1", size = 267024, upload-time = "2026-01-19T07:59:25.74Z" }, + { url = "https://files.pythonhosted.org/packages/cb/b7/e0a370321af2deb772cff14ff337e1140d1eac2c29a8876bfee995f486f0/pytokens-0.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:9940f7c2e2f54fb1cb5fe17d0803c54da7a2bf62222704eb4217433664a186a7", size = 270912, upload-time = "2026-01-19T07:59:27.072Z" }, + { url = "https://files.pythonhosted.org/packages/7c/54/4348f916c440d4c3e68b53b4ed0e66b292d119e799fa07afa159566dcc86/pytokens-0.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:54691cf8f299e7efabcc25adb4ce715d3cef1491e1c930eaf555182f898ef66a", 
size = 103836, upload-time = "2026-01-19T07:59:28.112Z" }, + { url = "https://files.pythonhosted.org/packages/e8/f8/a693c0cfa9c783a2a8c4500b7b2a8bab420f8ca4f2d496153226bf1c12e3/pytokens-0.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:94ff5db97a0d3cd7248a5b07ba2167bd3edc1db92f76c6db00137bbaf068ddf8", size = 167643, upload-time = "2026-01-19T07:59:29.292Z" }, + { url = "https://files.pythonhosted.org/packages/c0/dd/a64eb1e9f3ec277b69b33ef1b40ffbcc8f0a3bafcde120997efc7bdefebf/pytokens-0.4.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d0dd6261cd9cc95fae1227b1b6ebee023a5fd4a4b6330b071c73a516f5f59b63", size = 289553, upload-time = "2026-01-19T07:59:30.537Z" }, + { url = "https://files.pythonhosted.org/packages/df/22/06c1079d93dbc3bca5d013e1795f3d8b9ed6c87290acd6913c1c526a6bb2/pytokens-0.4.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0cdca8159df407dbd669145af4171a0d967006e0be25f3b520896bc7068f02c4", size = 302490, upload-time = "2026-01-19T07:59:32.352Z" }, + { url = "https://files.pythonhosted.org/packages/8d/de/a6f5e43115b4fbf4b93aa87d6c83c79932cdb084f9711daae04549e1e4ad/pytokens-0.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:4b5770abeb2a24347380a1164a558f0ebe06e98aedbd54c45f7929527a5fb26e", size = 305652, upload-time = "2026-01-19T07:59:33.685Z" }, + { url = "https://files.pythonhosted.org/packages/ab/3d/c136e057cb622e36e0c3ff7a8aaa19ff9720050c4078235691da885fe6ee/pytokens-0.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:74500d72c561dad14c037a9e86a657afd63e277dd5a3bb7570932ab7a3b12551", size = 115472, upload-time = "2026-01-19T07:59:34.734Z" }, + { url = "https://files.pythonhosted.org/packages/7c/3c/6941a82f4f130af6e1c68c076b6789069ef10c04559bd4733650f902fd3b/pytokens-0.4.0-py3-none-any.whl", hash = "sha256:0508d11b4de157ee12063901603be87fb0253e8f4cb9305eb168b1202ab92068", size = 13224, upload-time = "2026-01-19T07:59:49.822Z" }, +] + [[package]] name = "pywin32" version = "310"