Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 11 additions & 1 deletion shared/logging/src/airflow_shared/logging/structlog.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@
from functools import cache, cached_property, partial
from pathlib import Path
from types import ModuleType
from typing import TYPE_CHECKING, Any, BinaryIO, Generic, TextIO, TypeVar, cast
from typing import IO, TYPE_CHECKING, Any, BinaryIO, Generic, TextIO, TypeVar, cast

import pygtrie
import structlog
Expand Down Expand Up @@ -803,6 +803,16 @@ def reconfigure_logger(
)


def clear_structlog_shared_lock(log_file_descriptor: IO[Any]) -> None:
    """Drop the structlog write lock associated with *log_file_descriptor*.

    structlog keeps a module-level registry (``WRITE_LOCKS``) mapping open
    file objects to the locks that serialize writes to them. Once a log file
    descriptor is closed, its registry entry should be removed so the mapping
    does not retain stale file objects.

    If the private ``structlog._output.WRITE_LOCKS`` registry is unavailable
    (e.g. structlog not installed or its internals changed), this is a no-op.
    """
    try:
        # NOTE: private structlog API — may disappear in a future release.
        from structlog._output import WRITE_LOCKS
    except ImportError:
        return

    WRITE_LOCKS.pop(log_file_descriptor, None)


if __name__ == "__main__":
configure_logging(
# json_output=True,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
import structlog
from pydantic import Field, TypeAdapter

from airflow.sdk._shared.logging.structlog import clear_structlog_shared_lock
from airflow.sdk._shared.module_loading import accepts_context, accepts_keyword_args
from airflow.sdk.exceptions import ErrorType
from airflow.sdk.execution_time.comms import (
Expand Down Expand Up @@ -378,3 +379,4 @@ def supervise_callback(
finally:
if log_path and log_file_descriptor:
log_file_descriptor.close()
clear_structlog_shared_lock(log_file_descriptor)
4 changes: 3 additions & 1 deletion task-sdk/src/airflow/sdk/execution_time/supervisor.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@
import structlog
from pydantic import BaseModel, TypeAdapter

from airflow.sdk._shared.logging.structlog import reconfigure_logger
from airflow.sdk._shared.logging.structlog import clear_structlog_shared_lock, reconfigure_logger
from airflow.sdk.api.client import Client, ServerResponseError
from airflow.sdk.api.datamodels._generated import (
AssetResponse,
Expand Down Expand Up @@ -2260,6 +2260,8 @@ def supervise_task(
finally:
if log_path and log_file_descriptor:
log_file_descriptor.close()
clear_structlog_shared_lock(log_file_descriptor)

provider = trace.get_tracer_provider()
if hasattr(provider, "force_flush"):
provider.force_flush(timeout_millis=5000) # upper bound, not a fixed wait
Expand Down
Loading