diff --git a/airflow/config_templates/airflow_local_settings.py b/airflow/config_templates/airflow_local_settings.py index bb0812ca109e4..2ce5c8378229e 100644 --- a/airflow/config_templates/airflow_local_settings.py +++ b/airflow/config_templates/airflow_local_settings.py @@ -61,10 +61,6 @@ "logging", "DAG_PROCESSOR_MANAGER_LOG_STDOUT" ) -# FILENAME_TEMPLATE only uses in Remote Logging Handlers since Airflow 2.3.3 -# All of these handlers inherited from FileTaskHandler and providing any value rather than None -# would raise deprecation warning. -FILENAME_TEMPLATE: str | None = None PROCESSOR_FILENAME_TEMPLATE: str = conf.get_mandatory_value("logging", "LOG_PROCESSOR_FILENAME_TEMPLATE") @@ -230,7 +226,6 @@ "formatter": "airflow", "base_log_folder": str(os.path.expanduser(BASE_LOG_FOLDER)), "s3_log_folder": REMOTE_BASE_LOG_FOLDER, - "filename_template": FILENAME_TEMPLATE, }, } @@ -243,7 +238,6 @@ "formatter": "airflow", "base_log_folder": str(os.path.expanduser(BASE_LOG_FOLDER)), "log_group_arn": url_parts.netloc + url_parts.path, - "filename_template": FILENAME_TEMPLATE, }, } @@ -256,7 +250,6 @@ "formatter": "airflow", "base_log_folder": str(os.path.expanduser(BASE_LOG_FOLDER)), "gcs_log_folder": REMOTE_BASE_LOG_FOLDER, - "filename_template": FILENAME_TEMPLATE, "gcp_key_path": key_path, }, } @@ -273,7 +266,6 @@ "base_log_folder": str(os.path.expanduser(BASE_LOG_FOLDER)), "wasb_log_folder": REMOTE_BASE_LOG_FOLDER, "wasb_container": wasb_log_container, - "filename_template": FILENAME_TEMPLATE, }, } @@ -299,7 +291,6 @@ "formatter": "airflow", "base_log_folder": os.path.expanduser(BASE_LOG_FOLDER), "oss_log_folder": REMOTE_BASE_LOG_FOLDER, - "filename_template": FILENAME_TEMPLATE, }, } DEFAULT_LOGGING_CONFIG["handlers"].update(OSS_REMOTE_HANDLERS) @@ -310,7 +301,6 @@ "formatter": "airflow", "base_log_folder": str(os.path.expanduser(BASE_LOG_FOLDER)), "hdfs_log_folder": REMOTE_BASE_LOG_FOLDER, - "filename_template": FILENAME_TEMPLATE, }, } 
DEFAULT_LOGGING_CONFIG["handlers"].update(HDFS_REMOTE_HANDLERS) @@ -328,7 +318,6 @@ "class": "airflow.providers.elasticsearch.log.es_task_handler.ElasticsearchTaskHandler", "formatter": "airflow", "base_log_folder": str(os.path.expanduser(BASE_LOG_FOLDER)), - "filename_template": FILENAME_TEMPLATE, "end_of_log_mark": ELASTICSEARCH_END_OF_LOG_MARK, "host": ELASTICSEARCH_HOST, "frontend": ELASTICSEARCH_FRONTEND, diff --git a/airflow/providers/alibaba/cloud/log/oss_task_handler.py b/airflow/providers/alibaba/cloud/log/oss_task_handler.py index 866f926eda732..3b06bd1ef3c1d 100644 --- a/airflow/providers/alibaba/cloud/log/oss_task_handler.py +++ b/airflow/providers/alibaba/cloud/log/oss_task_handler.py @@ -36,9 +36,9 @@ class OSSTaskHandler(FileTaskHandler, LoggingMixin): Extends airflow FileTaskHandler and uploads to and reads from OSS remote storage. """ - def __init__(self, base_log_folder, oss_log_folder, filename_template=None, **kwargs): + def __init__(self, base_log_folder, oss_log_folder, **kwargs): self.log.info("Using oss_task_handler for remote logging...") - super().__init__(base_log_folder, filename_template) + super().__init__(base_log_folder) (self.bucket_name, self.base_folder) = OSSHook.parse_oss_url(oss_log_folder) self.log_relative_path = "" self._hook = None diff --git a/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py b/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py index 865b30233db3e..69efacfad3536 100644 --- a/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py +++ b/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py @@ -71,13 +71,12 @@ class CloudwatchTaskHandler(FileTaskHandler, LoggingMixin): :param base_log_folder: base folder to store logs locally :param log_group_arn: ARN of the Cloudwatch log group for remote log storage with format ``arn:aws:logs:{region name}:{account id}:log-group:{group name}`` - :param filename_template: template for file name (local storage) or log stream name (remote) """ 
trigger_should_wrap = True - def __init__(self, base_log_folder: str, log_group_arn: str, filename_template: str | None = None): - super().__init__(base_log_folder, filename_template) + def __init__(self, base_log_folder: str, log_group_arn: str): + super().__init__(base_log_folder) split_arn = log_group_arn.split(":") self.handler = None diff --git a/airflow/providers/amazon/aws/log/s3_task_handler.py b/airflow/providers/amazon/aws/log/s3_task_handler.py index 96cf54478a144..fec1344c76c9b 100644 --- a/airflow/providers/amazon/aws/log/s3_task_handler.py +++ b/airflow/providers/amazon/aws/log/s3_task_handler.py @@ -42,10 +42,8 @@ class S3TaskHandler(FileTaskHandler, LoggingMixin): trigger_should_wrap = True - def __init__( - self, base_log_folder: str, s3_log_folder: str, filename_template: str | None = None, **kwargs - ): - super().__init__(base_log_folder, filename_template) + def __init__(self, base_log_folder: str, s3_log_folder: str, **kwargs): + super().__init__(base_log_folder) self.handler: logging.FileHandler | None = None self.remote_base = s3_log_folder self.log_relative_path = "" diff --git a/airflow/providers/apache/hdfs/log/hdfs_task_handler.py b/airflow/providers/apache/hdfs/log/hdfs_task_handler.py index 2a62aeef1397d..2007a1eca1b92 100644 --- a/airflow/providers/apache/hdfs/log/hdfs_task_handler.py +++ b/airflow/providers/apache/hdfs/log/hdfs_task_handler.py @@ -32,10 +32,8 @@ class HdfsTaskHandler(FileTaskHandler, LoggingMixin): """Logging handler to upload and read from HDFS.""" - def __init__( - self, base_log_folder: str, hdfs_log_folder: str, filename_template: str | None = None, **kwargs - ): - super().__init__(base_log_folder, filename_template) + def __init__(self, base_log_folder: str, hdfs_log_folder: str, **kwargs): + super().__init__(base_log_folder) self.remote_base = urlsplit(hdfs_log_folder).path self.log_relative_path = "" self._hook = None diff --git a/airflow/providers/elasticsearch/log/es_task_handler.py 
b/airflow/providers/elasticsearch/log/es_task_handler.py index b9da3ea3e867d..6473b8ddd7287 100644 --- a/airflow/providers/elasticsearch/log/es_task_handler.py +++ b/airflow/providers/elasticsearch/log/es_task_handler.py @@ -158,14 +158,13 @@ def __init__( index_patterns_callable: str = conf.get("elasticsearch", "index_patterns_callable", fallback=""), es_kwargs: dict | None | Literal["default_es_kwargs"] = "default_es_kwargs", *, - filename_template: str | None = None, log_id_template: str | None = None, ): es_kwargs = es_kwargs or {} if es_kwargs == "default_es_kwargs": es_kwargs = get_es_kwargs_from_config() host = self.format_url(host) - super().__init__(base_log_folder, filename_template) + super().__init__(base_log_folder) self.closed = False self.client = elasticsearch.Elasticsearch(host, **es_kwargs) diff --git a/airflow/providers/google/cloud/log/gcs_task_handler.py b/airflow/providers/google/cloud/log/gcs_task_handler.py index 1f097f05be625..8cdde520e461f 100644 --- a/airflow/providers/google/cloud/log/gcs_task_handler.py +++ b/airflow/providers/google/cloud/log/gcs_task_handler.py @@ -79,14 +79,13 @@ def __init__( *, base_log_folder: str, gcs_log_folder: str, - filename_template: str | None = None, gcp_key_path: str | None = None, gcp_keyfile_dict: dict | None = None, gcp_scopes: Collection[str] | None = _DEFAULT_SCOPESS, project_id: str = PROVIDE_PROJECT_ID, **kwargs, ): - super().__init__(base_log_folder, filename_template) + super().__init__(base_log_folder) self.handler: logging.FileHandler | None = None self.remote_base = gcs_log_folder self.log_relative_path = "" diff --git a/airflow/providers/microsoft/azure/log/wasb_task_handler.py b/airflow/providers/microsoft/azure/log/wasb_task_handler.py index 5fe96bb6a3929..df21930237fd0 100644 --- a/airflow/providers/microsoft/azure/log/wasb_task_handler.py +++ b/airflow/providers/microsoft/azure/log/wasb_task_handler.py @@ -49,11 +49,9 @@ def __init__( base_log_folder: str, wasb_log_folder: str, 
wasb_container: str, - *, - filename_template: str | None = None, **kwargs, ) -> None: - super().__init__(base_log_folder, filename_template) + super().__init__(base_log_folder) self.handler: logging.FileHandler | None = None self.wasb_container = wasb_container self.remote_base = wasb_log_folder diff --git a/airflow/utils/log/file_task_handler.py b/airflow/utils/log/file_task_handler.py index e99ffae0c94d8..2fecb87bedebe 100644 --- a/airflow/utils/log/file_task_handler.py +++ b/airflow/utils/log/file_task_handler.py @@ -22,7 +22,6 @@ import inspect import logging import os -import warnings from contextlib import suppress from enum import Enum from functools import cached_property @@ -34,7 +33,7 @@ from airflow.api_internal.internal_api_call import internal_api_call from airflow.configuration import conf -from airflow.exceptions import AirflowException, RemovedInAirflow3Warning +from airflow.exceptions import AirflowException from airflow.executors.executor_loader import ExecutorLoader from airflow.utils.context import Context from airflow.utils.helpers import parse_template_string, render_template_to_string @@ -175,7 +174,6 @@ class FileTaskHandler(logging.Handler): instance context. It reads logs from task instance's host machine. :param base_log_folder: Base log folder to place logs. 
- :param filename_template: template filename string :param max_bytes: max bytes size for the log file :param backup_count: backup file count for the log file :param delay: default False -> StreamHandler, True -> Handler @@ -189,7 +187,6 @@ def __init__( self, base_log_folder: str, - filename_template: str | None = None, max_bytes: int = 0, backup_count: int = 0, delay: bool = False, @@ -197,14 +194,6 @@ super().__init__() self.handler: logging.Handler | None = None self.local_base = base_log_folder - if filename_template is not None: - warnings.warn( - "Passing filename_template to a log handler is deprecated and has no effect", - RemovedInAirflow3Warning, - # We want to reference the stack that actually instantiates the - # handler, not the one that calls super()__init__. - stacklevel=(2 if isinstance(self, FileTaskHandler) else 3), - ) self.maintain_propagate: bool = False self.max_bytes = max_bytes self.backup_count = backup_count diff --git a/newsfragments/41552.significant.rst b/newsfragments/41552.significant.rst new file mode 100644 index 0000000000000..475b0e159474a --- /dev/null +++ b/newsfragments/41552.significant.rst @@ -0,0 +1 @@ +Removed the deprecated ``filename_template`` argument from ``airflow.utils.log.file_task_handler.FileTaskHandler``; the remote logging handlers that subclass it (Amazon S3 and CloudWatch, Google GCS, Azure WASB, Alibaba OSS, Apache HDFS, and Elasticsearch) no longer accept it either. 
diff --git a/tests/utils/test_log_handlers.py b/tests/utils/test_log_handlers.py index da10034cb9370..8bd4ab684bdbf 100644 --- a/tests/utils/test_log_handlers.py +++ b/tests/utils/test_log_handlers.py @@ -33,7 +33,6 @@ from requests.adapters import Response from airflow.config_templates.airflow_local_settings import DEFAULT_LOGGING_CONFIG -from airflow.exceptions import RemovedInAirflow3Warning from airflow.executors import executor_loader from airflow.jobs.job import Job from airflow.jobs.triggerer_job_runner import TriggererJobRunner @@ -79,13 +78,6 @@ def setup_method(self): def teardown_method(self): self.clean_up() - def test_deprecated_filename_template(self): - with pytest.warns( - RemovedInAirflow3Warning, - match="Passing filename_template to a log handler is deprecated and has no effect", - ): - FileTaskHandler("", filename_template="/foo/bar") - def test_default_task_logging_setup(self): # file task handler is used by default. logger = logging.getLogger(TASK_LOGGER)