Remove deprecated log handler argument filename_template (apache#41552)
dirrao authored Aug 20, 2024
1 parent 5cb582b commit 716c430
Showing 11 changed files with 13 additions and 51 deletions.
11 changes: 0 additions & 11 deletions airflow/config_templates/airflow_local_settings.py
@@ -61,10 +61,6 @@
"logging", "DAG_PROCESSOR_MANAGER_LOG_STDOUT"
)

- # FILENAME_TEMPLATE only uses in Remote Logging Handlers since Airflow 2.3.3
- # All of these handlers inherited from FileTaskHandler and providing any value rather than None
- # would raise deprecation warning.
- FILENAME_TEMPLATE: str | None = None

PROCESSOR_FILENAME_TEMPLATE: str = conf.get_mandatory_value("logging", "LOG_PROCESSOR_FILENAME_TEMPLATE")

@@ -230,7 +226,6 @@
"formatter": "airflow",
"base_log_folder": str(os.path.expanduser(BASE_LOG_FOLDER)),
"s3_log_folder": REMOTE_BASE_LOG_FOLDER,
"filename_template": FILENAME_TEMPLATE,
},
}

@@ -243,7 +238,6 @@
"formatter": "airflow",
"base_log_folder": str(os.path.expanduser(BASE_LOG_FOLDER)),
"log_group_arn": url_parts.netloc + url_parts.path,
"filename_template": FILENAME_TEMPLATE,
},
}

@@ -256,7 +250,6 @@
"formatter": "airflow",
"base_log_folder": str(os.path.expanduser(BASE_LOG_FOLDER)),
"gcs_log_folder": REMOTE_BASE_LOG_FOLDER,
"filename_template": FILENAME_TEMPLATE,
"gcp_key_path": key_path,
},
}
@@ -273,7 +266,6 @@
"base_log_folder": str(os.path.expanduser(BASE_LOG_FOLDER)),
"wasb_log_folder": REMOTE_BASE_LOG_FOLDER,
"wasb_container": wasb_log_container,
"filename_template": FILENAME_TEMPLATE,
},
}

@@ -299,7 +291,6 @@
"formatter": "airflow",
"base_log_folder": os.path.expanduser(BASE_LOG_FOLDER),
"oss_log_folder": REMOTE_BASE_LOG_FOLDER,
"filename_template": FILENAME_TEMPLATE,
},
}
DEFAULT_LOGGING_CONFIG["handlers"].update(OSS_REMOTE_HANDLERS)
@@ -310,7 +301,6 @@
"formatter": "airflow",
"base_log_folder": str(os.path.expanduser(BASE_LOG_FOLDER)),
"hdfs_log_folder": REMOTE_BASE_LOG_FOLDER,
"filename_template": FILENAME_TEMPLATE,
},
}
DEFAULT_LOGGING_CONFIG["handlers"].update(HDFS_REMOTE_HANDLERS)
@@ -328,7 +318,6 @@
"class": "airflow.providers.elasticsearch.log.es_task_handler.ElasticsearchTaskHandler",
"formatter": "airflow",
"base_log_folder": str(os.path.expanduser(BASE_LOG_FOLDER)),
"filename_template": FILENAME_TEMPLATE,
"end_of_log_mark": ELASTICSEARCH_END_OF_LOG_MARK,
"host": ELASTICSEARCH_HOST,
"frontend": ELASTICSEARCH_FRONTEND,
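For deployments that ship a customized airflow_local_settings.py, the remote handler entries above simply lose their "filename_template" key. A minimal sketch of an overridden task handler entry after this change, assuming S3 remote logging; the bucket and local path are placeholder values:

from copy import deepcopy
import os

from airflow.config_templates.airflow_local_settings import DEFAULT_LOGGING_CONFIG

LOGGING_CONFIG = deepcopy(DEFAULT_LOGGING_CONFIG)
LOGGING_CONFIG["handlers"]["task"] = {
    "class": "airflow.providers.amazon.aws.log.s3_task_handler.S3TaskHandler",
    "formatter": "airflow",
    "base_log_folder": os.path.expanduser("~/airflow/logs"),  # placeholder path
    "s3_log_folder": "s3://my-bucket/airflow/logs",  # placeholder bucket
    # "filename_template" is no longer accepted here; the handler derives
    # per-task log paths itself.
}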
4 changes: 2 additions & 2 deletions airflow/providers/alibaba/cloud/log/oss_task_handler.py
Original file line number Diff line number Diff line change
@@ -36,9 +36,9 @@ class OSSTaskHandler(FileTaskHandler, LoggingMixin):
Extends airflow FileTaskHandler and uploads to and reads from OSS remote storage.
"""

- def __init__(self, base_log_folder, oss_log_folder, filename_template=None, **kwargs):
+ def __init__(self, base_log_folder, oss_log_folder, **kwargs):
self.log.info("Using oss_task_handler for remote logging...")
- super().__init__(base_log_folder, filename_template)
+ super().__init__(base_log_folder)
(self.bucket_name, self.base_folder) = OSSHook.parse_oss_url(oss_log_folder)
self.log_relative_path = ""
self._hook = None
5 changes: 2 additions & 3 deletions airflow/providers/amazon/aws/log/cloudwatch_task_handler.py
Original file line number Diff line number Diff line change
@@ -71,13 +71,12 @@ class CloudwatchTaskHandler(FileTaskHandler, LoggingMixin):
:param base_log_folder: base folder to store logs locally
:param log_group_arn: ARN of the Cloudwatch log group for remote log storage
with format ``arn:aws:logs:{region name}:{account id}:log-group:{group name}``
- :param filename_template: template for file name (local storage) or log stream name (remote)
"""

trigger_should_wrap = True

- def __init__(self, base_log_folder: str, log_group_arn: str, filename_template: str | None = None):
- super().__init__(base_log_folder, filename_template)
+ def __init__(self, base_log_folder: str, log_group_arn: str):
+ super().__init__(base_log_folder)
split_arn = log_group_arn.split(":")

self.handler = None
6 changes: 2 additions & 4 deletions airflow/providers/amazon/aws/log/s3_task_handler.py
Original file line number Diff line number Diff line change
@@ -42,10 +42,8 @@ class S3TaskHandler(FileTaskHandler, LoggingMixin):

trigger_should_wrap = True

- def __init__(
- self, base_log_folder: str, s3_log_folder: str, filename_template: str | None = None, **kwargs
- ):
- super().__init__(base_log_folder, filename_template)
+ def __init__(self, base_log_folder: str, s3_log_folder: str, **kwargs):
+ super().__init__(base_log_folder)
self.handler: logging.FileHandler | None = None
self.remote_base = s3_log_folder
self.log_relative_path = ""
6 changes: 2 additions & 4 deletions airflow/providers/apache/hdfs/log/hdfs_task_handler.py
Original file line number Diff line number Diff line change
@@ -32,10 +32,8 @@
class HdfsTaskHandler(FileTaskHandler, LoggingMixin):
"""Logging handler to upload and read from HDFS."""

- def __init__(
- self, base_log_folder: str, hdfs_log_folder: str, filename_template: str | None = None, **kwargs
- ):
- super().__init__(base_log_folder, filename_template)
+ def __init__(self, base_log_folder: str, hdfs_log_folder: str, **kwargs):
+ super().__init__(base_log_folder)
self.remote_base = urlsplit(hdfs_log_folder).path
self.log_relative_path = ""
self._hook = None
3 changes: 1 addition & 2 deletions airflow/providers/elasticsearch/log/es_task_handler.py
Original file line number Diff line number Diff line change
@@ -158,14 +158,13 @@ def __init__(
index_patterns_callable: str = conf.get("elasticsearch", "index_patterns_callable", fallback=""),
es_kwargs: dict | None | Literal["default_es_kwargs"] = "default_es_kwargs",
*,
- filename_template: str | None = None,
log_id_template: str | None = None,
):
es_kwargs = es_kwargs or {}
if es_kwargs == "default_es_kwargs":
es_kwargs = get_es_kwargs_from_config()
host = self.format_url(host)
- super().__init__(base_log_folder, filename_template)
+ super().__init__(base_log_folder)
self.closed = False

self.client = elasticsearch.Elasticsearch(host, **es_kwargs)
3 changes: 1 addition & 2 deletions airflow/providers/google/cloud/log/gcs_task_handler.py
Original file line number Diff line number Diff line change
@@ -79,14 +79,13 @@ def __init__(
*,
base_log_folder: str,
gcs_log_folder: str,
- filename_template: str | None = None,
gcp_key_path: str | None = None,
gcp_keyfile_dict: dict | None = None,
gcp_scopes: Collection[str] | None = _DEFAULT_SCOPESS,
project_id: str = PROVIDE_PROJECT_ID,
**kwargs,
):
- super().__init__(base_log_folder, filename_template)
+ super().__init__(base_log_folder)
self.handler: logging.FileHandler | None = None
self.remote_base = gcs_log_folder
self.log_relative_path = ""
4 changes: 1 addition & 3 deletions airflow/providers/microsoft/azure/log/wasb_task_handler.py
Original file line number Diff line number Diff line change
@@ -49,11 +49,9 @@ def __init__(
base_log_folder: str,
wasb_log_folder: str,
wasb_container: str,
- *,
- filename_template: str | None = None,
**kwargs,
) -> None:
- super().__init__(base_log_folder, filename_template)
+ super().__init__(base_log_folder)
self.handler: logging.FileHandler | None = None
self.wasb_container = wasb_container
self.remote_base = wasb_log_folder
13 changes: 1 addition & 12 deletions airflow/utils/log/file_task_handler.py
Original file line number Diff line number Diff line change
@@ -22,7 +22,6 @@
import inspect
import logging
import os
- import warnings
from contextlib import suppress
from enum import Enum
from functools import cached_property
@@ -34,7 +33,7 @@

from airflow.api_internal.internal_api_call import internal_api_call
from airflow.configuration import conf
- from airflow.exceptions import AirflowException, RemovedInAirflow3Warning
+ from airflow.exceptions import AirflowException
from airflow.executors.executor_loader import ExecutorLoader
from airflow.utils.context import Context
from airflow.utils.helpers import parse_template_string, render_template_to_string
@@ -175,7 +174,6 @@ class FileTaskHandler(logging.Handler):
instance context. It reads logs from task instance's host machine.
:param base_log_folder: Base log folder to place logs.
- :param filename_template: template filename string
:param max_bytes: max bytes size for the log file
:param backup_count: backup file count for the log file
:param delay: default False -> StreamHandler, True -> Handler
@@ -189,22 +187,13 @@
def __init__(
self,
base_log_folder: str,
- filename_template: str | None = None,
max_bytes: int = 0,
backup_count: int = 0,
delay: bool = False,
):
super().__init__()
self.handler: logging.Handler | None = None
self.local_base = base_log_folder
- if filename_template is not None:
- warnings.warn(
- "Passing filename_template to a log handler is deprecated and has no effect",
- RemovedInAirflow3Warning,
- # We want to reference the stack that actually instantiates the
- # handler, not the one that calls super()__init__.
- stacklevel=(2 if isinstance(self, FileTaskHandler) else 3),
- )
self.maintain_propagate: bool = False
self.max_bytes = max_bytes
self.backup_count = backup_count
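Providers or plugins that subclass FileTaskHandler and still forward filename_template to super().__init__ need the same one-line change the provider handlers above received. A minimal sketch under that assumption; MyTaskHandler and remote_folder are hypothetical names:

import logging

from airflow.utils.log.file_task_handler import FileTaskHandler
from airflow.utils.log.logging_mixin import LoggingMixin


class MyTaskHandler(FileTaskHandler, LoggingMixin):
    """Hypothetical remote handler updated for the new constructor."""

    def __init__(self, base_log_folder: str, remote_folder: str, **kwargs):
        # filename_template is no longer forwarded; FileTaskHandler builds the
        # per-task log path itself.
        super().__init__(base_log_folder)
        self.handler: logging.FileHandler | None = None
        self.remote_base = remote_folder
        self.log_relative_path = ""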
1 change: 1 addition & 0 deletions newsfragments/41552.significant.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
+ Removed deprecated ``filename_template`` argument from ``airflow.utils.log.file_task_handler.FileTaskHandler``.
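In practice the change is breaking rather than silent: where the argument previously triggered RemovedInAirflow3Warning and was ignored (see the removed block in file_task_handler.py above), passing it now fails with a TypeError. A hedged before/after sketch; the folder and template strings are illustrative only:

from airflow.utils.log.file_task_handler import FileTaskHandler

# Before: deprecated but tolerated -- the value was ignored and a
# RemovedInAirflow3Warning was emitted.
# handler = FileTaskHandler("/tmp/airflow/logs", filename_template="{{ ti.dag_id }}/{{ ti.task_id }}.log")

# After this commit the keyword no longer exists, so the call above raises
# TypeError; only base_log_folder (plus max_bytes, backup_count, delay) remains.
handler = FileTaskHandler("/tmp/airflow/logs")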
8 changes: 0 additions & 8 deletions tests/utils/test_log_handlers.py
Original file line number Diff line number Diff line change
@@ -33,7 +33,6 @@
from requests.adapters import Response

from airflow.config_templates.airflow_local_settings import DEFAULT_LOGGING_CONFIG
- from airflow.exceptions import RemovedInAirflow3Warning
from airflow.executors import executor_loader
from airflow.jobs.job import Job
from airflow.jobs.triggerer_job_runner import TriggererJobRunner
@@ -79,13 +78,6 @@ def setup_method(self):
def teardown_method(self):
self.clean_up()

- def test_deprecated_filename_template(self):
- with pytest.warns(
- RemovedInAirflow3Warning,
- match="Passing filename_template to a log handler is deprecated and has no effect",
- ):
- FileTaskHandler("", filename_template="/foo/bar")
-
def test_default_task_logging_setup(self):
# file task handler is used by default.
logger = logging.getLogger(TASK_LOGGER)
