From 6df20ce7d6d40f70ceef3b87a378bf178e9d34ad Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 26 Sep 2023 18:33:31 +0300 Subject: [PATCH 01/76] ci: introduce ruff --- .pre-commit-config.yaml | 25 +++-------- poetry.lock | 96 ++++++++++++----------------------------- pyproject.toml | 91 +++++++++++++++++++++++++++++++++++++- 3 files changed, 124 insertions(+), 88 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 59639ac35..4cfcb0651 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,10 +1,12 @@ repos: - - repo: https://github.com/pycqa/isort - rev: 5.12.0 + - repo: local hooks: - - id: isort - args: ["--settings", "setup.cfg"] - exclude: ^tests/.*snapshots/ + - id: ruff + name: ruff + entry: ruff + args: [ --config, pyproject.toml, --fix, --show-fixes, --exit-non-zero-on-fix ] + language: system + types_or: [python] - repo: local hooks: - id: black @@ -14,12 +16,6 @@ repos: types_or: [python, pyi] require_serial: true # run once for all files exclude: ^tests/.*snapshots/ - - repo: https://github.com/pycqa/flake8 - rev: 5.0.4 - hooks: - - id: flake8 - args: ["--config", "setup.cfg"] - exclude: ^tests/.*snapshots/ - repo: local hooks: - id: pyright @@ -29,13 +25,6 @@ repos: types: [python] require_serial: true # run once for all files exclude: ^tests/.*snapshots/ - - repo: https://github.com/asottile/pyupgrade - rev: v3.1.0 - hooks: - - id: pyupgrade - stages: [manual] - args: ["--py310-plus"] - exclude: ^tests/.*snapshots/ - repo: local hooks: - id: gen-schema diff --git a/poetry.lock b/poetry.lock index 339e37345..7a5328922 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "aiofiles" @@ -349,22 +349,6 @@ files = [ docs = ["furo (>=2022.9.29)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] testing = ["covdefaults (>=2.2.2)", "coverage (>=6.5)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] -[[package]] -name = "flake8" -version = "4.0.1" -description = "the modular source code checker: pep8 pyflakes and co" -optional = false -python-versions = ">=3.6" -files = [ - {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, - {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, -] - -[package.dependencies] -mccabe = ">=0.6.0,<0.7.0" -pycodestyle = ">=2.8.0,<2.9.0" -pyflakes = ">=2.4.0,<2.5.0" - [[package]] name = "ghp-import" version = "2.1.0" @@ -473,23 +457,6 @@ files = [ {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] -[[package]] -name = "isort" -version = "5.12.0" -description = "A Python utility / library to sort Python imports." 
-optional = false -python-versions = ">=3.8.0" -files = [ - {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, - {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, -] - -[package.extras] -colors = ["colorama (>=0.4.3)"] -pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] -plugins = ["setuptools"] -requirements-deprecated-finder = ["pip-api", "pipreqs"] - [[package]] name = "jinja2" version = "3.1.2" @@ -606,17 +573,6 @@ chardet = ">=3.0.4,<6" [package.extras] test = ["Faker (>=1.0.2)", "pytest (>=6.0.1)", "pytest-md-report (>=0.1)"] -[[package]] -name = "mccabe" -version = "0.6.1" -description = "McCabe checker, plugin for flake8" -optional = false -python-versions = "*" -files = [ - {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, - {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, -] - [[package]] name = "mergedeep" version = "1.3.4" @@ -869,17 +825,6 @@ pyyaml = ">=5.1" toml = "*" virtualenv = ">=20.0.8" -[[package]] -name = "pycodestyle" -version = "2.8.0" -description = "Python style guide checker" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, - {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, -] - [[package]] name = "pydantic" version = "1.10.8" @@ -933,17 +878,6 @@ typing-extensions = ">=4.2.0" dotenv = ["python-dotenv (>=0.10.4)"] email = ["email-validator (>=1.0.3)"] -[[package]] -name = "pyflakes" -version = "2.4.0" -description = "passive checker of Python programs" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, - {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, -] - [[package]] name = "pygments" version = "2.14.0" @@ -1449,6 +1383,32 @@ pygments = ">=2.6.0,<3.0.0" [package.extras] jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] +[[package]] +name = "ruff" +version = "0.0.291" +description = "An extremely fast Python linter, written in Rust." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.0.291-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:b97d0d7c136a85badbc7fd8397fdbb336e9409b01c07027622f28dcd7db366f2"}, + {file = "ruff-0.0.291-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:6ab44ea607967171e18aa5c80335237be12f3a1523375fa0cede83c5cf77feb4"}, + {file = "ruff-0.0.291-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a04b384f2d36f00d5fb55313d52a7d66236531195ef08157a09c4728090f2ef0"}, + {file = "ruff-0.0.291-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b727c219b43f903875b7503a76c86237a00d1a39579bb3e21ce027eec9534051"}, + {file = "ruff-0.0.291-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87671e33175ae949702774071b35ed4937da06f11851af75cd087e1b5a488ac4"}, + {file = "ruff-0.0.291-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:b75f5801547f79b7541d72a211949754c21dc0705c70eddf7f21c88a64de8b97"}, + {file = "ruff-0.0.291-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b09b94efdcd162fe32b472b2dd5bf1c969fcc15b8ff52f478b048f41d4590e09"}, + {file = "ruff-0.0.291-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d5b56bc3a2f83a7a1d7f4447c54d8d3db52021f726fdd55d549ca87bca5d747"}, + {file = "ruff-0.0.291-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13f0d88e5f367b2dc8c7d90a8afdcfff9dd7d174e324fd3ed8e0b5cb5dc9b7f6"}, + {file = "ruff-0.0.291-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b3eeee1b1a45a247758ecdc3ab26c307336d157aafc61edb98b825cadb153df3"}, + {file = "ruff-0.0.291-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:6c06006350c3bb689765d71f810128c9cdf4a1121fd01afc655c87bab4fb4f83"}, + {file = "ruff-0.0.291-py3-none-musllinux_1_2_i686.whl", hash = "sha256:fd17220611047de247b635596e3174f3d7f2becf63bd56301fc758778df9b629"}, + {file = "ruff-0.0.291-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5383ba67ad360caf6060d09012f1fb2ab8bd605ab766d10ca4427a28ab106e0b"}, + {file = "ruff-0.0.291-py3-none-win32.whl", hash = "sha256:1d5f0616ae4cdc7a938b493b6a1a71c8a47d0300c0d65f6e41c281c2f7490ad3"}, + {file = "ruff-0.0.291-py3-none-win_amd64.whl", hash = "sha256:8a69bfbde72db8ca1c43ee3570f59daad155196c3fbe357047cd9b77de65f15b"}, + {file = "ruff-0.0.291-py3-none-win_arm64.whl", hash = "sha256:d867384a4615b7f30b223a849b52104214442b5ba79b473d7edd18da3cde22d6"}, + {file = "ruff-0.0.291.tar.gz", hash = "sha256:c61109661dde9db73469d14a82b42a88c7164f731e6a3b0042e71394c1c7ceed"}, +] + [[package]] name = "setuptools" version = "65.6.3" @@ -1809,4 +1769,4 @@ watchmedo = ["PyYAML (>=3.10)"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "a97ca69785c7aeb0e04fad209cfd5694c54b5993faca02540679072c7179a875" +content-hash = "3b046d340a48c32ea5c9d7fa6e6687a15173dd97da4c0aa982cc1d48b778012e" diff --git a/pyproject.toml b/pyproject.toml index 4d8a34e1e..ab9f3597a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,9 +45,8 @@ pytest-mock = "^3.10.0" pytest-timeout = "^2.1.0" snapshottest = "^0.6.0" pre-commit = "^2.19.0" -flake8 = "^4.0.1" +ruff = "^0.0.291" black = "^23.7.0" -isort = "^5.12.0" typer-cli = "^0.0.13" pyright = "^1.1.314" pytest-rerunfailures = "^11.1.2" @@ -67,6 +66,94 @@ mike = "^1.1.2" [tool.poetry_bumpversion.file."kpops/__init__.py"] +[tool.ruff] +# E203: whitespace before ':' -- Not PEP8 compliant, black won't correct it +# E501: Line too long -- Clashes with `black` 
+# D100-D107: Missing docstring for {} -- Inconvenient to enforce +# RUF012: type class attrs with `ClassVar` -- Too strict/trigger-happy +# UP007: `typer` Use X | Y for type annotations -- `typer` doesn't support it +# RET505-RET508: Lots of false positives +# PLR09: upper bound on number of arguments, functions, etc. -- Inconvenient to enforce +# PLR2004: Magic value used in comparison, consider replacing {value} with a constant variable -- Inconvenient to enforce +# TRY002, TRY003: Define your own exception class, avoid using long messages -- Inconvenient to enforce +ignore = [ + "E203", + "E501", + "D100", + "D101", + "D102", + "D103", + "D104", + "D105", + "D106", + "D107", + "RUF012", + "UP007", + "RET505", + "RET506", + "RET507", + "RET508", + "PLR09", + "PLR2004", + "TRY002", + "TRY003", +] +# For a list of all possible rules visit https://beta.ruff.rs/docs/rules/ +# Pyflakes(F), pycodestyle(E,W), mccabe(C90), isort(I), pyupgrade(UP), flake8-bugbear(B), +# flake8-no-pep420(INP), ruff(RUF), pep8-naming (N), flake8-2020 (YTT), flake8-async (ASYNC), +# flake8-blind-except (BLE), flake8-commas (COM), flake8-comprehensions (C4), flake8-debugger (T10), +# flake8-errmsg (EM), flake8-future-annotations (FA), flake8-implicit-str-concat (ISC), +# flake8-import-conventions (ICN), flake8-no-pep420 (INP), flake8-pie (PIE), flake8-pytest-style (PT) +# flake8-quotes (Q), flake8-raise (RSE), flake8-return (RET), flake8-slots (SLOT), +# flake8-simplify (SIM), flake8-type-checking (TCH), flake8-unused-arguments (ARG), +# flake8-use-pathlib (PTH), pygrep-hooks (PGH), Pylint (PL), tryceratops (TRY), refurb (FURB), flake8-logging (LOG) +select = [ + "F", + "E", + "W", + "C90", + "I", + "D", + "UP", + "B", + "INP", + "RUF", + "YTT", + "ASYNC", + "BLE", + "COM", + "C4", + "T10", + "EM", + "FA", + "ISC", + "ICN", + "INP", + "PIE", + "PT", + "Q", + "RSE", + "RET", + "SLOT", + "SIM", + "TCH", + "ARG", + "PTH", + "PGH", + "PL", + "TRY", + "FURB", + "LOG", +] +format = "grouped" +show-fixes = true +task-tags = ["TODO", "HACK", "FIXME", "XXX"] +target-version = "py310" + +[tool.ruff.pydocstyle] +# Uses pep257-compatible docstrings +convention = "pep257" + [build-system] requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" From b94737f80a0edebb04fb94ccb9c469186e792ea1 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 26 Sep 2023 20:29:50 +0300 Subject: [PATCH 02/76] ci: exclude snapshots --- pyproject.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index ab9f3597a..390f4db4e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -145,10 +145,11 @@ select = [ "FURB", "LOG", ] -format = "grouped" +output-format = "grouped" show-fixes = true task-tags = ["TODO", "HACK", "FIXME", "XXX"] target-version = "py310" +exclude = ["tests/*snapshots/*"] [tool.ruff.pydocstyle] # Uses pep257-compatible docstrings From 0833e1be41e0c29a8bc74dfb5686ba629546b68b Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 26 Sep 2023 20:43:14 +0300 Subject: [PATCH 03/76] ci(ruff): ignore F401 in tests/*/__init__.py --- pyproject.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 390f4db4e..431a76ddc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -98,6 +98,8 @@ ignore = [ "TRY002", "TRY003", ] +[tool.ruff.extend-per-file-ignores] +"tests/*/__init__.py" = ["F401"] # For a list of all possible rules visit https://beta.ruff.rs/docs/rules/ # Pyflakes(F), pycodestyle(E,W), mccabe(C90), isort(I), pyupgrade(UP), 
flake8-bugbear(B), # flake8-no-pep420(INP), ruff(RUF), pep8-naming (N), flake8-2020 (YTT), flake8-async (ASYNC), From a4c92a75e64890a5f5b24a5f70f9a6ebc62c41d0 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 26 Sep 2023 20:44:16 +0300 Subject: [PATCH 04/76] ci(ruff): fix order --- pyproject.toml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 431a76ddc..a0701d610 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -98,8 +98,6 @@ ignore = [ "TRY002", "TRY003", ] -[tool.ruff.extend-per-file-ignores] -"tests/*/__init__.py" = ["F401"] # For a list of all possible rules visit https://beta.ruff.rs/docs/rules/ # Pyflakes(F), pycodestyle(E,W), mccabe(C90), isort(I), pyupgrade(UP), flake8-bugbear(B), # flake8-no-pep420(INP), ruff(RUF), pep8-naming (N), flake8-2020 (YTT), flake8-async (ASYNC), @@ -153,6 +151,9 @@ task-tags = ["TODO", "HACK", "FIXME", "XXX"] target-version = "py310" exclude = ["tests/*snapshots/*"] +[tool.ruff.extend-per-file-ignores] +"tests/*/__init__.py" = ["F401"] + [tool.ruff.pydocstyle] # Uses pep257-compatible docstrings convention = "pep257" From c8bd70c16018aa62e727f5d24d683de6048a446f Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 26 Sep 2023 20:54:42 +0300 Subject: [PATCH 05/76] ci(ruff): comment out rules in nursery, remove comment --- pyproject.toml | 24 ++++-------------------- 1 file changed, 4 insertions(+), 20 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index a0701d610..aa439bea9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -77,16 +77,9 @@ mike = "^1.1.2" # PLR2004: Magic value used in comparison, consider replacing {value} with a constant variable -- Inconvenient to enforce # TRY002, TRY003: Define your own exception class, avoid using long messages -- Inconvenient to enforce ignore = [ - "E203", + # "E203", # Add when out of nursery "E501", - "D100", - "D101", - "D102", - "D103", - "D104", - "D105", - "D106", - "D107", + "D1", "RUF012", "UP007", "RET505", @@ -98,15 +91,6 @@ ignore = [ "TRY002", "TRY003", ] -# For a list of all possible rules visit https://beta.ruff.rs/docs/rules/ -# Pyflakes(F), pycodestyle(E,W), mccabe(C90), isort(I), pyupgrade(UP), flake8-bugbear(B), -# flake8-no-pep420(INP), ruff(RUF), pep8-naming (N), flake8-2020 (YTT), flake8-async (ASYNC), -# flake8-blind-except (BLE), flake8-commas (COM), flake8-comprehensions (C4), flake8-debugger (T10), -# flake8-errmsg (EM), flake8-future-annotations (FA), flake8-implicit-str-concat (ISC), -# flake8-import-conventions (ICN), flake8-no-pep420 (INP), flake8-pie (PIE), flake8-pytest-style (PT) -# flake8-quotes (Q), flake8-raise (RSE), flake8-return (RET), flake8-slots (SLOT), -# flake8-simplify (SIM), flake8-type-checking (TCH), flake8-unused-arguments (ARG), -# flake8-use-pathlib (PTH), pygrep-hooks (PGH), Pylint (PL), tryceratops (TRY), refurb (FURB), flake8-logging (LOG) select = [ "F", "E", @@ -142,8 +126,8 @@ select = [ "PGH", "PL", "TRY", - "FURB", - "LOG", + # "FURB", # Add when out of nursery + # "LOG", # Add when out of nursery ] output-format = "grouped" show-fixes = true From c7bb733ac9f7c01c0b474f5ee310e0a0991db944 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 26 Sep 2023 20:55:20 +0300 Subject: [PATCH 06/76] ci(ruff): Autofix "D" --- hooks/__init__.py | 2 +- hooks/gen_schema.py | 2 +- kpops/cli/registry.py | 2 +- .../helm_wrapper/dry_run_handler.py | 2 +- kpops/component_handlers/helm_wrapper/helm.py | 4 +-- .../helm_wrapper/helm_diff.py | 2 +- .../component_handlers/helm_wrapper/model.py 
| 6 ++-- .../component_handlers/helm_wrapper/utils.py | 2 +- .../kafka_connect/connect_wrapper.py | 10 +++--- .../component_handlers/kafka_connect/model.py | 2 +- .../kafka_connect/timeout.py | 2 +- .../component_handlers/topic/proxy_wrapper.py | 10 +++--- .../base_defaults_component.py | 10 +++--- kpops/components/base_components/kafka_app.py | 12 +++---- .../base_components/kafka_connector.py | 24 ++++++------- .../base_components/kubernetes_app.py | 20 +++++------ .../base_components/models/from_section.py | 8 ++--- .../base_components/models/to_section.py | 8 ++--- .../base_components/pipeline_component.py | 34 +++++++++---------- .../streams_bootstrap/producer/model.py | 4 +-- .../producer/producer_app.py | 2 +- .../streams_bootstrap/streams/model.py | 6 ++-- .../streams_bootstrap/streams/streams_app.py | 4 +-- kpops/pipeline_generator/pipeline.py | 16 ++++----- kpops/utils/dict_differ.py | 4 +-- kpops/utils/dict_ops.py | 6 ++-- kpops/utils/docstring.py | 6 ++-- kpops/utils/gen_schema.py | 4 +-- tests/cli/test_schema_generation.py | 2 +- 29 files changed, 105 insertions(+), 111 deletions(-) diff --git a/hooks/__init__.py b/hooks/__init__.py index 0ae8ea143..ef17ce38a 100644 --- a/hooks/__init__.py +++ b/hooks/__init__.py @@ -1,4 +1,4 @@ -"""KPOps pre-commit hooks""" +"""KPOps pre-commit hooks.""" from pathlib import Path PATH_ROOT = Path(__file__).parents[1] diff --git a/hooks/gen_schema.py b/hooks/gen_schema.py index 8fc24f938..438af41ee 100644 --- a/hooks/gen_schema.py +++ b/hooks/gen_schema.py @@ -1,4 +1,4 @@ -"""Generates the stock KPOps editor integration schemas""" +"""Generates the stock KPOps editor integration schemas.""" from contextlib import redirect_stdout from io import StringIO from pathlib import Path diff --git a/kpops/cli/registry.py b/kpops/cli/registry.py index 410aa1be5..3d7e83195 100644 --- a/kpops/cli/registry.py +++ b/kpops/cli/registry.py @@ -29,7 +29,7 @@ class Registry: def find_components(self, module_name: str) -> None: """ Find all PipelineComponent subclasses in module - :param module_name: name of the python module + :param module_name: name of the python module. """ for _class in _find_classes(module_name, PipelineComponent): self._classes[_class.type] = _class diff --git a/kpops/component_handlers/helm_wrapper/dry_run_handler.py b/kpops/component_handlers/helm_wrapper/dry_run_handler.py index 8e260f7df..2d28957b7 100644 --- a/kpops/component_handlers/helm_wrapper/dry_run_handler.py +++ b/kpops/component_handlers/helm_wrapper/dry_run_handler.py @@ -11,7 +11,7 @@ def __init__(self, helm: Helm, helm_diff: HelmDiff, namespace: str) -> None: self.namespace = namespace def print_helm_diff(self, stdout: str, helm_release_name: str, log: Logger) -> None: - """Print the diff of the last and current release of this component + """Print the diff of the last and current release of this component. 
:param stdout: The output of a Helm command that installs or upgrades the release :param helm_release_name: The Helm release name diff --git a/kpops/component_handlers/helm_wrapper/helm.py b/kpops/component_handlers/helm_wrapper/helm.py index 2ad3f5f01..ed4adeba3 100644 --- a/kpops/component_handlers/helm_wrapper/helm.py +++ b/kpops/component_handlers/helm_wrapper/helm.py @@ -77,7 +77,7 @@ def upgrade_install( values: dict, flags: HelmUpgradeInstallFlags = HelmUpgradeInstallFlags(), ) -> str: - """Prepares and executes the `helm upgrade --install` command""" + """Prepares and executes the `helm upgrade --install` command.""" with tempfile.NamedTemporaryFile("w") as values_file: yaml.safe_dump(values, values_file) @@ -103,7 +103,7 @@ def uninstall( release_name: str, dry_run: bool, ) -> str | None: - """Prepares and executes the helm uninstall command""" + """Prepares and executes the helm uninstall command.""" command = [ "helm", "uninstall", diff --git a/kpops/component_handlers/helm_wrapper/helm_diff.py b/kpops/component_handlers/helm_wrapper/helm_diff.py index e778a7df2..7c49e2ef0 100644 --- a/kpops/component_handlers/helm_wrapper/helm_diff.py +++ b/kpops/component_handlers/helm_wrapper/helm_diff.py @@ -17,7 +17,7 @@ def calculate_changes( current_release: Iterable[HelmTemplate], new_release: Iterable[HelmTemplate], ) -> Iterator[Change[dict]]: - """Compare 2 releases and generate a Change object for each difference + """Compare 2 releases and generate a Change object for each difference. :param current_release: Iterable containing HelmTemplate objects for the current release :param new_release: Iterable containing HelmTemplate objects for the new release diff --git a/kpops/component_handlers/helm_wrapper/model.py b/kpops/component_handlers/helm_wrapper/model.py index a8aaf8906..5c47c8db2 100644 --- a/kpops/component_handlers/helm_wrapper/model.py +++ b/kpops/component_handlers/helm_wrapper/model.py @@ -20,7 +20,7 @@ class HelmDiffConfig(BaseModel): class RepoAuthFlags(BaseModel): - """Authorisation-related flags for `helm repo` + """Authorisation-related flags for `helm repo`. :param username: Username, defaults to None :param password: Password, defaults to None @@ -65,7 +65,7 @@ def to_command(self) -> list[str]: class HelmRepoConfig(BaseModel): - """Helm repository configuration + """Helm repository configuration. :param repository_name: Name of the Helm repository :param url: URL to the Helm repository @@ -85,7 +85,7 @@ class Config(DescConfig): class HelmConfig(BaseModel): - """Global Helm configuration + """Global Helm configuration. :param context: Name of kubeconfig context (`--kube-context`) :param debug: Run Helm in Debug mode diff --git a/kpops/component_handlers/helm_wrapper/utils.py b/kpops/component_handlers/helm_wrapper/utils.py index d39536041..aefd00870 100644 --- a/kpops/component_handlers/helm_wrapper/utils.py +++ b/kpops/component_handlers/helm_wrapper/utils.py @@ -11,7 +11,7 @@ def trim_release_name(name: str, suffix: str = "") -> str: Trim Helm release name while preserving suffix. :param name: The release name including optional suffix :param suffix: The release suffix to preserve - :return: Truncated release name + :return: Truncated release name. 
""" if len(name) > RELEASE_NAME_MAX_LEN: new_name = name[: (RELEASE_NAME_MAX_LEN - len(suffix))] + suffix diff --git a/kpops/component_handlers/kafka_connect/connect_wrapper.py b/kpops/component_handlers/kafka_connect/connect_wrapper.py index 9a3dd307e..c754d910e 100644 --- a/kpops/component_handlers/kafka_connect/connect_wrapper.py +++ b/kpops/component_handlers/kafka_connect/connect_wrapper.py @@ -20,9 +20,7 @@ class ConnectWrapper: - """ - Wraps Kafka Connect APIs - """ + """Wraps Kafka Connect APIs.""" def __init__(self, host: str | None): if not host: @@ -44,7 +42,7 @@ def create_connector( Creates a new connector API Reference: https://docs.confluent.io/platform/current/connect/references/restapi.html#post--connectors :param connector_config: The config of the connector - :return: The current connector info if successful + :return: The current connector info if successful. """ config_json = connector_config.dict() connect_data = {"name": connector_config.name, "config": config_json} @@ -68,7 +66,7 @@ def get_connector(self, connector_name: str) -> KafkaConnectResponse: Get information about the connector. API Reference: https://docs.confluent.io/platform/current/connect/references/restapi.html#get--connectors-(string-name) :param connector_name: Nameof the crated connector - :return: Information about the connector + :return: Information about the connector. """ response = httpx.get( url=f"{self._host}/connectors/{connector_name}", headers=HEADERS @@ -153,7 +151,7 @@ def validate_connector_config( def delete_connector(self, connector_name: str) -> None: """ Deletes a connector, halting all tasks and deleting its configuration. - API Reference:https://docs.confluent.io/platform/current/connect/references/restapi.html#delete--connectors-(string-name)- + API Reference:https://docs.confluent.io/platform/current/connect/references/restapi.html#delete--connectors-(string-name)-. """ response = httpx.delete( url=f"{self._host}/connectors/{connector_name}", headers=HEADERS diff --git a/kpops/component_handlers/kafka_connect/model.py b/kpops/component_handlers/kafka_connect/model.py index 9feed448f..99964d3c5 100644 --- a/kpops/component_handlers/kafka_connect/model.py +++ b/kpops/component_handlers/kafka_connect/model.py @@ -13,7 +13,7 @@ class KafkaConnectorType(str, Enum): class KafkaConnectorConfig(BaseModel): - """Settings specific to Kafka Connectors""" + """Settings specific to Kafka Connectors.""" connector_class: str name: str = Field(default=..., hidden_from_schema=True) diff --git a/kpops/component_handlers/kafka_connect/timeout.py b/kpops/component_handlers/kafka_connect/timeout.py index d93d608b7..c1f1bcdab 100644 --- a/kpops/component_handlers/kafka_connect/timeout.py +++ b/kpops/component_handlers/kafka_connect/timeout.py @@ -12,7 +12,7 @@ def timeout(func: Callable[..., T], *, secs: int = 0) -> T | None: """ Sets a timeout for a given lambda function :param func: The callable function - :param secs: The timeout in seconds + :param secs: The timeout in seconds. 
""" async def main_supervisor(func: Callable[..., T], secs: int) -> T: diff --git a/kpops/component_handlers/topic/proxy_wrapper.py b/kpops/component_handlers/topic/proxy_wrapper.py index af7914379..dc19656e4 100644 --- a/kpops/component_handlers/topic/proxy_wrapper.py +++ b/kpops/component_handlers/topic/proxy_wrapper.py @@ -21,9 +21,7 @@ class ProxyWrapper: - """ - Wraps Kafka REST Proxy APIs - """ + """Wraps Kafka REST Proxy APIs.""" def __init__(self, pipeline_config: PipelineConfig) -> None: if not pipeline_config.kafka_rest_host: @@ -38,7 +36,7 @@ def cluster_id(self) -> str: """ Gets the Kafka cluster ID by sending a requests to Kafka REST proxy. More information about the cluster ID can be found here: - https://docs.confluent.io/platform/current/kafka-rest/api.html#cluster-v3 + https://docs.confluent.io/platform/current/kafka-rest/api.html#cluster-v3. Currently both Kafka and Kafka REST Proxy are only aware of the Kafka cluster pointed at by the bootstrap.servers configuration. Therefore, only one Kafka cluster will be returned. @@ -77,7 +75,7 @@ def delete_topic(self, topic_name: str) -> None: """ Deletes a topic API Reference: https://docs.confluent.io/platform/current/kafka-rest/api.html#delete--clusters-cluster_id-topics-topic_name - :param topic_name: Name of the topic + :param topic_name: Name of the topic. """ response = httpx.delete( url=f"{self.host}/v3/clusters/{self.cluster_id}/topics/{topic_name}", @@ -94,7 +92,7 @@ def get_topic(self, topic_name: str) -> TopicResponse: Returns the topic with the given topic_name. API Reference: https://docs.confluent.io/platform/current/kafka-rest/api.html#get--clusters-cluster_id-topics-topic_name :param topic_name: The topic name. - :return: Response of the get topic API + :return: Response of the get topic API. """ response = httpx.get( url=f"{self.host}/v3/clusters/{self.cluster_id}/topics/{topic_name}", diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index 99dec42f2..a02cc1417 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -78,14 +78,14 @@ def __init__(self, **kwargs) -> None: @cached_classproperty def type(cls: type[Self]) -> str: # pyright: ignore - """Return calling component's type + """Return calling component's type. :returns: Component class name in dash-case """ return to_dash(cls.__name__) def extend_with_defaults(self, **kwargs) -> dict: - """Merge parent components' defaults with own + """Merge parent components' defaults with own. :param kwargs: The init kwargs for pydantic :returns: Enriched kwargs with inheritted defaults @@ -121,7 +121,7 @@ def load_defaults( defaults_file_path: Path, environment_defaults_file_path: Path | None = None, ) -> dict: - """Resolve component-specific defaults including environment defaults + """Resolve component-specific defaults including environment defaults. :param component_class: Component class :param defaults_file_path: Path to `defaults.yaml` @@ -153,7 +153,7 @@ def load_defaults( def defaults_from_yaml(path: Path, key: str) -> dict: - """Read component-specific settings from a defaults yaml file and return @default if not found + """Read component-specific settings from a defaults yaml file and return @default if not found. 
:param path: Path to defaults yaml file :param key: Component type @@ -178,7 +178,7 @@ def defaults_from_yaml(path: Path, key: str) -> dict: def get_defaults_file_paths(config: PipelineConfig) -> tuple[Path, Path]: - """Return the paths to the main and the environment defaults-files + """Return the paths to the main and the environment defaults-files. The files need not exist, this function will only check if the dir set in `config.defaults_path` exists and return paths to the defaults files diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index c522e040e..c6919bf3e 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -22,7 +22,7 @@ class KafkaStreamsConfig(BaseModel): - """Kafka Streams config + """Kafka Streams config. :param brokers: Brokers :param schema_registry_url: URL of the schema registry, defaults to None @@ -38,7 +38,7 @@ class Config(CamelCaseConfig, DescConfig): class KafkaAppConfig(KubernetesAppConfig): - """Settings specific to Kafka Apps + """Settings specific to Kafka Apps. :param streams: Kafka streams config :param name_override: Override name with this value, defaults to None @@ -82,7 +82,7 @@ class KafkaApp(KubernetesApp, ABC): @property def clean_up_helm_chart(self) -> str: - """Helm chart used to destroy and clean this component""" + """Helm chart used to destroy and clean this component.""" raise NotImplementedError() @override @@ -104,7 +104,7 @@ def _run_clean_up_job( dry_run: bool, retain_clean_jobs: bool = False, ) -> None: - """Clean an app using the respective cleanup job + """Clean an app using the respective cleanup job. :param values: The value YAML for the chart :param dry_run: Dry run command @@ -133,7 +133,7 @@ def _run_clean_up_job( self.__uninstall_clean_up_job(clean_up_release_name, dry_run) def __uninstall_clean_up_job(self, release_name: str, dry_run: bool) -> None: - """Uninstall clean up job + """Uninstall clean up job. :param release_name: Name of the Helm release :param dry_run: Whether to do a dry run of the command @@ -147,7 +147,7 @@ def __install_clean_up_job( values: dict, dry_run: bool, ) -> str: - """Install clean up job + """Install clean up job. :param release_name: Name of the Helm release :param suffix: Suffix to add to the release name, e.g. "-clean" diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py index e53886d68..73a0d1e38 100644 --- a/kpops/components/base_components/kafka_connector.py +++ b/kpops/components/base_components/kafka_connector.py @@ -34,7 +34,7 @@ class KafkaConnector(PipelineComponent, ABC): - """Base class for all Kafka connectors + """Base class for all Kafka connectors. 
Should only be used to set defaults @@ -91,7 +91,7 @@ def connector_config_should_have_component_name( @cached_property def helm(self) -> Helm: - """Helm object that contains component-specific config such as repo""" + """Helm object that contains component-specific config such as repo.""" helm_repo_config = self.repo_config helm = Helm(self.config.helm_config) helm.add_repo( @@ -119,7 +119,7 @@ def dry_run_handler(self) -> DryRunHandler: @property def helm_flags(self) -> HelmFlags: - """Return shared flags for Helm commands""" + """Return shared flags for Helm commands.""" return HelmFlags( **self.repo_config.repo_auth_flags.dict(), version=self.version, @@ -128,7 +128,7 @@ def helm_flags(self) -> HelmFlags: @property def template_flags(self) -> HelmTemplateFlags: - """Return flags for Helm template command""" + """Return flags for Helm template command.""" return HelmTemplateFlags( **self.helm_flags.dict(), api_version=self.config.helm_config.api_version, @@ -169,7 +169,7 @@ def _run_connect_resetter( retain_clean_jobs: bool, **kwargs, ) -> None: - """Clean the connector from the cluster + """Clean the connector from the cluster. At first, it deletes the previous cleanup job (connector resetter) to make sure that there is no running clean job in the cluster. Then it releases a cleanup job. @@ -208,7 +208,7 @@ def __install_connect_resetter( dry_run: bool, **kwargs, ) -> str: - """Install connector resetter + """Install connector resetter. :param dry_run: Whether to dry run the command :return: The output of `helm upgrade --install` @@ -233,7 +233,7 @@ def _get_kafka_connect_resetter_values( self, **kwargs, ) -> dict: - """Get connector resetter helm chart values + """Get connector resetter helm chart values. :return: The Helm chart values of the connector resetter """ @@ -251,7 +251,7 @@ def _get_kafka_connect_resetter_values( } def __uninstall_connect_resetter(self, release_name: str, dry_run: bool) -> None: - """Uninstall connector resetter + """Uninstall connector resetter. :param release_name: Name of the release to be uninstalled :param dry_run: Whether to do a dry run of the command @@ -264,7 +264,7 @@ def __uninstall_connect_resetter(self, release_name: str, dry_run: bool) -> None class KafkaSourceConnector(KafkaConnector): - """Kafka source connector model + """Kafka source connector model. :param offset_topic: offset.storage.topic, more info: https://kafka.apache.org/documentation/#connect_running, @@ -306,7 +306,7 @@ def clean(self, dry_run: bool) -> None: self.__run_kafka_connect_resetter(dry_run) def __run_kafka_connect_resetter(self, dry_run: bool) -> None: - """Runs the connector resetter + """Runs the connector resetter. :param dry_run: Whether to do a dry run of the command """ @@ -318,7 +318,7 @@ def __run_kafka_connect_resetter(self, dry_run: bool) -> None: class KafkaSinkConnector(KafkaConnector): - """Kafka sink connector model""" + """Kafka sink connector model.""" _connector_type = KafkaConnectorType.SINK @@ -361,7 +361,7 @@ def clean(self, dry_run: bool) -> None: def __run_kafka_connect_resetter( self, dry_run: bool, delete_consumer_group: bool ) -> None: - """Runs the connector resetter + """Runs the connector resetter. 
:param dry_run: Whether to do a dry run of the command :param delete_consumer_group: Whether the consumer group should be deleted or not diff --git a/kpops/components/base_components/kubernetes_app.py b/kpops/components/base_components/kubernetes_app.py index 88ef8380d..57c51a187 100644 --- a/kpops/components/base_components/kubernetes_app.py +++ b/kpops/components/base_components/kubernetes_app.py @@ -30,7 +30,7 @@ class KubernetesAppConfig(BaseModel): - """Settings specific to Kubernetes Apps""" + """Settings specific to Kubernetes Apps.""" class Config(CamelCaseConfig, DescConfig): extra = Extra.allow @@ -68,7 +68,7 @@ class KubernetesApp(PipelineComponent): @cached_property def helm(self) -> Helm: - """Helm object that contains component-specific config such as repo""" + """Helm object that contains component-specific config such as repo.""" helm = Helm(self.config.helm_config) if self.repo_config is not None: helm.add_repo( @@ -80,7 +80,7 @@ def helm(self) -> Helm: @cached_property def helm_diff(self) -> HelmDiff: - """Helm diff object of last and current release of this component""" + """Helm diff object of last and current release of this component.""" return HelmDiff(self.config.helm_diff_config) @cached_property @@ -95,14 +95,14 @@ def helm_release_name(self) -> str: @property def helm_chart(self) -> str: - """Return component's Helm chart""" + """Return component's Helm chart.""" raise NotImplementedError( f"Please implement the helm_chart property of the {self.__module__} module." ) @property def helm_flags(self) -> HelmFlags: - """Return shared flags for Helm commands""" + """Return shared flags for Helm commands.""" auth_flags = self.repo_config.repo_auth_flags.dict() if self.repo_config else {} return HelmFlags( **auth_flags, @@ -112,7 +112,7 @@ def helm_flags(self) -> HelmFlags: @property def template_flags(self) -> HelmTemplateFlags: - """Return flags for Helm template command""" + """Return flags for Helm template command.""" return HelmTemplateFlags( **self.helm_flags.dict(), api_version=self.config.helm_config.api_version, @@ -131,7 +131,7 @@ def template(self) -> None: @property def deploy_flags(self) -> HelmUpgradeInstallFlags: - """Return flags for Helm upgrade install command""" + """Return flags for Helm upgrade install command.""" return HelmUpgradeInstallFlags(**self.helm_flags.dict()) @override @@ -159,14 +159,14 @@ def destroy(self, dry_run: bool) -> None: log.info(magentaify(stdout)) def to_helm_values(self) -> dict: - """Generate a dictionary of values readable by Helm from `self.app` + """Generate a dictionary of values readable by Helm from `self.app`. :returns: Thte values to be used by Helm """ return self.app.dict(by_alias=True, exclude_none=True, exclude_defaults=True) def print_helm_diff(self, stdout: str) -> None: - """Print the diff of the last and current release of this component + """Print the diff of the last and current release of this component. :param stdout: The output of a Helm command that installs or upgrades the release """ @@ -187,7 +187,7 @@ def _validate_custom(self, **kwargs) -> None: @staticmethod def validate_kubernetes_name(name: str) -> None: - """Check if a name is valid for a Kubernetes resource + """Check if a name is valid for a Kubernetes resource. :param name: Name that is to be used for the resource :raises ValueError: The component name {name} is invalid for Kubernetes. 
diff --git a/kpops/components/base_components/models/from_section.py b/kpops/components/base_components/models/from_section.py index a3188a17b..fdef7782f 100644 --- a/kpops/components/base_components/models/from_section.py +++ b/kpops/components/base_components/models/from_section.py @@ -9,7 +9,7 @@ class InputTopicTypes(str, Enum): - """Input topic types + """Input topic types. INPUT (input topic), PATTERN (extra-topic-pattern or input-topic-pattern) """ @@ -19,7 +19,7 @@ class InputTopicTypes(str, Enum): class FromTopic(BaseModel): - """Input topic + """Input topic. :param type: Topic type, defaults to None :param role: Custom identifier belonging to a topic; @@ -37,7 +37,7 @@ class Config(DescConfig): @root_validator def extra_topic_role(cls, values: dict[str, Any]) -> dict[str, Any]: - """Ensure that cls.role is used correctly, assign type if needed""" + """Ensure that cls.role is used correctly, assign type if needed.""" if values["type"] == InputTopicTypes.INPUT and values["role"]: raise ValueError("Define role only if `type` is `pattern` or `None`") return values @@ -47,7 +47,7 @@ def extra_topic_role(cls, values: dict[str, Any]) -> dict[str, Any]: class FromSection(BaseModel): - """Holds multiple input topics + """Holds multiple input topics. :param topics: Input topics :param components: Components to read from diff --git a/kpops/components/base_components/models/to_section.py b/kpops/components/base_components/models/to_section.py index cbad0987a..c10f27c23 100644 --- a/kpops/components/base_components/models/to_section.py +++ b/kpops/components/base_components/models/to_section.py @@ -9,7 +9,7 @@ class OutputTopicTypes(str, Enum): - """Types of output topic + """Types of output topic. OUTPUT (output topic), ERROR (error topic) """ @@ -19,7 +19,7 @@ class OutputTopicTypes(str, Enum): class TopicConfig(BaseModel): - """Configure an output topic + """Configure an output topic. :param type: Topic type :param key_schema: Key schema class name @@ -65,14 +65,14 @@ class Config(DescConfig): @root_validator def extra_topic_role(cls, values: dict[str, Any]) -> dict[str, Any]: - """Ensure that cls.role is used correctly, assign type if needed""" + """Ensure that cls.role is used correctly, assign type if needed.""" if values["type"] and values["role"]: raise ValueError("Define `role` only if `type` is undefined") return values class ToSection(BaseModel): - """Holds multiple output topics + """Holds multiple output topics. :param topics: Output topics :param models: Data models diff --git a/kpops/components/base_components/pipeline_component.py b/kpops/components/base_components/pipeline_component.py index cf4bafa52..d41b1cf74 100644 --- a/kpops/components/base_components/pipeline_component.py +++ b/kpops/components/base_components/pipeline_component.py @@ -22,7 +22,7 @@ class PipelineComponent(BaseDefaultsComponent, ABC): - """Base class for all components + """Base class for all components. :param name: Component name :param prefix: Pipeline prefix that will prefix every component name. @@ -76,39 +76,39 @@ def add_extra_input_topics(self, role: str, topics: list[str]) -> None: """ def set_input_pattern(self, name: str) -> None: - """Set input pattern + """Set input pattern. :param name: Input pattern name """ def add_extra_input_pattern(self, role: str, topic: str) -> None: - """Add an input pattern of type extra + """Add an input pattern of type extra. 
:param role: Custom identifier belonging to one or multiple topics :param topic: Topic name """ def set_output_topic(self, topic_name: str) -> None: - """Set output topic + """Set output topic. :param topic_name: Output topic name """ def set_error_topic(self, topic_name: str) -> None: - """Set error topic + """Set error topic. :param topic_name: Error topic name """ def add_extra_output_topic(self, topic_name: str, role: str) -> None: - """Add an output topic of type extra + """Add an output topic of type extra. :param topic_name: Output topic name :param role: Role that is unique to the extra output topic """ def set_input_topics(self) -> None: - """Put values of config.from into the streams config section of streams bootstrap + """Put values of config.from into the streams config section of streams bootstrap. Supports extra_input_topics (topics by role) or input_topics. """ @@ -117,7 +117,7 @@ def set_input_topics(self) -> None: self.apply_from_inputs(name, topic) def apply_from_inputs(self, name: str, topic: FromTopic) -> None: - """Add a `from` section input to the component config + """Add a `from` section input to the component config. :param name: Name of the field :param topic: Value of the field @@ -133,7 +133,7 @@ def apply_from_inputs(self, name: str, topic: FromTopic) -> None: self.add_input_topics([name]) def set_output_topics(self) -> None: - """Put values of config.to into the producer config section of streams bootstrap + """Put values of config.to into the producer config section of streams bootstrap. Supports extra_output_topics (topics by role) or output_topics. """ @@ -142,7 +142,7 @@ def set_output_topics(self) -> None: self.apply_to_outputs(name, topic) def apply_to_outputs(self, name: str, topic: TopicConfig) -> None: - """Add a `to` section input to the component config + """Add a `to` section input to the component config. :param name: Name of the field :param topic: Value of the field @@ -160,7 +160,7 @@ def weave_from_topics( to: ToSection | None, from_topic: FromTopic = FromTopic(type=InputTopicTypes.INPUT), ) -> None: - """Weave output topics of upstream component or from component into config + """Weave output topics of upstream component or from component into config. Override this method to apply custom logic """ @@ -175,7 +175,7 @@ def weave_from_topics( self.apply_from_inputs(input_topic, from_topic) def inflate(self) -> list[PipelineComponent]: - """Inflate a component + """Inflate a component. This is helpful if one component should result in multiple components. To support this, override this method and return a list of components @@ -186,7 +186,7 @@ def inflate(self) -> list[PipelineComponent]: def template(self) -> None: """ - Runs `helm template` + Runs `helm template`. From HELM: Render chart templates locally and display the output. Any values that would normally be looked up or retrieved in-cluster will @@ -195,25 +195,25 @@ def template(self) -> None: """ def deploy(self, dry_run: bool) -> None: - """Deploy the component (self) to the k8s cluster + """Deploy the component (self) to the k8s cluster. :param dry_run: Whether to do a dry run of the command """ def destroy(self, dry_run: bool) -> None: - """Uninstall the component (self) from the k8s cluster + """Uninstall the component (self) from the k8s cluster. :param dry_run: Whether to do a dry run of the command """ def reset(self, dry_run: bool) -> None: - """Reset component (self) state + """Reset component (self) state. 
:param dry_run: Whether to do a dry run of the command """ def clean(self, dry_run: bool) -> None: - """Remove component (self) and any trace of it + """Remove component (self) and any trace of it. :param dry_run: Whether to do a dry run of the command """ diff --git a/kpops/components/streams_bootstrap/producer/model.py b/kpops/components/streams_bootstrap/producer/model.py index 3c4ae6e46..8af1a68c6 100644 --- a/kpops/components/streams_bootstrap/producer/model.py +++ b/kpops/components/streams_bootstrap/producer/model.py @@ -8,7 +8,7 @@ class ProducerStreamsConfig(KafkaStreamsConfig): - """Kafka Streams settings specific to Producer + """Kafka Streams settings specific to Producer. :param extra_output_topics: Extra output topics :param output_topic: Output topic, defaults to None @@ -23,7 +23,7 @@ class ProducerStreamsConfig(KafkaStreamsConfig): class ProducerValues(KafkaAppConfig): - """Settings specific to producers + """Settings specific to producers. :param streams: Kafka Streams settings """ diff --git a/kpops/components/streams_bootstrap/producer/producer_app.py b/kpops/components/streams_bootstrap/producer/producer_app.py index 022ff3e5e..3a513c5a5 100644 --- a/kpops/components/streams_bootstrap/producer/producer_app.py +++ b/kpops/components/streams_bootstrap/producer/producer_app.py @@ -14,7 +14,7 @@ class ProducerApp(KafkaApp): - """Producer component + """Producer component. This producer holds configuration to use as values for the streams bootstrap producer helm chart. diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index aabbe8237..34fcd1d3e 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -13,7 +13,7 @@ class StreamsConfig(KafkaStreamsConfig): - """Streams Bootstrap streams section + """Streams Bootstrap streams section. :param input_topics: Input topics, defaults to [] :param input_pattern: Input pattern, defaults to None @@ -82,7 +82,7 @@ def dict( exclude_unset: bool = False, **kwargs, ) -> dict: - """Generate a dictionary representation of the model + """Generate a dictionary representation of the model. Optionally, specify which fields to include or exclude. @@ -105,7 +105,7 @@ def dict( class StreamsAppAutoScaling(BaseModel): - """Kubernetes Event-driven Autoscaling config + """Kubernetes Event-driven Autoscaling config. :param enabled: Whether to enable auto-scaling using KEDA., defaults to False :param consumer_group: Name of the consumer group used for checking the diff --git a/kpops/components/streams_bootstrap/streams/streams_app.py b/kpops/components/streams_bootstrap/streams/streams_app.py index 956980ff7..fb65d9ab8 100644 --- a/kpops/components/streams_bootstrap/streams/streams_app.py +++ b/kpops/components/streams_bootstrap/streams/streams_app.py @@ -10,7 +10,7 @@ class StreamsApp(KafkaApp): - """StreamsApp component that configures a streams bootstrap app + """StreamsApp component that configures a streams bootstrap app. :param app: Application-specific settings """ @@ -67,7 +67,7 @@ def clean(self, dry_run: bool) -> None: self.__run_streams_clean_up_job(dry_run, delete_output=True) def __run_streams_clean_up_job(self, dry_run: bool, delete_output: bool) -> None: - """Run clean job for this Streams app + """Run clean job for this Streams app. 
:param dry_run: Whether to do a dry run of the command :param delete_output: Whether to delete the output of the app that is being cleaned diff --git a/kpops/pipeline_generator/pipeline.py b/kpops/pipeline_generator/pipeline.py index 093a452ea..f941f80de 100644 --- a/kpops/pipeline_generator/pipeline.py +++ b/kpops/pipeline_generator/pipeline.py @@ -32,7 +32,7 @@ class ValidationError(Exception): class PipelineComponents(BaseModel): - """Stores the pipeline components""" + """Stores the pipeline components.""" components: list[PipelineComponent] = [] @@ -79,7 +79,7 @@ def _populate_component_name(component: PipelineComponent) -> None: # TODO: rem def create_env_components_index( environment_components: list[dict], ) -> dict[str, dict]: - """Create an index for all registered components in the project + """Create an index for all registered components in the project. :param environment_components: List of all components to be included :return: component index @@ -120,7 +120,7 @@ def load_from_yaml( config: PipelineConfig, handlers: ComponentHandlers, ) -> Pipeline: - """Load pipeline definition from yaml + """Load pipeline definition from yaml. The file is often named ``pipeline.yaml`` @@ -152,7 +152,7 @@ def load_from_yaml( return pipeline def parse_components(self, component_list: list[dict]) -> None: - """Instantiate, enrich and inflate a list of components + """Instantiate, enrich and inflate a list of components. :param component_list: List of components :raises ValueError: Every component must have a type defined @@ -224,7 +224,7 @@ def enrich_component( self, component: PipelineComponent, ) -> PipelineComponent: - """Enrich a pipeline component with env-specific config and substitute variables + """Enrich a pipeline component with env-specific config and substitute variables. :param component: Component to be enriched :returns: Enriched component @@ -249,7 +249,7 @@ def enrich_component( ) def print_yaml(self, substitution: dict | None = None) -> None: - """Print the generated pipeline definition + """Print the generated pipeline definition. :param substitution: Substitution dictionary, defaults to None """ @@ -277,7 +277,7 @@ def __len__(self) -> int: return len(self.components) def substitute_in_component(self, component_as_dict: dict) -> dict: - """Substitute all $-placeholders in a component in dict representation + """Substitute all $-placeholders in a component in dict representation. :param component_as_dict: Component represented as dict :return: Updated component @@ -311,7 +311,7 @@ def validate(self) -> None: @staticmethod def pipeline_filename_environment(path: Path, config: PipelineConfig) -> Path: - """Add the environment name from the PipelineConfig to the pipeline.yaml path + """Add the environment name from the PipelineConfig to the pipeline.yaml path. :param path: Path to pipeline.yaml file :param config: The PipelineConfig diff --git a/kpops/utils/dict_differ.py b/kpops/utils/dict_differ.py index 2cdaa95b0..707ea0bd0 100644 --- a/kpops/utils/dict_differ.py +++ b/kpops/utils/dict_differ.py @@ -64,9 +64,7 @@ def from_dicts( @staticmethod def __find_changed_key(key_1: list[str] | str, key_2: str = "") -> str: - """ - Generates a string that points to the changed key in the dictionary. 
- """ + """Generates a string that points to the changed key in the dictionary.""" if isinstance(key_1, list) and len(key_1) > 1: return f"{key_1[0]}[{key_1[1]}]" if not key_1: diff --git a/kpops/utils/dict_ops.py b/kpops/utils/dict_ops.py index 64e88a89b..1b081cb7f 100644 --- a/kpops/utils/dict_ops.py +++ b/kpops/utils/dict_ops.py @@ -2,7 +2,7 @@ def update_nested_pair(original_dict: dict, other_dict: Mapping) -> dict: - """Nested update for 2 dictionaries + """Nested update for 2 dictionaries. Adds all new fields in ``other_dict`` to ``original_dict``. Does not update existing fields. @@ -48,7 +48,7 @@ def update_nested(*argv: dict) -> dict: def flatten_mapping( nested_mapping: Mapping[str, Any], prefix: str | None = None, separator: str = "_" ) -> dict[str, Any]: - """Flattens a Mapping + """Flattens a Mapping. :param nested_mapping: Nested mapping that is to be flattened :param prefix: Prefix that will be applied to all top-level keys in the output., defaults to None @@ -76,7 +76,7 @@ def generate_substitution( prefix: str | None = None, existing_substitution: dict | None = None, ) -> dict: - """Generate a complete substitution dict from a given dict + """Generate a complete substitution dict from a given dict. Finds all attributes that belong to a model and expands them to create a dict containing each variable name and value to substitute with. diff --git a/kpops/utils/docstring.py b/kpops/utils/docstring.py index fc6f4c61d..d5ca287d3 100644 --- a/kpops/utils/docstring.py +++ b/kpops/utils/docstring.py @@ -4,7 +4,7 @@ def describe_attr(name: str, docstr: str | None) -> str: - """Read attribute description from class docstring + """Read attribute description from class docstring. **Works only with reStructuredText docstrings.** @@ -19,7 +19,7 @@ def describe_attr(name: str, docstr: str | None) -> str: def describe_object(docstr: str | None) -> str: - """Return description from an object's docstring + """Return description from an object's docstring. Excludes parameters and return definitions @@ -44,7 +44,7 @@ def describe_object(docstr: str | None) -> str: def _trim_description_end(desc: str) -> str: - """Remove the unwanted text that comes after a description in a docstring + """Remove the unwanted text that comes after a description in a docstring. Also removes all whitespaces and newlines and replaces them with a single space. diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index 470a1412d..1cddd830e 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -58,7 +58,7 @@ def _is_valid_component( def _add_components( components_module: str, components: tuple[type[PipelineComponent]] | None = None ) -> tuple[type[PipelineComponent]]: - """Add components to a components tuple + """Add components to a components tuple. 
If an empty tuple is provided or it is not provided at all, the components types from the given module are 'tupled' @@ -137,7 +137,7 @@ def gen_pipeline_schema( def gen_config_schema() -> None: - """Generate a json schema from the model of pipeline config""" + """Generate a json schema from the model of pipeline config.""" schema = schema_json_of( PipelineConfig, title="KPOps config schema", indent=4, sort_keys=True ) diff --git a/tests/cli/test_schema_generation.py b/tests/cli/test_schema_generation.py index 6c651dfa4..b8b248035 100644 --- a/tests/cli/test_schema_generation.py +++ b/tests/cli/test_schema_generation.py @@ -55,7 +55,7 @@ class SubPipelineComponentCorrect(SubPipelineComponent): # Correctly defined, docstr test class SubPipelineComponentCorrectDocstr(SubPipelineComponent): """ - Newline before title is removed + Newline before title is removed. Summarry is correctly imported. All From 6379c78655e2574463e46315ce946d118d6dd9cd Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 26 Sep 2023 21:09:25 +0300 Subject: [PATCH 07/76] ci(ruff): improve comments --- pyproject.toml | 107 ++++++++++++++++++++++--------------------------- 1 file changed, 49 insertions(+), 58 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index aa439bea9..2bc361142 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -67,67 +67,58 @@ mike = "^1.1.2" [tool.poetry_bumpversion.file."kpops/__init__.py"] [tool.ruff] -# E203: whitespace before ':' -- Not PEP8 compliant, black won't correct it -# E501: Line too long -- Clashes with `black` -# D100-D107: Missing docstring for {} -- Inconvenient to enforce -# RUF012: type class attrs with `ClassVar` -- Too strict/trigger-happy -# UP007: `typer` Use X | Y for type annotations -- `typer` doesn't support it -# RET505-RET508: Lots of false positives -# PLR09: upper bound on number of arguments, functions, etc. -- Inconvenient to enforce -# PLR2004: Magic value used in comparison, consider replacing {value} with a constant variable -- Inconvenient to enforce -# TRY002, TRY003: Define your own exception class, avoid using long messages -- Inconvenient to enforce ignore = [ - # "E203", # Add when out of nursery - "E501", - "D1", - "RUF012", - "UP007", - "RET505", - "RET506", - "RET507", - "RET508", - "PLR09", - "PLR2004", - "TRY002", - "TRY003", + # "E203", # whitespace before ':' -- Not PEP8 compliant, black won't correct it, add when out of nursery + "E501", # Line too long -- Clashes with `black` + "D1", # Missing docstring for {} -- Inconvenient to enforce + "RUF012", # type class attrs with `ClassVar` -- Too strict/trigger-happy + "UP007", # Use X | Y for type annotations -- `typer` doesn't support it + "RET505", # Unnecessary {branch} after return statement -- Lots of false positives + "RET506", # Unnecessary {branch} after raise statement -- Lots of false positives + "RET507", # Unnecessary {branch} after continue statement -- Lots of false positives + "RET508", # Unnecessary {branch} after break statement -- Lots of false positives + "PLR09", # upper bound on number of arguments, functions, etc. 
-- Inconvenient to enforce + "PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable -- Inconvenient to enforce + "TRY002", # Create your own exception -- Inconvenient to enforce + "TRY003", # Avoid specifying long messages outside the exception class -- Inconvenient to enforce ] select = [ - "F", - "E", - "W", - "C90", - "I", - "D", - "UP", - "B", - "INP", - "RUF", - "YTT", - "ASYNC", - "BLE", - "COM", - "C4", - "T10", - "EM", - "FA", - "ISC", - "ICN", - "INP", - "PIE", - "PT", - "Q", - "RSE", - "RET", - "SLOT", - "SIM", - "TCH", - "ARG", - "PTH", - "PGH", - "PL", - "TRY", - # "FURB", # Add when out of nursery - # "LOG", # Add when out of nursery + "F", # Pyflakes + "E", # pycodestyle Errors + "W", # pycodestyle Warnings + "C90", # mccabe + "I", # isort + "D", # pydocstyle + "UP", # pyupgrade + "B", # flake8-bugbear + "INP", # flake8-no-pep420 + "RUF", # Ruff-specific rules + "YTT", # flake8-2020 + "ASYNC", # flake8-async + "BLE", # flake8-blind-except + "COM", # flake8-commas + "C4", # flake8-comprehensions + "T10", # flake8-debugger + "EM", # flake8-errmsg + "FA", # flake8-future-annotations + "ISC", # flake8-implicit-str-concat + "ICN", # flake8-import-conventions + "INP", # flake8-no-pep420 + "PIE", # flake8-pie + "PT", # flake8-pytest-style + "Q", # flake8-quotes + "RSE", # flake8-raise + "RET", # flake8-return + "SLOT", # flake8-slots + "SIM", # flake8-simplify + "TCH", # flake8-type-checking + "ARG", # flake8-unused-arguments + "PTH", # flake8-use-pathlib + "PGH", # pygrep-hooks + "PL", # Pylint + "TRY", # tryceratops + # "FURB", # refurb, add when out of nursery + # "LOG", # flake8-logging, add when out of nursery ] output-format = "grouped" show-fixes = true From c70873c5d12b22622162090e6f8e0873f52fe415 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 26 Sep 2023 22:20:54 +0300 Subject: [PATCH 08/76] ci(ruff-pydocstyle): improve rule selection --- pyproject.toml | 23 +++++++++++++++++++---- 1 file changed, 19 insertions(+), 4 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 2bc361142..58451787a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -71,6 +71,25 @@ ignore = [ # "E203", # whitespace before ':' -- Not PEP8 compliant, black won't correct it, add when out of nursery "E501", # Line too long -- Clashes with `black` "D1", # Missing docstring for {} -- Inconvenient to enforce +# The following "D" rules do not correspond to our coding style. We use the pep257 convention, but +# "D212" should not be ignored. In ruff (0.0.291) we cannot select a rule that is excluded by specifying +# a convention, hence our only option is to manually replicate it. 
+ "D203", # 1 blank line required before class docstring + "D213", # Multi-line docstring summary should start at the second line + "D214", # Section is over-indented ("{name}") + "D215", # Section underline is over-indented ("{name}") + "D404", # First word of the docstring should not be "This" + "D405", # Section name should be properly capitalized ("{name}") + "D406", # Section name should end with a newline ("{name}") + "D407", # Missing dashed underline after section ("{name}") + "D408", # Section underline should be in the line following the section's name ("{name}") + "D409", # Section underline should match the length of its name ("{name}") + "D410", # Missing blank line after section ("{name}") + "D411", # Missing blank line before section ("{name}") + "D413", # Missing blank line after last section ("{name}") + "D415", # First line should end with a period, question mark, or exclamation point + "D416", # Section name should end with a colon ("{name}") + "D417", # Missing argument description in the docstring for {definition}: {name} "RUF012", # type class attrs with `ClassVar` -- Too strict/trigger-happy "UP007", # Use X | Y for type annotations -- `typer` doesn't support it "RET505", # Unnecessary {branch} after return statement -- Lots of false positives @@ -129,10 +148,6 @@ exclude = ["tests/*snapshots/*"] [tool.ruff.extend-per-file-ignores] "tests/*/__init__.py" = ["F401"] -[tool.ruff.pydocstyle] -# Uses pep257-compatible docstrings -convention = "pep257" - [build-system] requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" From 738e21b1230c182e2d6c37eca4a31e9cff14fa8c Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 26 Sep 2023 22:24:33 +0300 Subject: [PATCH 09/76] ci(ruff): autofix "D" --- kpops/cli/registry.py | 3 +-- .../component_handlers/helm_wrapper/model.py | 3 +-- .../component_handlers/helm_wrapper/utils.py | 3 +-- .../kafka_connect/connect_wrapper.py | 15 +++++-------- .../kafka_connect/kafka_connect_handler.py | 6 ++---- .../kafka_connect/timeout.py | 3 +-- kpops/component_handlers/topic/handler.py | 3 +-- .../component_handlers/topic/proxy_wrapper.py | 21 +++++++------------ .../base_components/pipeline_component.py | 3 +-- kpops/utils/gen_schema.py | 3 +-- tests/cli/test_schema_generation.py | 3 +-- 11 files changed, 22 insertions(+), 44 deletions(-) diff --git a/kpops/cli/registry.py b/kpops/cli/registry.py index 3d7e83195..a5a1f82ef 100644 --- a/kpops/cli/registry.py +++ b/kpops/cli/registry.py @@ -27,8 +27,7 @@ class Registry: _classes: ClassDict[PipelineComponent] = field(default_factory=dict, init=False) def find_components(self, module_name: str) -> None: - """ - Find all PipelineComponent subclasses in module + """Find all PipelineComponent subclasses in module :param module_name: name of the python module. """ for _class in _find_classes(module_name, PipelineComponent): diff --git a/kpops/component_handlers/helm_wrapper/model.py b/kpops/component_handlers/helm_wrapper/model.py index 5c47c8db2..3c29839a2 100644 --- a/kpops/component_handlers/helm_wrapper/model.py +++ b/kpops/component_handlers/helm_wrapper/model.py @@ -205,8 +205,7 @@ def __iter__(self) -> Iterator[str]: @property def manifest(self) -> str: - """ - Reads the manifest section of Helm stdout. `helm upgrade --install` output message contains three sections + """Reads the manifest section of Helm stdout. 
`helm upgrade --install` output message contains three sections in the following order: - HOOKS diff --git a/kpops/component_handlers/helm_wrapper/utils.py b/kpops/component_handlers/helm_wrapper/utils.py index aefd00870..a04b97f20 100644 --- a/kpops/component_handlers/helm_wrapper/utils.py +++ b/kpops/component_handlers/helm_wrapper/utils.py @@ -7,8 +7,7 @@ def trim_release_name(name: str, suffix: str = "") -> str: - """ - Trim Helm release name while preserving suffix. + """Trim Helm release name while preserving suffix. :param name: The release name including optional suffix :param suffix: The release suffix to preserve :return: Truncated release name. diff --git a/kpops/component_handlers/kafka_connect/connect_wrapper.py b/kpops/component_handlers/kafka_connect/connect_wrapper.py index c754d910e..a6e5ef3cb 100644 --- a/kpops/component_handlers/kafka_connect/connect_wrapper.py +++ b/kpops/component_handlers/kafka_connect/connect_wrapper.py @@ -38,8 +38,7 @@ def host(self) -> str: def create_connector( self, connector_config: KafkaConnectorConfig ) -> KafkaConnectResponse: - """ - Creates a new connector + """Creates a new connector API Reference: https://docs.confluent.io/platform/current/connect/references/restapi.html#post--connectors :param connector_config: The config of the connector :return: The current connector info if successful. @@ -62,8 +61,7 @@ def create_connector( raise KafkaConnectError(response) def get_connector(self, connector_name: str) -> KafkaConnectResponse: - """ - Get information about the connector. + """Get information about the connector. API Reference: https://docs.confluent.io/platform/current/connect/references/restapi.html#get--connectors-(string-name) :param connector_name: Nameof the crated connector :return: Information about the connector. @@ -89,8 +87,7 @@ def get_connector(self, connector_name: str) -> KafkaConnectResponse: def update_connector_config( self, connector_config: KafkaConnectorConfig ) -> KafkaConnectResponse: - """ - Create a new connector using the given configuration, or update the configuration for an existing connector. + """Create a new connector using the given configuration, or update the configuration for an existing connector. :param connector_config: Configuration parameters for the connector. :return: Information about the connector after the change has been made. """ @@ -121,8 +118,7 @@ def update_connector_config( def validate_connector_config( self, connector_config: KafkaConnectorConfig ) -> list[str]: - """ - Validate connector config using the given configuration + """Validate connector config using the given configuration :param connector_config: Configuration parameters for the connector. :return: """ @@ -149,8 +145,7 @@ def validate_connector_config( raise KafkaConnectError(response) def delete_connector(self, connector_name: str) -> None: - """ - Deletes a connector, halting all tasks and deleting its configuration. + """Deletes a connector, halting all tasks and deleting its configuration. API Reference:https://docs.confluent.io/platform/current/connect/references/restapi.html#delete--connectors-(string-name)-. 
""" response = httpx.delete( diff --git a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py index 14f5af076..1268ed6c7 100644 --- a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py +++ b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py @@ -36,8 +36,7 @@ def __init__( def create_connector( self, connector_config: KafkaConnectorConfig, *, dry_run: bool ) -> None: - """ - Creates a connector. If the connector exists the config of that connector gets updated. + """Creates a connector. If the connector exists the config of that connector gets updated. :param connector_config: The connector config. :param dry_run: If the connector creation should be run in dry run mode. """ @@ -64,8 +63,7 @@ def create_connector( ) def destroy_connector(self, connector_name: str, *, dry_run: bool) -> None: - """ - Deletes a connector resource from the cluster. + """Deletes a connector resource from the cluster. :param connector_name: The connector name. :param dry_run: If the connector deletion should be run in dry run mode. """ diff --git a/kpops/component_handlers/kafka_connect/timeout.py b/kpops/component_handlers/kafka_connect/timeout.py index c1f1bcdab..7cdfe5801 100644 --- a/kpops/component_handlers/kafka_connect/timeout.py +++ b/kpops/component_handlers/kafka_connect/timeout.py @@ -9,8 +9,7 @@ def timeout(func: Callable[..., T], *, secs: int = 0) -> T | None: - """ - Sets a timeout for a given lambda function + """Sets a timeout for a given lambda function :param func: The callable function :param secs: The timeout in seconds. """ diff --git a/kpops/component_handlers/topic/handler.py b/kpops/component_handlers/topic/handler.py index 1df0d106a..789b5b249 100644 --- a/kpops/component_handlers/topic/handler.py +++ b/kpops/component_handlers/topic/handler.py @@ -199,8 +199,7 @@ def __dry_run_topic_deletion(self, topic_name: str) -> None: @classmethod def __prepare_body(cls, topic_name: str, topic_config: TopicConfig) -> TopicSpec: - """ - Prepares the POST request body needed for the topic creation + """Prepares the POST request body needed for the topic creation :param topic_name: The name of the topic :param topic_config: The topic config :return: diff --git a/kpops/component_handlers/topic/proxy_wrapper.py b/kpops/component_handlers/topic/proxy_wrapper.py index dc19656e4..d190dd091 100644 --- a/kpops/component_handlers/topic/proxy_wrapper.py +++ b/kpops/component_handlers/topic/proxy_wrapper.py @@ -33,8 +33,7 @@ def __init__(self, pipeline_config: PipelineConfig) -> None: @cached_property def cluster_id(self) -> str: - """ - Gets the Kafka cluster ID by sending a requests to Kafka REST proxy. + """Gets the Kafka cluster ID by sending a requests to Kafka REST proxy. More information about the cluster ID can be found here: https://docs.confluent.io/platform/current/kafka-rest/api.html#cluster-v3. @@ -54,8 +53,7 @@ def host(self) -> str: return self._host def create_topic(self, topic_spec: TopicSpec) -> None: - """ - Creates a topic. + """Creates a topic. API Reference: https://docs.confluent.io/platform/current/kafka-rest/api.html#post--clusters-cluster_id-topics :param topic_spec: The topic specification. 
""" @@ -72,8 +70,7 @@ def create_topic(self, topic_spec: TopicSpec) -> None: raise KafkaRestProxyError(response) def delete_topic(self, topic_name: str) -> None: - """ - Deletes a topic + """Deletes a topic API Reference: https://docs.confluent.io/platform/current/kafka-rest/api.html#delete--clusters-cluster_id-topics-topic_name :param topic_name: Name of the topic. """ @@ -88,8 +85,7 @@ def delete_topic(self, topic_name: str) -> None: raise KafkaRestProxyError(response) def get_topic(self, topic_name: str) -> TopicResponse: - """ - Returns the topic with the given topic_name. + """Returns the topic with the given topic_name. API Reference: https://docs.confluent.io/platform/current/kafka-rest/api.html#get--clusters-cluster_id-topics-topic_name :param topic_name: The topic name. :return: Response of the get topic API. @@ -114,8 +110,7 @@ def get_topic(self, topic_name: str) -> TopicResponse: raise KafkaRestProxyError(response) def get_topic_config(self, topic_name: str) -> TopicConfigResponse: - """ - Return the config with the given topic_name. + """Return the config with the given topic_name. API Reference: https://docs.confluent.io/platform/current/kafka-rest/api.html#acl-v3 :param topic_name: The topic name. :return: The topic configuration. @@ -141,8 +136,7 @@ def get_topic_config(self, topic_name: str) -> TopicConfigResponse: raise KafkaRestProxyError(response) def batch_alter_topic_config(self, topic_name: str, json_body: list[dict]) -> None: - """ - Reset config of given config_name param to the default value on the kafka server. + """Reset config of given config_name param to the default value on the kafka server. API Reference: https://docs.confluent.io/platform/current/kafka-rest/api.html#post--clusters-cluster_id-topics-topic_name-configs-alter :param topic_name: The topic name. :param config_name: The configuration parameter name. @@ -159,8 +153,7 @@ def batch_alter_topic_config(self, topic_name: str, json_body: list[dict]) -> No raise KafkaRestProxyError(response) def get_broker_config(self) -> BrokerConfigResponse: - """ - Return the list of configuration parameters for all the brokers in the given Kafka cluster. + """Return the list of configuration parameters for all the brokers in the given Kafka cluster. API Reference: https://docs.confluent.io/platform/current/kafka-rest/api.html#get--clusters-cluster_id-brokers---configs :return: The broker configuration. """ diff --git a/kpops/components/base_components/pipeline_component.py b/kpops/components/base_components/pipeline_component.py index d41b1cf74..ee9e93225 100644 --- a/kpops/components/base_components/pipeline_component.py +++ b/kpops/components/base_components/pipeline_component.py @@ -185,8 +185,7 @@ def inflate(self) -> list[PipelineComponent]: return [self] def template(self) -> None: - """ - Runs `helm template`. + """Runs `helm template`. From HELM: Render chart templates locally and display the output. Any values that would normally be looked up or retrieved in-cluster will diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index 1cddd830e..253b8be9c 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -38,8 +38,7 @@ def field_schema(field: ModelField, **kwargs: Any) -> Any: def _is_valid_component( defined_component_types: set[str], component: type[PipelineComponent] ) -> bool: - """ - Check whether a PipelineComponent subclass has a valid definition for the schema generation. + """Check whether a PipelineComponent subclass has a valid definition for the schema generation. 
:param defined_component_types: types defined so far :param component: component type to be validated diff --git a/tests/cli/test_schema_generation.py b/tests/cli/test_schema_generation.py index b8b248035..5223c4c21 100644 --- a/tests/cli/test_schema_generation.py +++ b/tests/cli/test_schema_generation.py @@ -54,8 +54,7 @@ class SubPipelineComponentCorrect(SubPipelineComponent): # Correctly defined, docstr test class SubPipelineComponentCorrectDocstr(SubPipelineComponent): - """ - Newline before title is removed. + """Newline before title is removed. Summarry is correctly imported. All From 11a54a5ad0b6cb76f2ca42fc46c09997f5297797 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 26 Sep 2023 23:01:27 +0300 Subject: [PATCH 10/76] chore: fix docstrings flagged by "D" --- hooks/gen_schema.py | 2 +- kpops/cli/registry.py | 3 +- kpops/component_handlers/helm_wrapper/helm.py | 4 +- .../component_handlers/helm_wrapper/model.py | 5 +- .../component_handlers/helm_wrapper/utils.py | 1 + .../kafka_connect/connect_wrapper.py | 30 +++++++++--- .../kafka_connect/kafka_connect_handler.py | 8 ++- .../kafka_connect/timeout.py | 3 +- kpops/component_handlers/topic/handler.py | 5 +- .../component_handlers/topic/proxy_wrapper.py | 49 +++++++++++++++---- .../base_components/kafka_connector.py | 4 +- .../base_components/pipeline_component.py | 2 +- kpops/utils/dict_differ.py | 2 +- 13 files changed, 85 insertions(+), 33 deletions(-) diff --git a/hooks/gen_schema.py b/hooks/gen_schema.py index 438af41ee..7d6b99f2e 100644 --- a/hooks/gen_schema.py +++ b/hooks/gen_schema.py @@ -10,7 +10,7 @@ def gen_schema(scope: SchemaScope): - """Generates the specified schema and saves it to a file. + """Generate the specified schema and save it to a file. The file is located in docs/docs/schema and is named ``.json`` diff --git a/kpops/cli/registry.py b/kpops/cli/registry.py index a5a1f82ef..e861d921d 100644 --- a/kpops/cli/registry.py +++ b/kpops/cli/registry.py @@ -27,7 +27,8 @@ class Registry: _classes: ClassDict[PipelineComponent] = field(default_factory=dict, init=False) def find_components(self, module_name: str) -> None: - """Find all PipelineComponent subclasses in module + """Find all PipelineComponent subclasses in module. + :param module_name: name of the python module. 
""" for _class in _find_classes(module_name, PipelineComponent): diff --git a/kpops/component_handlers/helm_wrapper/helm.py b/kpops/component_handlers/helm_wrapper/helm.py index ed4adeba3..0bab92d5c 100644 --- a/kpops/component_handlers/helm_wrapper/helm.py +++ b/kpops/component_handlers/helm_wrapper/helm.py @@ -77,7 +77,7 @@ def upgrade_install( values: dict, flags: HelmUpgradeInstallFlags = HelmUpgradeInstallFlags(), ) -> str: - """Prepares and executes the `helm upgrade --install` command.""" + """Prepare and execute the `helm upgrade --install` command.""" with tempfile.NamedTemporaryFile("w") as values_file: yaml.safe_dump(values, values_file) @@ -103,7 +103,7 @@ def uninstall( release_name: str, dry_run: bool, ) -> str | None: - """Prepares and executes the helm uninstall command.""" + """Prepare and execute the helm uninstall command.""" command = [ "helm", "uninstall", diff --git a/kpops/component_handlers/helm_wrapper/model.py b/kpops/component_handlers/helm_wrapper/model.py index 3c29839a2..b3c5de16e 100644 --- a/kpops/component_handlers/helm_wrapper/model.py +++ b/kpops/component_handlers/helm_wrapper/model.py @@ -205,8 +205,9 @@ def __iter__(self) -> Iterator[str]: @property def manifest(self) -> str: - """Reads the manifest section of Helm stdout. `helm upgrade --install` output message contains three sections - in the following order: + """Reads the manifest section of Helm stdout. + + `helm upgrade --install` output message contains three sections in the following order: - HOOKS - MANIFEST diff --git a/kpops/component_handlers/helm_wrapper/utils.py b/kpops/component_handlers/helm_wrapper/utils.py index a04b97f20..7ad76b93a 100644 --- a/kpops/component_handlers/helm_wrapper/utils.py +++ b/kpops/component_handlers/helm_wrapper/utils.py @@ -8,6 +8,7 @@ def trim_release_name(name: str, suffix: str = "") -> str: """Trim Helm release name while preserving suffix. + :param name: The release name including optional suffix :param suffix: The release suffix to preserve :return: Truncated release name. diff --git a/kpops/component_handlers/kafka_connect/connect_wrapper.py b/kpops/component_handlers/kafka_connect/connect_wrapper.py index a6e5ef3cb..ccd9666e3 100644 --- a/kpops/component_handlers/kafka_connect/connect_wrapper.py +++ b/kpops/component_handlers/kafka_connect/connect_wrapper.py @@ -38,7 +38,8 @@ def host(self) -> str: def create_connector( self, connector_config: KafkaConnectorConfig ) -> KafkaConnectResponse: - """Creates a new connector + """Create a new connector. + API Reference: https://docs.confluent.io/platform/current/connect/references/restapi.html#post--connectors :param connector_config: The config of the connector :return: The current connector info if successful. @@ -62,7 +63,10 @@ def create_connector( def get_connector(self, connector_name: str) -> KafkaConnectResponse: """Get information about the connector. - API Reference: https://docs.confluent.io/platform/current/connect/references/restapi.html#get--connectors-(string-name) + + API Reference: + https://docs.confluent.io/platform/current/connect/references/restapi.html#get--connectors-(string-name) + :param connector_name: Nameof the crated connector :return: Information about the connector. """ @@ -87,7 +91,11 @@ def get_connector(self, connector_name: str) -> KafkaConnectResponse: def update_connector_config( self, connector_config: KafkaConnectorConfig ) -> KafkaConnectResponse: - """Create a new connector using the given configuration, or update the configuration for an existing connector. 
+ """Create or update a connector. + + Create a new connector using the given configuration,or update the + configuration for an existing connector. + :param connector_config: Configuration parameters for the connector. :return: Information about the connector after the change has been made. """ @@ -118,9 +126,11 @@ def update_connector_config( def validate_connector_config( self, connector_config: KafkaConnectorConfig ) -> list[str]: - """Validate connector config using the given configuration + """Validate connector config using the given configuration. + :param connector_config: Configuration parameters for the connector. - :return: + :raises KafkaConnectError: Kafka Konnect error + :return: List of all found errors """ response = httpx.put( url=f"{self._host}/connector-plugins/{connector_config.class_name}/config/validate", @@ -133,7 +143,7 @@ def validate_connector_config( **response.json() ) - errors = [] + errors: list[str] = [] if kafka_connect_error_response.error_count > 0: for config in kafka_connect_error_response.configs: if len(config.value.errors) > 0: @@ -145,8 +155,12 @@ def validate_connector_config( raise KafkaConnectError(response) def delete_connector(self, connector_name: str) -> None: - """Deletes a connector, halting all tasks and deleting its configuration. - API Reference:https://docs.confluent.io/platform/current/connect/references/restapi.html#delete--connectors-(string-name)-. + """Delete a connector, halting all tasks and deleting its configuration. + + API Reference: + https://docs.confluent.io/platform/current/connect/references/restapi.html#delete--connectors-(string-name)-. + :param connector_name: Configuration parameters for the connector. + :raises ConnectorNotFoundException: Connector not found """ response = httpx.delete( url=f"{self._host}/connectors/{connector_name}", headers=HEADERS diff --git a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py index 1268ed6c7..766c76b28 100644 --- a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py +++ b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py @@ -36,7 +36,10 @@ def __init__( def create_connector( self, connector_config: KafkaConnectorConfig, *, dry_run: bool ) -> None: - """Creates a connector. If the connector exists the config of that connector gets updated. + """Create a connector. + + If the connector exists the config of that connector gets updated. + :param connector_config: The connector config. :param dry_run: If the connector creation should be run in dry run mode. """ @@ -63,7 +66,8 @@ def create_connector( ) def destroy_connector(self, connector_name: str, *, dry_run: bool) -> None: - """Deletes a connector resource from the cluster. + """Delete a connector resource from the cluster. + :param connector_name: The connector name. :param dry_run: If the connector deletion should be run in dry run mode. """ diff --git a/kpops/component_handlers/kafka_connect/timeout.py b/kpops/component_handlers/kafka_connect/timeout.py index 7cdfe5801..ec389bed5 100644 --- a/kpops/component_handlers/kafka_connect/timeout.py +++ b/kpops/component_handlers/kafka_connect/timeout.py @@ -9,7 +9,8 @@ def timeout(func: Callable[..., T], *, secs: int = 0) -> T | None: - """Sets a timeout for a given lambda function + """Set a timeout for a given lambda function. + :param func: The callable function :param secs: The timeout in seconds. 
""" diff --git a/kpops/component_handlers/topic/handler.py b/kpops/component_handlers/topic/handler.py index 789b5b249..afc4f6e77 100644 --- a/kpops/component_handlers/topic/handler.py +++ b/kpops/component_handlers/topic/handler.py @@ -199,10 +199,11 @@ def __dry_run_topic_deletion(self, topic_name: str) -> None: @classmethod def __prepare_body(cls, topic_name: str, topic_config: TopicConfig) -> TopicSpec: - """Prepares the POST request body needed for the topic creation + """Prepare the POST request body needed for the topic creation. + :param topic_name: The name of the topic :param topic_config: The topic config - :return: + :return: Topic specification """ topic_spec_json: dict = topic_config.dict( include={ diff --git a/kpops/component_handlers/topic/proxy_wrapper.py b/kpops/component_handlers/topic/proxy_wrapper.py index d190dd091..88fc6e310 100644 --- a/kpops/component_handlers/topic/proxy_wrapper.py +++ b/kpops/component_handlers/topic/proxy_wrapper.py @@ -33,12 +33,15 @@ def __init__(self, pipeline_config: PipelineConfig) -> None: @cached_property def cluster_id(self) -> str: - """Gets the Kafka cluster ID by sending a requests to Kafka REST proxy. + """Get the Kafka cluster ID by sending a request to Kafka REST proxy. + More information about the cluster ID can be found here: https://docs.confluent.io/platform/current/kafka-rest/api.html#cluster-v3. Currently both Kafka and Kafka REST Proxy are only aware of the Kafka cluster pointed at by the bootstrap.servers configuration. Therefore, only one Kafka cluster will be returned. + + :raises KafkaRestProxyError: Kafka REST proxy error :return: The Kafka cluster ID. """ response = httpx.get(url=f"{self._host}/v3/clusters") @@ -53,9 +56,13 @@ def host(self) -> str: return self._host def create_topic(self, topic_spec: TopicSpec) -> None: - """Creates a topic. - API Reference: https://docs.confluent.io/platform/current/kafka-rest/api.html#post--clusters-cluster_id-topics + """Create a topic. + + API Reference: + https://docs.confluent.io/platform/current/kafka-rest/api.html#post--clusters-cluster_id-topics + :param topic_spec: The topic specification. + :raises KafkaRestProxyError: Kafka REST proxy error """ response = httpx.post( url=f"{self._host}/v3/clusters/{self.cluster_id}/topics", @@ -70,9 +77,13 @@ def create_topic(self, topic_spec: TopicSpec) -> None: raise KafkaRestProxyError(response) def delete_topic(self, topic_name: str) -> None: - """Deletes a topic - API Reference: https://docs.confluent.io/platform/current/kafka-rest/api.html#delete--clusters-cluster_id-topics-topic_name + """Delete a topic. + + API Reference: + https://docs.confluent.io/platform/current/kafka-rest/api.html#delete--clusters-cluster_id-topics-topic_name + :param topic_name: Name of the topic. + :raises KafkaRestProxyError: Kafka REST proxy error """ response = httpx.delete( url=f"{self.host}/v3/clusters/{self.cluster_id}/topics/{topic_name}", @@ -85,9 +96,14 @@ def delete_topic(self, topic_name: str) -> None: raise KafkaRestProxyError(response) def get_topic(self, topic_name: str) -> TopicResponse: - """Returns the topic with the given topic_name. - API Reference: https://docs.confluent.io/platform/current/kafka-rest/api.html#get--clusters-cluster_id-topics-topic_name + """Return the topic with the given topic_name. + + API Reference: + https://docs.confluent.io/platform/current/kafka-rest/api.html#get--clusters-cluster_id-topics-topic_name + :param topic_name: The topic name. 
+ :raises TopicNotFoundException: Topic not found + :raises KafkaRestProxyError: Kafka REST proxy error :return: Response of the get topic API. """ response = httpx.get( @@ -111,8 +127,13 @@ def get_topic(self, topic_name: str) -> TopicResponse: def get_topic_config(self, topic_name: str) -> TopicConfigResponse: """Return the config with the given topic_name. - API Reference: https://docs.confluent.io/platform/current/kafka-rest/api.html#acl-v3 + + API Reference: + https://docs.confluent.io/platform/current/kafka-rest/api.html#acl-v3 + :param topic_name: The topic name. + :raises TopicNotFoundException: Topic not found + :raises KafkaRestProxyError: Kafka REST proxy error :return: The topic configuration. """ response = httpx.get( @@ -137,9 +158,13 @@ def get_topic_config(self, topic_name: str) -> TopicConfigResponse: def batch_alter_topic_config(self, topic_name: str, json_body: list[dict]) -> None: """Reset config of given config_name param to the default value on the kafka server. - API Reference: https://docs.confluent.io/platform/current/kafka-rest/api.html#post--clusters-cluster_id-topics-topic_name-configs-alter + + API Reference: + https://docs.confluent.io/platform/current/kafka-rest/api.html#post--clusters-cluster_id-topics-topic_name-configs-alter + :param topic_name: The topic name. :param config_name: The configuration parameter name. + :raises KafkaRestProxyError: Kafka REST proxy error """ response = httpx.post( url=f"{self.host}/v3/clusters/{self.cluster_id}/topics/{topic_name}/configs:alter", @@ -154,7 +179,11 @@ def batch_alter_topic_config(self, topic_name: str, json_body: list[dict]) -> No def get_broker_config(self) -> BrokerConfigResponse: """Return the list of configuration parameters for all the brokers in the given Kafka cluster. - API Reference: https://docs.confluent.io/platform/current/kafka-rest/api.html#get--clusters-cluster_id-brokers---configs + + API Reference: + https://docs.confluent.io/platform/current/kafka-rest/api.html#get--clusters-cluster_id-brokers---configs + + :raises KafkaRestProxyError: Kafka REST proxy error :return: The broker configuration. """ response = httpx.get( diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py index 73a0d1e38..b63aaebda 100644 --- a/kpops/components/base_components/kafka_connector.py +++ b/kpops/components/base_components/kafka_connector.py @@ -306,7 +306,7 @@ def clean(self, dry_run: bool) -> None: self.__run_kafka_connect_resetter(dry_run) def __run_kafka_connect_resetter(self, dry_run: bool) -> None: - """Runs the connector resetter. + """Run the connector resetter. :param dry_run: Whether to do a dry run of the command """ @@ -361,7 +361,7 @@ def clean(self, dry_run: bool) -> None: def __run_kafka_connect_resetter( self, dry_run: bool, delete_consumer_group: bool ) -> None: - """Runs the connector resetter. + """Run the connector resetter. :param dry_run: Whether to do a dry run of the command :param delete_consumer_group: Whether the consumer group should be deleted or not diff --git a/kpops/components/base_components/pipeline_component.py b/kpops/components/base_components/pipeline_component.py index ee9e93225..cfb339d6d 100644 --- a/kpops/components/base_components/pipeline_component.py +++ b/kpops/components/base_components/pipeline_component.py @@ -185,7 +185,7 @@ def inflate(self) -> list[PipelineComponent]: return [self] def template(self) -> None: - """Runs `helm template`. + """Run `helm template`. 
From HELM: Render chart templates locally and display the output. Any values that would normally be looked up or retrieved in-cluster will diff --git a/kpops/utils/dict_differ.py b/kpops/utils/dict_differ.py index 707ea0bd0..98ec768cb 100644 --- a/kpops/utils/dict_differ.py +++ b/kpops/utils/dict_differ.py @@ -64,7 +64,7 @@ def from_dicts( @staticmethod def __find_changed_key(key_1: list[str] | str, key_2: str = "") -> str: - """Generates a string that points to the changed key in the dictionary.""" + """Generate a string that points to the changed key in the dictionary.""" if isinstance(key_1, list) and len(key_1) > 1: return f"{key_1[0]}[{key_1[1]}]" if not key_1: From 53ba27d2bd21eaf8ffae16d76b2cce1fbce11549 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 26 Sep 2023 23:03:01 +0300 Subject: [PATCH 11/76] ci(ruff-pyupgrade): autofix --- hooks/gen_docs/gen_docs_env_vars.py | 2 +- kpops/cli/main.py | 3 ++- kpops/component_handlers/helm_wrapper/helm.py | 5 ++--- kpops/component_handlers/helm_wrapper/helm_diff.py | 2 +- kpops/component_handlers/helm_wrapper/model.py | 2 +- kpops/component_handlers/kafka_connect/timeout.py | 3 ++- kpops/components/streams_bootstrap/streams/model.py | 3 ++- kpops/utils/dict_differ.py | 3 ++- kpops/utils/dict_ops.py | 3 ++- kpops/utils/environment.py | 2 +- kpops/utils/gen_schema.py | 3 ++- 11 files changed, 18 insertions(+), 13 deletions(-) diff --git a/hooks/gen_docs/gen_docs_env_vars.py b/hooks/gen_docs/gen_docs_env_vars.py index 436ba19de..38b97b7b5 100644 --- a/hooks/gen_docs/gen_docs_env_vars.py +++ b/hooks/gen_docs/gen_docs_env_vars.py @@ -282,7 +282,7 @@ def __fill_csv_cli(target: Path) -> None: var_in_main = getattr(main, var_in_main_name) if ( not var_in_main_name.startswith("__") - and isinstance(var_in_main, (OptionInfo, ArgumentInfo)) + and isinstance(var_in_main, OptionInfo | ArgumentInfo) and var_in_main.envvar ): cli_env_var_description: list[str] = [ diff --git a/kpops/cli/main.py b/kpops/cli/main.py index f58808cd2..8a70c47f9 100644 --- a/kpops/cli/main.py +++ b/kpops/cli/main.py @@ -3,7 +3,8 @@ import logging from enum import Enum from pathlib import Path -from typing import TYPE_CHECKING, Iterator, Optional +from typing import TYPE_CHECKING, Optional +from collections.abc import Iterator import dtyper import typer diff --git a/kpops/component_handlers/helm_wrapper/helm.py b/kpops/component_handlers/helm_wrapper/helm.py index 0bab92d5c..e9d4dbf39 100644 --- a/kpops/component_handlers/helm_wrapper/helm.py +++ b/kpops/component_handlers/helm_wrapper/helm.py @@ -5,7 +5,7 @@ import subprocess import tempfile from collections.abc import Iterator -from typing import Iterable +from collections.abc import Iterable import yaml @@ -206,8 +206,7 @@ def __execute(self, command: list[str]) -> str: log.debug(f"Executing {' '.join(command)}") process = subprocess.run( command, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, + capture_output=True, text=True, ) Helm.parse_helm_command_stderr_output(process.stderr) diff --git a/kpops/component_handlers/helm_wrapper/helm_diff.py b/kpops/component_handlers/helm_wrapper/helm_diff.py index 7c49e2ef0..74e2bd36b 100644 --- a/kpops/component_handlers/helm_wrapper/helm_diff.py +++ b/kpops/component_handlers/helm_wrapper/helm_diff.py @@ -1,6 +1,6 @@ import logging from collections.abc import Iterator -from typing import Iterable +from collections.abc import Iterable from kpops.component_handlers.helm_wrapper.model import HelmDiffConfig, HelmTemplate from kpops.utils.dict_differ import Change, render_diff 
diff --git a/kpops/component_handlers/helm_wrapper/model.py b/kpops/component_handlers/helm_wrapper/model.py index b3c5de16e..35dc43e0d 100644 --- a/kpops/component_handlers/helm_wrapper/model.py +++ b/kpops/component_handlers/helm_wrapper/model.py @@ -1,6 +1,6 @@ from dataclasses import dataclass from pathlib import Path -from typing import Iterator +from collections.abc import Iterator import yaml from pydantic import BaseConfig, BaseModel, Extra, Field diff --git a/kpops/component_handlers/kafka_connect/timeout.py b/kpops/component_handlers/kafka_connect/timeout.py index ec389bed5..6f0fd788d 100644 --- a/kpops/component_handlers/kafka_connect/timeout.py +++ b/kpops/component_handlers/kafka_connect/timeout.py @@ -1,7 +1,8 @@ import asyncio import logging from asyncio import TimeoutError -from typing import Callable, TypeVar +from typing import TypeVar +from collections.abc import Callable log = logging.getLogger("Timeout") diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index 34fcd1d3e..bfead03eb 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -1,4 +1,5 @@ -from typing import AbstractSet, Any, Mapping +from typing import AbstractSet, Any +from collections.abc import Mapping from pydantic import BaseConfig, BaseModel, Extra, Field from typing_extensions import override diff --git a/kpops/utils/dict_differ.py b/kpops/utils/dict_differ.py index 98ec768cb..dbc53f67c 100644 --- a/kpops/utils/dict_differ.py +++ b/kpops/utils/dict_differ.py @@ -3,7 +3,8 @@ from dataclasses import dataclass from difflib import Differ from enum import Enum -from typing import Generic, Iterable, Iterator, Sequence, TypeVar +from typing import Generic, TypeVar +from collections.abc import Iterable, Iterator, Sequence import typer import yaml diff --git a/kpops/utils/dict_ops.py b/kpops/utils/dict_ops.py index 1b081cb7f..94c9003f4 100644 --- a/kpops/utils/dict_ops.py +++ b/kpops/utils/dict_ops.py @@ -1,4 +1,5 @@ -from typing import Any, Mapping +from typing import Any +from collections.abc import Mapping def update_nested_pair(original_dict: dict, other_dict: Mapping) -> dict: diff --git a/kpops/utils/environment.py b/kpops/utils/environment.py index c46f83611..0ed7ae920 100644 --- a/kpops/utils/environment.py +++ b/kpops/utils/environment.py @@ -1,7 +1,7 @@ import os import platform from collections import UserDict -from typing import Callable +from collections.abc import Callable class Environment(UserDict): diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index 253b8be9c..f202d0706 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -2,7 +2,8 @@ import logging from abc import ABC from enum import Enum -from typing import Annotated, Any, Literal, Sequence, Union +from typing import Annotated, Any, Literal, Union +from collections.abc import Sequence from pydantic import BaseConfig, Field, schema, schema_json_of from pydantic.fields import FieldInfo, ModelField From d090ff041a80b1dcc7086801533effcb8ba5d59e Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 26 Sep 2023 23:04:17 +0300 Subject: [PATCH 12/76] chore: manually fix code flagged by "UP" --- kpops/components/streams_bootstrap/streams/model.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index bfead03eb..ece5c042b 100644 --- 
a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -1,5 +1,5 @@ -from typing import AbstractSet, Any -from collections.abc import Mapping +from typing import Any +from collections.abc import Mapping, Set from pydantic import BaseConfig, BaseModel, Extra, Field from typing_extensions import override @@ -76,8 +76,8 @@ def add_extra_input_topics(self, role: str, topics: list[str]) -> None: def dict( self, *, - include: None | AbstractSet[int | str] | Mapping[int | str, Any] = None, - exclude: None | AbstractSet[int | str] | Mapping[int | str, Any] = None, + include: None | Set [int | str] | Mapping[int | str, Any] = None, + exclude: None | Set [int | str] | Mapping[int | str, Any] = None, by_alias: bool = False, skip_defaults: bool | None = None, exclude_unset: bool = False, From feb6df38fb11114154b93854029dc9ad3f53f075 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 26 Sep 2023 23:06:26 +0300 Subject: [PATCH 13/76] chore: update snapshots --- .../cli/snapshots/snap_test_schema_generation.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/tests/cli/snapshots/snap_test_schema_generation.py b/tests/cli/snapshots/snap_test_schema_generation.py index 2a19e65c1..2dd92b512 100644 --- a/tests/cli/snapshots/snap_test_schema_generation.py +++ b/tests/cli/snapshots/snap_test_schema_generation.py @@ -58,7 +58,7 @@ }, "FromSection": { "additionalProperties": false, - "description": "Holds multiple input topics", + "description": "Holds multiple input topics.", "properties": { "components": { "additionalProperties": { @@ -84,7 +84,7 @@ }, "FromTopic": { "additionalProperties": false, - "description": "Input topic", + "description": "Input topic.", "properties": { "role": { "description": "Custom identifier belonging to a topic; define only if `type` is `pattern` or `None`", @@ -104,7 +104,7 @@ "type": "object" }, "InputTopicTypes": { - "description": "Input topic types\\n\\nINPUT (input topic), PATTERN (extra-topic-pattern or input-topic-pattern)", + "description": "Input topic types.\\n\\nINPUT (input topic), PATTERN (extra-topic-pattern or input-topic-pattern)", "enum": [ "input", "pattern" @@ -113,7 +113,7 @@ "type": "string" }, "OutputTopicTypes": { - "description": "Types of output topic\\n\\nOUTPUT (output topic), ERROR (error topic)", + "description": "Types of output topic.\\n\\nOUTPUT (output topic), ERROR (error topic)", "enum": [ "output", "error" @@ -216,7 +216,7 @@ "type": "object" }, "SubPipelineComponentCorrectDocstr": { - "description": "Newline before title is removed\\nSummarry is correctly imported. All whitespaces are removed and replaced with a single space. The description extraction terminates at the correct place, deletes 1 trailing coma", + "description": "Newline before title is removed.\\nSummarry is correctly imported. All whitespaces are removed and replaced with a single space. The description extraction terminates at the correct place, deletes 1 trailing coma", "properties": { "example_attr": { "description": "Parameter description looks correct and it is not included in the class description, terminates here", @@ -254,7 +254,7 @@ }, "type": { "default": "sub-pipeline-component-correct-docstr", - "description": "Newline before title is removed\\nSummarry is correctly imported. All whitespaces are removed and replaced with a single space. 
The description extraction terminates at the correct place, deletes 1 trailing coma", + "description": "Newline before title is removed.\\nSummarry is correctly imported. All whitespaces are removed and replaced with a single space. The description extraction terminates at the correct place, deletes 1 trailing coma", "enum": [ "sub-pipeline-component-correct-docstr" ], @@ -317,7 +317,7 @@ "type": "object" }, "ToSection": { - "description": "Holds multiple output topics", + "description": "Holds multiple output topics.", "properties": { "models": { "additionalProperties": { @@ -343,7 +343,7 @@ }, "TopicConfig": { "additionalProperties": false, - "description": "Configure an output topic", + "description": "Configure an output topic.", "properties": { "configs": { "additionalProperties": { From cc06875109a6352505df6692e4af1c0f0010061a Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 26 Sep 2023 23:17:32 +0300 Subject: [PATCH 14/76] ci(ruff): add rule ignores for "B" --- pyproject.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 58451787a..3a64dda06 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -90,6 +90,8 @@ ignore = [ "D415", # First line should end with a period, question mark, or exclamation point "D416", # Section name should end with a colon ("{name}") "D417", # Missing argument description in the docstring for {definition}: {name} + "B009", # Do not call getattr with a constant attribute value. -- Not always applicable + "B010", # Do not call setattr with a constant attribute value. -- Not always applicable "RUF012", # type class attrs with `ClassVar` -- Too strict/trigger-happy "UP007", # Use X | Y for type annotations -- `typer` doesn't support it "RET505", # Unnecessary {branch} after return statement -- Lots of false positives From 687f37d745ced8b58aaa9f8200f44138e98846ec Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 26 Sep 2023 23:31:43 +0300 Subject: [PATCH 15/76] ci(ruff): configure "B" --- pyproject.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 3a64dda06..6ecf2b6d5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -150,6 +150,9 @@ exclude = ["tests/*snapshots/*"] [tool.ruff.extend-per-file-ignores] "tests/*/__init__.py" = ["F401"] +[tool.ruff.flake8-bugbear] +extend-immutable-calls = ["typer.Argument"] + [build-system] requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" From 1b6e29cf2c3ee184e69d48a3d4971f78d0e46113 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 26 Sep 2023 23:32:03 +0300 Subject: [PATCH 16/76] chore: manually fix code flagged by "B" --- kpops/cli/registry.py | 8 ++++---- kpops/component_handlers/helm_wrapper/helm.py | 12 +++++++++--- .../schema_handler/schema_handler.py | 4 ++-- .../components/base_components/pipeline_component.py | 4 +++- kpops/pipeline_generator/pipeline.py | 4 ++-- 5 files changed, 20 insertions(+), 12 deletions(-) diff --git a/kpops/cli/registry.py b/kpops/cli/registry.py index e861d921d..97a910bcd 100644 --- a/kpops/cli/registry.py +++ b/kpops/cli/registry.py @@ -37,17 +37,17 @@ def find_components(self, module_name: str) -> None: def __getitem__(self, component_type: str) -> type[PipelineComponent]: try: return self._classes[component_type] - except KeyError: + except KeyError as ke: raise ClassNotFoundError( f"Could not find a component of type {component_type}" - ) + ) from ke def find_class(module_name: str, baseclass: type[T]) -> type[T]: try: return next(_find_classes(module_name, 
baseclass)) - except StopIteration: - raise ClassNotFoundError + except StopIteration as e: + raise ClassNotFoundError from e def _find_classes(module_name: str, baseclass: type[T]) -> Iterator[type[T]]: diff --git a/kpops/component_handlers/helm_wrapper/helm.py b/kpops/component_handlers/helm_wrapper/helm.py index e9d4dbf39..8eb65b583 100644 --- a/kpops/component_handlers/helm_wrapper/helm.py +++ b/kpops/component_handlers/helm_wrapper/helm.py @@ -37,8 +37,10 @@ def add_repo( self, repository_name: str, repository_url: str, - repo_auth_flags: RepoAuthFlags = RepoAuthFlags(), + repo_auth_flags: RepoAuthFlags | None = None, ) -> None: + if repo_auth_flags is None: + repo_auth_flags = RepoAuthFlags() command = [ "helm", "repo", @@ -75,9 +77,11 @@ def upgrade_install( dry_run: bool, namespace: str, values: dict, - flags: HelmUpgradeInstallFlags = HelmUpgradeInstallFlags(), + flags: HelmUpgradeInstallFlags | None = None, ) -> str: """Prepare and execute the `helm upgrade --install` command.""" + if flags is None: + flags = HelmUpgradeInstallFlags() with tempfile.NamedTemporaryFile("w") as values_file: yaml.safe_dump(values, values_file) @@ -126,7 +130,7 @@ def template( chart: str, namespace: str, values: dict, - flags: HelmTemplateFlags = HelmTemplateFlags(), + flags: HelmTemplateFlags | None = None, ) -> str: """From HELM: Render chart templates locally and display the output. @@ -141,6 +145,8 @@ def template( :param flags: the flags to be set for `helm template`, defaults to HelmTemplateFlags() :return: the output of `helm template` """ + if flags is None: + flags = HelmTemplateFlags() with tempfile.NamedTemporaryFile("w") as values_file: yaml.safe_dump(values, values_file) command = [ diff --git a/kpops/component_handlers/schema_handler/schema_handler.py b/kpops/component_handlers/schema_handler/schema_handler.py index a053ccc62..e5e13e70e 100644 --- a/kpops/component_handlers/schema_handler/schema_handler.py +++ b/kpops/component_handlers/schema_handler/schema_handler.py @@ -34,11 +34,11 @@ def schema_provider(self) -> SchemaProvider: ) schema_provider_class = find_class(self.components_module, SchemaProvider) return schema_provider_class() # pyright: ignore[reportGeneralTypeIssues] - except ClassNotFoundError: + except ClassNotFoundError as e: raise ValueError( f"No schema provider found in components module {self.components_module}. " f"Please implement the abstract method in {SchemaProvider.__module__}.{SchemaProvider.__name__}." - ) + ) from e @classmethod def load_schema_handler( diff --git a/kpops/components/base_components/pipeline_component.py b/kpops/components/base_components/pipeline_component.py index cfb339d6d..d05d4d4c1 100644 --- a/kpops/components/base_components/pipeline_component.py +++ b/kpops/components/base_components/pipeline_component.py @@ -158,12 +158,14 @@ def apply_to_outputs(self, name: str, topic: TopicConfig) -> None: def weave_from_topics( self, to: ToSection | None, - from_topic: FromTopic = FromTopic(type=InputTopicTypes.INPUT), + from_topic: FromTopic | None = None, ) -> None: """Weave output topics of upstream component or from component into config. 
Override this method to apply custom logic """ + if from_topic is None: + from_topic = FromTopic(type=InputTopicTypes.INPUT) if not to: return input_topics = [ diff --git a/kpops/pipeline_generator/pipeline.py b/kpops/pipeline_generator/pipeline.py index f941f80de..09d91c923 100644 --- a/kpops/pipeline_generator/pipeline.py +++ b/kpops/pipeline_generator/pipeline.py @@ -163,10 +163,10 @@ def parse_components(self, component_list: list[dict]) -> None: try: try: component_type: str = component_data["type"] - except KeyError: + except KeyError as ke: raise ValueError( "Every component must have a type defined, this component does not have one." - ) + ) from ke component_class = self.registry[component_type] self.apply_component(component_class, component_data) except Exception as ex: From 1621db15f529b15947ea8aefc891397cbb6506c1 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 26 Sep 2023 23:36:59 +0300 Subject: [PATCH 17/76] chore: fix code flagged by "INP" --- .github/scripts/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 .github/scripts/__init__.py diff --git a/.github/scripts/__init__.py b/.github/scripts/__init__.py new file mode 100644 index 000000000..e69de29bb From d0eefab5a6bc648393c134714a94e455f49662ab Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 26 Sep 2023 23:45:31 +0300 Subject: [PATCH 18/76] chore(ruff): fix code flagged by "BLE" --- kpops/component_handlers/helm_wrapper/helm.py | 2 +- kpops/pipeline_generator/pipeline.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/kpops/component_handlers/helm_wrapper/helm.py b/kpops/component_handlers/helm_wrapper/helm.py index 8eb65b583..95c8ee4f0 100644 --- a/kpops/component_handlers/helm_wrapper/helm.py +++ b/kpops/component_handlers/helm_wrapper/helm.py @@ -52,7 +52,7 @@ def add_repo( try: self.__execute(command) - except Exception as e: + except (ReleaseNotFoundException, RuntimeError) as e: if ( len(e.args) == 1 and re.match( diff --git a/kpops/pipeline_generator/pipeline.py b/kpops/pipeline_generator/pipeline.py index 09d91c923..7b3515e1c 100644 --- a/kpops/pipeline_generator/pipeline.py +++ b/kpops/pipeline_generator/pipeline.py @@ -169,7 +169,7 @@ def parse_components(self, component_list: list[dict]) -> None: ) from ke component_class = self.registry[component_type] self.apply_component(component_class, component_data) - except Exception as ex: + except Exception as ex: # noqa: BLE001 if "name" in component_data: raise ParsingException( f"Error enriching {component_data['type']} component {component_data['name']}" From 301284eb02dbb707fefa5ab50476b06ebee757ff Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 26 Sep 2023 23:47:01 +0300 Subject: [PATCH 19/76] ci(ruff-commas): autofix --- kpops/cli/custom_formatter.py | 2 +- kpops/cli/main.py | 40 +++++------ kpops/cli/pipeline_config.py | 2 +- kpops/cli/registry.py | 4 +- .../helm_wrapper/dry_run_handler.py | 2 +- kpops/component_handlers/helm_wrapper/helm.py | 6 +- .../component_handlers/helm_wrapper/model.py | 16 ++--- .../component_handlers/helm_wrapper/utils.py | 2 +- .../kafka_connect/connect_wrapper.py | 24 +++---- .../kafka_connect/kafka_connect_handler.py | 20 +++--- .../kafka_connect/timeout.py | 2 +- .../schema_handler/schema_handler.py | 30 ++++----- .../schema_handler/schema_provider.py | 2 +- kpops/component_handlers/topic/handler.py | 42 ++++++------ .../component_handlers/topic/proxy_wrapper.py | 2 +- kpops/component_handlers/topic/utils.py | 8 +-- 
kpops/component_handlers/utils/exception.py | 2 +- .../base_defaults_component.py | 18 ++--- kpops/components/base_components/kafka_app.py | 14 ++-- .../base_components/kafka_connector.py | 22 +++---- .../base_components/kubernetes_app.py | 6 +- .../base_components/models/from_section.py | 2 +- .../base_components/models/to_section.py | 8 +-- .../streams_bootstrap/producer/model.py | 6 +- .../streams_bootstrap/streams/model.py | 18 ++--- kpops/pipeline_generator/pipeline.py | 32 ++++----- kpops/utils/dict_differ.py | 6 +- kpops/utils/dict_ops.py | 2 +- kpops/utils/environment.py | 2 +- kpops/utils/gen_schema.py | 10 +-- kpops/utils/yaml_loading.py | 6 +- tests/cli/resources/module.py | 2 +- tests/cli/test_pipeline_steps.py | 6 +- tests/cli/test_schema_generation.py | 6 +- .../helm_wrapper/test_dry_run_handler.py | 10 +-- .../helm_wrapper/test_helm_diff.py | 4 +- .../helm_wrapper/test_helm_wrapper.py | 46 ++++++------- .../helm_wrapper/test_utils.py | 2 +- .../kafka_connect/test_connect_handler.py | 46 ++++++------- .../kafka_connect/test_connect_wrapper.py | 52 +++++++-------- .../schema_handler/resources/module.py | 2 +- .../schema_handler/test_schema_handler.py | 54 +++++++-------- .../topic/test_proxy_wrapper.py | 18 ++--- .../topic/test_topic_handler.py | 66 +++++++++---------- tests/component_handlers/topic/test_utils.py | 6 +- .../test_base_defaults_component.py | 18 ++--- tests/components/test_kafka_app.py | 2 +- tests/components/test_kafka_connector.py | 10 +-- tests/components/test_kafka_sink_connector.py | 40 +++++------ .../components/test_kafka_source_connector.py | 28 ++++---- tests/components/test_kubernetes_app.py | 10 +-- tests/components/test_producer_app.py | 26 ++++---- tests/components/test_streams_app.py | 64 +++++++++--------- tests/pipeline/test_components/components.py | 14 ++-- .../components.py | 2 +- tests/pipeline/test_pipeline.py | 4 +- tests/utils/test_dict_ops.py | 2 +- tests/utils/test_diff.py | 2 +- tests/utils/test_environment.py | 2 +- 59 files changed, 451 insertions(+), 451 deletions(-) diff --git a/kpops/cli/custom_formatter.py b/kpops/cli/custom_formatter.py index 69fc1c73d..ef977d24f 100644 --- a/kpops/cli/custom_formatter.py +++ b/kpops/cli/custom_formatter.py @@ -16,7 +16,7 @@ def format(self, record): logging.WARNING: typer.style(message_format, fg=typer.colors.YELLOW), logging.ERROR: typer.style(message_format, fg=typer.colors.RED), logging.CRITICAL: typer.style( - message_format, fg=typer.colors.RED, bold=True + message_format, fg=typer.colors.RED, bold=True, ), } diff --git a/kpops/cli/main.py b/kpops/cli/main.py index 8a70c47f9..540e4d5c1 100644 --- a/kpops/cli/main.py +++ b/kpops/cli/main.py @@ -121,12 +121,12 @@ def setup_pipeline( handlers = setup_handlers(components_module, pipeline_config) return Pipeline.load_from_yaml( - pipeline_base_dir, pipeline_path, registry, pipeline_config, handlers + pipeline_base_dir, pipeline_path, registry, pipeline_config, handlers, ) def setup_handlers( - components_module: str | None, config: PipelineConfig + components_module: str | None, config: PipelineConfig, ) -> ComponentHandlers: schema_handler = SchemaHandler.load_schema_handler(components_module, config) connector_handler = KafkaConnectHandler.from_pipeline_config(config) @@ -149,13 +149,13 @@ def get_step_names(steps_to_apply: list[PipelineComponent]) -> list[str]: def filter_steps_to_apply( - pipeline: Pipeline, steps: set[str], filter_type: FilterType + pipeline: Pipeline, steps: set[str], filter_type: FilterType, ) -> list[PipelineComponent]: 
def is_in_steps(component: PipelineComponent) -> bool: return component.name in steps log.debug( - f"KPOPS_PIPELINE_STEPS is defined with values: {steps} and filter type of {filter_type.value}" + f"KPOPS_PIPELINE_STEPS is defined with values: {steps} and filter type of {filter_type.value}", ) filtered_steps = [ component @@ -171,7 +171,7 @@ def is_in_steps(component: PipelineComponent) -> bool: def get_steps_to_apply( - pipeline: Pipeline, steps: str | None, filter_type: FilterType + pipeline: Pipeline, steps: str | None, filter_type: FilterType, ) -> list[PipelineComponent]: if steps: return filter_steps_to_apply(pipeline, parse_steps(steps), filter_type) @@ -179,7 +179,7 @@ def get_steps_to_apply( def reverse_pipeline_steps( - pipeline: Pipeline, steps: str | None, filter_type: FilterType + pipeline: Pipeline, steps: str | None, filter_type: FilterType, ) -> Iterator[PipelineComponent]: return reversed(get_steps_to_apply(pipeline, steps, filter_type)) @@ -193,7 +193,7 @@ def log_action(action: str, pipeline_component: PipelineComponent): def create_pipeline_config( - config: Path, defaults: Optional[Path], verbose: bool + config: Path, defaults: Optional[Path], verbose: bool, ) -> PipelineConfig: setup_logging_level(verbose) PipelineConfig.Config.config_path = config @@ -210,7 +210,7 @@ def create_pipeline_config( Generate json schema. The schemas can be used to enable support for kpops files in a text editor. - """ + """, ) def schema( scope: SchemaScope = typer.Argument( @@ -225,7 +225,7 @@ def schema( ), components_module: Optional[str] = COMPONENTS_MODULES, include_stock_components: bool = typer.Option( - default=True, help="Include the built-in KPOps components." + default=True, help="Include the built-in KPOps components.", ), ) -> None: match scope: @@ -236,7 +236,7 @@ def schema( @app.command( # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8 - help="Enriches pipelines steps with defaults. The output is used as input for the deploy/destroy/... commands." + help="Enriches pipelines steps with defaults. The output is used as input for the deploy/destroy/... 
commands.", ) def generate( pipeline_path: Path = PIPELINE_PATH_ARG, @@ -251,7 +251,7 @@ def generate( ) -> Pipeline: pipeline_config = create_pipeline_config(config, defaults, verbose) pipeline = setup_pipeline( - pipeline_base_dir, pipeline_path, components_module, pipeline_config + pipeline_base_dir, pipeline_path, components_module, pipeline_config, ) if not template: @@ -264,14 +264,14 @@ def generate( elif steps: log.warning( "The following flags are considered only when `--template` is set: \n \ - '--steps'" + '--steps'", ) return pipeline @app.command( - help="Deploy pipeline steps" + help="Deploy pipeline steps", ) # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8 def deploy( pipeline_path: Path = PIPELINE_PATH_ARG, @@ -286,7 +286,7 @@ def deploy( ): pipeline_config = create_pipeline_config(config, defaults, verbose) pipeline = setup_pipeline( - pipeline_base_dir, pipeline_path, components_module, pipeline_config + pipeline_base_dir, pipeline_path, components_module, pipeline_config, ) steps_to_apply = get_steps_to_apply(pipeline, steps, filter_type) @@ -296,7 +296,7 @@ def deploy( @app.command( - help="Destroy pipeline steps" + help="Destroy pipeline steps", ) # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8 def destroy( pipeline_path: Path = PIPELINE_PATH_ARG, @@ -311,7 +311,7 @@ def destroy( ): pipeline_config = create_pipeline_config(config, defaults, verbose) pipeline = setup_pipeline( - pipeline_base_dir, pipeline_path, components_module, pipeline_config + pipeline_base_dir, pipeline_path, components_module, pipeline_config, ) pipeline_steps = reverse_pipeline_steps(pipeline, steps, filter_type) for component in pipeline_steps: @@ -320,7 +320,7 @@ def destroy( @app.command( - help="Reset pipeline steps" + help="Reset pipeline steps", ) # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8 def reset( pipeline_path: Path = PIPELINE_PATH_ARG, @@ -335,7 +335,7 @@ def reset( ): pipeline_config = create_pipeline_config(config, defaults, verbose) pipeline = setup_pipeline( - pipeline_base_dir, pipeline_path, components_module, pipeline_config + pipeline_base_dir, pipeline_path, components_module, pipeline_config, ) pipeline_steps = reverse_pipeline_steps(pipeline, steps, filter_type) for component in pipeline_steps: @@ -345,7 +345,7 @@ def reset( @app.command( - help="Clean pipeline steps" + help="Clean pipeline steps", ) # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8 def clean( pipeline_path: Path = PIPELINE_PATH_ARG, @@ -360,7 +360,7 @@ def clean( ): pipeline_config = create_pipeline_config(config, defaults, verbose) pipeline = setup_pipeline( - pipeline_base_dir, pipeline_path, components_module, pipeline_config + pipeline_base_dir, pipeline_path, components_module, pipeline_config, ) pipeline_steps = reverse_pipeline_steps(pipeline, steps, filter_type) for component in pipeline_steps: diff --git a/kpops/cli/pipeline_config.py b/kpops/cli/pipeline_config.py index 1400323f5..eb30b7c99 100644 --- a/kpops/cli/pipeline_config.py +++ b/kpops/cli/pipeline_config.py @@ -112,7 +112,7 @@ def customise_sources( env_settings: SettingsSourceCallable, file_secret_settings: SettingsSourceCallable, ) -> tuple[ - SettingsSourceCallable | Callable[[PipelineConfig], dict[str, Any]], ... 
+ SettingsSourceCallable | Callable[[PipelineConfig], dict[str, Any]], ..., ]: return ( env_settings, diff --git a/kpops/cli/registry.py b/kpops/cli/registry.py index 97a910bcd..fc40f8938 100644 --- a/kpops/cli/registry.py +++ b/kpops/cli/registry.py @@ -39,7 +39,7 @@ def __getitem__(self, component_type: str) -> type[PipelineComponent]: return self._classes[component_type] except KeyError as ke: raise ClassNotFoundError( - f"Could not find a component of type {component_type}" + f"Could not find a component of type {component_type}", ) from ke @@ -56,7 +56,7 @@ def _find_classes(module_name: str, baseclass: type[T]) -> Iterator[type[T]]: if issubclass(_class, baseclass): # filter out internal kpops classes unless specifically requested if _class.__module__.startswith( - KPOPS_MODULE + KPOPS_MODULE, ) and not module_name.startswith(KPOPS_MODULE): continue yield _class diff --git a/kpops/component_handlers/helm_wrapper/dry_run_handler.py b/kpops/component_handlers/helm_wrapper/dry_run_handler.py index 2d28957b7..7b1429dab 100644 --- a/kpops/component_handlers/helm_wrapper/dry_run_handler.py +++ b/kpops/component_handlers/helm_wrapper/dry_run_handler.py @@ -18,7 +18,7 @@ def print_helm_diff(self, stdout: str, helm_release_name: str, log: Logger) -> N :param log: The Logger object of the component class """ current_release = list( - self._helm.get_manifest(helm_release_name, self.namespace) + self._helm.get_manifest(helm_release_name, self.namespace), ) if current_release: log.info(f"Helm release {helm_release_name} already exists") diff --git a/kpops/component_handlers/helm_wrapper/helm.py b/kpops/component_handlers/helm_wrapper/helm.py index 95c8ee4f0..0eb519cb6 100644 --- a/kpops/component_handlers/helm_wrapper/helm.py +++ b/kpops/component_handlers/helm_wrapper/helm.py @@ -30,7 +30,7 @@ def __init__(self, helm_config: HelmConfig) -> None: self._version = self.get_version() if self._version.major != 3: raise RuntimeError( - f"The supported Helm version is 3.x.x. The current Helm version is {self._version.major}.{self._version.minor}.{self._version.patch}" + f"The supported Helm version is 3.x.x. The current Helm version is {self._version.major}.{self._version.minor}.{self._version.patch}", ) def add_repo( @@ -121,7 +121,7 @@ def uninstall( return self.__execute(command) except ReleaseNotFoundException: log.warning( - f"Release with name {release_name} not found. Could not uninstall app." + f"Release with name {release_name} not found. 
Could not uninstall app.", ) def template( @@ -184,7 +184,7 @@ def get_version(self) -> Version: version_match = re.search(r"^v(\d+(?:\.\d+){0,2})", short_version) if version_match is None: raise RuntimeError( - f"Could not parse the Helm version.\n\nHelm output:\n{short_version}" + f"Could not parse the Helm version.\n\nHelm output:\n{short_version}", ) version = map(int, version_match.group(1).split(".")) return Version(*version) diff --git a/kpops/component_handlers/helm_wrapper/model.py b/kpops/component_handlers/helm_wrapper/model.py index 35dc43e0d..93e0116e2 100644 --- a/kpops/component_handlers/helm_wrapper/model.py +++ b/kpops/component_handlers/helm_wrapper/model.py @@ -31,19 +31,19 @@ class RepoAuthFlags(BaseModel): """ username: str | None = Field( - default=None, description=describe_attr("username", __doc__) + default=None, description=describe_attr("username", __doc__), ) password: str | None = Field( - default=None, description=describe_attr("password", __doc__) + default=None, description=describe_attr("password", __doc__), ) ca_file: Path | None = Field( - default=None, description=describe_attr("ca_file", __doc__) + default=None, description=describe_attr("ca_file", __doc__), ) cert_file: Path | None = Field( - default=None, description=describe_attr("cert_file", __doc__) + default=None, description=describe_attr("cert_file", __doc__), ) insecure_skip_tls_verify: bool = Field( - default=False, description=describe_attr("insecure_skip_tls_verify", __doc__) + default=False, description=describe_attr("insecure_skip_tls_verify", __doc__), ) class Config(DescConfig): @@ -73,11 +73,11 @@ class HelmRepoConfig(BaseModel): """ repository_name: str = Field( - default=..., description=describe_attr("repository_name", __doc__) + default=..., description=describe_attr("repository_name", __doc__), ) url: str = Field(default=..., description=describe_attr("url", __doc__)) repo_auth_flags: RepoAuthFlags = Field( - default=RepoAuthFlags(), description=describe_attr("repo_auth_flags", __doc__) + default=RepoAuthFlags(), description=describe_attr("repo_auth_flags", __doc__), ) class Config(DescConfig): @@ -131,7 +131,7 @@ def to_command(self) -> list[str]: [ "--set-file", ",".join([f"{key}={path}" for key, path in self.set_file.items()]), - ] + ], ) if self.create_namespace: command.append("--create-namespace") diff --git a/kpops/component_handlers/helm_wrapper/utils.py b/kpops/component_handlers/helm_wrapper/utils.py index 7ad76b93a..e05ee187f 100644 --- a/kpops/component_handlers/helm_wrapper/utils.py +++ b/kpops/component_handlers/helm_wrapper/utils.py @@ -16,7 +16,7 @@ def trim_release_name(name: str, suffix: str = "") -> str: if len(name) > RELEASE_NAME_MAX_LEN: new_name = name[: (RELEASE_NAME_MAX_LEN - len(suffix))] + suffix log.critical( - f"Invalid Helm release name '{name}'. Truncating to {RELEASE_NAME_MAX_LEN} characters: \n {name} --> {new_name}" + f"Invalid Helm release name '{name}'. 
Truncating to {RELEASE_NAME_MAX_LEN} characters: \n {name} --> {new_name}", ) name = new_name return name diff --git a/kpops/component_handlers/kafka_connect/connect_wrapper.py b/kpops/component_handlers/kafka_connect/connect_wrapper.py index ccd9666e3..7f81abb56 100644 --- a/kpops/component_handlers/kafka_connect/connect_wrapper.py +++ b/kpops/component_handlers/kafka_connect/connect_wrapper.py @@ -36,7 +36,7 @@ def host(self) -> str: return self._host def create_connector( - self, connector_config: KafkaConnectorConfig + self, connector_config: KafkaConnectorConfig, ) -> KafkaConnectResponse: """Create a new connector. @@ -47,7 +47,7 @@ def create_connector( config_json = connector_config.dict() connect_data = {"name": connector_config.name, "config": config_json} response = httpx.post( - url=f"{self._host}/connectors", headers=HEADERS, json=connect_data + url=f"{self._host}/connectors", headers=HEADERS, json=connect_data, ) if response.status_code == httpx.codes.CREATED: log.info(f"Connector {connector_config.name} created.") @@ -55,7 +55,7 @@ def create_connector( return KafkaConnectResponse(**response.json()) elif response.status_code == httpx.codes.CONFLICT: log.warning( - "Rebalancing in progress while creating a connector... Retrying..." + "Rebalancing in progress while creating a connector... Retrying...", ) time.sleep(1) self.create_connector(connector_config) @@ -71,7 +71,7 @@ def get_connector(self, connector_name: str) -> KafkaConnectResponse: :return: Information about the connector. """ response = httpx.get( - url=f"{self._host}/connectors/{connector_name}", headers=HEADERS + url=f"{self._host}/connectors/{connector_name}", headers=HEADERS, ) if response.status_code == httpx.codes.OK: log.info(f"Connector {connector_name} exists.") @@ -82,14 +82,14 @@ def get_connector(self, connector_name: str) -> KafkaConnectResponse: raise ConnectorNotFoundException() elif response.status_code == httpx.codes.CONFLICT: log.warning( - "Rebalancing in progress while getting a connector... Retrying..." + "Rebalancing in progress while getting a connector... Retrying...", ) sleep(1) self.get_connector(connector_name) raise KafkaConnectError(response) def update_connector_config( - self, connector_config: KafkaConnectorConfig + self, connector_config: KafkaConnectorConfig, ) -> KafkaConnectResponse: """Create or update a connector. @@ -117,14 +117,14 @@ def update_connector_config( return KafkaConnectResponse(**data) elif response.status_code == httpx.codes.CONFLICT: log.warning( - "Rebalancing in progress while updating a connector... Retrying..." + "Rebalancing in progress while updating a connector... Retrying...", ) sleep(1) self.update_connector_config(connector_config) raise KafkaConnectError(response) def validate_connector_config( - self, connector_config: KafkaConnectorConfig + self, connector_config: KafkaConnectorConfig, ) -> list[str]: """Validate connector config using the given configuration. 
@@ -140,7 +140,7 @@ def validate_connector_config( if response.status_code == httpx.codes.OK: kafka_connect_error_response = KafkaConnectConfigErrorResponse( - **response.json() + **response.json(), ) errors: list[str] = [] @@ -149,7 +149,7 @@ def validate_connector_config( if len(config.value.errors) > 0: for error in config.value.errors: errors.append( - f"Found error for field {config.value.name}: {error}" + f"Found error for field {config.value.name}: {error}", ) return errors raise KafkaConnectError(response) @@ -163,7 +163,7 @@ def delete_connector(self, connector_name: str) -> None: :raises ConnectorNotFoundException: Connector not found """ response = httpx.delete( - url=f"{self._host}/connectors/{connector_name}", headers=HEADERS + url=f"{self._host}/connectors/{connector_name}", headers=HEADERS, ) if response.status_code == httpx.codes.NO_CONTENT: log.info(f"Connector {connector_name} deleted.") @@ -173,7 +173,7 @@ def delete_connector(self, connector_name: str) -> None: raise ConnectorNotFoundException() elif response.status_code == httpx.codes.CONFLICT: log.warning( - "Rebalancing in progress while deleting a connector... Retrying..." + "Rebalancing in progress while deleting a connector... Retrying...", ) sleep(1) self.delete_connector(connector_name) diff --git a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py index 766c76b28..fb81aa411 100644 --- a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py +++ b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py @@ -34,7 +34,7 @@ def __init__( self._timeout = timeout def create_connector( - self, connector_config: KafkaConnectorConfig, *, dry_run: bool + self, connector_config: KafkaConnectorConfig, *, dry_run: bool, ) -> None: """Create a connector. @@ -54,7 +54,7 @@ def create_connector( timeout( lambda: self._connect_wrapper.update_connector_config( - connector_config + connector_config, ), secs=self._timeout, ) @@ -86,11 +86,11 @@ def destroy_connector(self, connector_name: str, *, dry_run: bool) -> None: ) except ConnectorNotFoundException: log.warning( - f"Connector Destruction: the connector {connector_name} does not exist. Skipping." + f"Connector Destruction: the connector {connector_name} does not exist. Skipping.", ) def __dry_run_connector_creation( - self, connector_config: KafkaConnectorConfig + self, connector_config: KafkaConnectorConfig, ) -> None: connector_name = connector_config.name try: @@ -106,7 +106,7 @@ def __dry_run_connector_creation( except ConnectorNotFoundException: diff = render_diff({}, connector_config.dict()) log.info( - f"Connector Creation: connector {connector_name} does not exist. Creating connector with config:\n{diff}" + f"Connector Creation: connector {connector_name} does not exist. Creating connector with config:\n{diff}", ) log.debug("POST /connectors HTTP/1.1") log.debug(f"HOST: {self._connect_wrapper.host}") @@ -115,11 +115,11 @@ def __dry_run_connector_creation( if len(errors) > 0: formatted_errors = "\n".join(errors) raise ConnectorStateException( - f"Connector Creation: validating the connector config for connector {connector_name} resulted in the following errors: {formatted_errors}" + f"Connector Creation: validating the connector config for connector {connector_name} resulted in the following errors: {formatted_errors}", ) else: log.info( - f"Connector Creation: connector config for {connector_name} is valid!" 
+ f"Connector Creation: connector config for {connector_name} is valid!", ) def __dry_run_connector_deletion(self, connector_name: str) -> None: @@ -127,14 +127,14 @@ def __dry_run_connector_deletion(self, connector_name: str) -> None: self._connect_wrapper.get_connector(connector_name) log.info( magentaify( - f"Connector Destruction: connector {connector_name} already exists. Deleting connector." - ) + f"Connector Destruction: connector {connector_name} already exists. Deleting connector.", + ), ) log.debug(f"DELETE /connectors/{connector_name} HTTP/1.1") log.debug(f"HOST: {self._connect_wrapper.host}") except ConnectorNotFoundException: log.warning( - f"Connector Destruction: connector {connector_name} does not exist and cannot be deleted. Skipping." + f"Connector Destruction: connector {connector_name} does not exist and cannot be deleted. Skipping.", ) @classmethod diff --git a/kpops/component_handlers/kafka_connect/timeout.py b/kpops/component_handlers/kafka_connect/timeout.py index 6f0fd788d..cc9b0d127 100644 --- a/kpops/component_handlers/kafka_connect/timeout.py +++ b/kpops/component_handlers/kafka_connect/timeout.py @@ -30,5 +30,5 @@ async def main_supervisor(func: Callable[..., T], secs: int) -> T: return complete except TimeoutError: log.error( - f"Kafka Connect operation {func.__name__} timed out after {secs} seconds. To increase the duration, set the `timeout` option in config.yaml." + f"Kafka Connect operation {func.__name__} timed out after {secs} seconds. To increase the duration, set the `timeout` option in config.yaml.", ) diff --git a/kpops/component_handlers/schema_handler/schema_handler.py b/kpops/component_handlers/schema_handler/schema_handler.py index e5e13e70e..8c2065f7a 100644 --- a/kpops/component_handlers/schema_handler/schema_handler.py +++ b/kpops/component_handlers/schema_handler/schema_handler.py @@ -30,19 +30,19 @@ def schema_provider(self) -> SchemaProvider: try: if not self.components_module: raise ValueError( - f"The Schema Registry URL is set but you haven't specified the component module path. Please provide a valid component module path where your {SchemaProvider.__name__} implementation exists." + f"The Schema Registry URL is set but you haven't specified the component module path. Please provide a valid component module path where your {SchemaProvider.__name__} implementation exists.", ) schema_provider_class = find_class(self.components_module, SchemaProvider) return schema_provider_class() # pyright: ignore[reportGeneralTypeIssues] except ClassNotFoundError as e: raise ValueError( f"No schema provider found in components module {self.components_module}. " - f"Please implement the abstract method in {SchemaProvider.__module__}.{SchemaProvider.__name__}." 
+ f"Please implement the abstract method in {SchemaProvider.__module__}.{SchemaProvider.__name__}.", ) from e @classmethod def load_schema_handler( - cls, components_module: str | None, config: PipelineConfig + cls, components_module: str | None, config: PipelineConfig, ) -> SchemaHandler | None: if not config.schema_registry_url: return None @@ -58,14 +58,14 @@ def submit_schemas(self, to_section: ToSection, dry_run: bool = True) -> None: key_schema_class = config.key_schema if value_schema_class is not None: schema = self.schema_provider.provide_schema( - value_schema_class, to_section.models + value_schema_class, to_section.models, ) self.__submit_value_schema( - schema, value_schema_class, dry_run, topic_name + schema, value_schema_class, dry_run, topic_name, ) if key_schema_class is not None: schema = self.schema_provider.provide_schema( - key_schema_class, to_section.models + key_schema_class, to_section.models, ) self.__submit_key_schema(schema, key_schema_class, dry_run, topic_name) @@ -119,25 +119,25 @@ def __submit_schema( else: log.info( greenify( - f"Schema Submission: The subject {subject} will be submitted." - ) + f"Schema Submission: The subject {subject} will be submitted.", + ), ) else: self.schema_registry_client.register(subject=subject, schema=schema) log.info( - f"Schema Submission: schema submitted for {subject} with model {schema_class}." + f"Schema Submission: schema submitted for {subject} with model {schema_class}.", ) def __subject_exists(self, subject: str) -> bool: return len(self.schema_registry_client.get_versions(subject)) > 0 def __check_compatibility( - self, schema: Schema, schema_class: str, subject: str + self, schema: Schema, schema_class: str, subject: str, ) -> None: registered_version = self.schema_registry_client.check_version(subject, schema) if registered_version is None: if not self.schema_registry_client.test_compatibility( - subject=subject, schema=schema + subject=subject, schema=schema, ): schema_str = ( schema.flat_schema @@ -145,15 +145,15 @@ def __check_compatibility( else str(schema) ) raise Exception( - f"Schema is not compatible for {subject} and model {schema_class}. \n {json.dumps(schema_str, indent=4)}" + f"Schema is not compatible for {subject} and model {schema_class}. \n {json.dumps(schema_str, indent=4)}", ) else: log.debug( - f"Schema Submission: schema was already submitted for the subject {subject} as version {registered_version.schema}. Therefore, the specified schema must be compatible." + f"Schema Submission: schema was already submitted for the subject {subject} as version {registered_version.schema}. Therefore, the specified schema must be compatible.", ) log.info( - f"Schema Submission: compatible schema for {subject} with model {schema_class}." + f"Schema Submission: compatible schema for {subject} with model {schema_class}.", ) def __delete_subject(self, subject: str, dry_run: bool) -> None: @@ -162,5 +162,5 @@ def __delete_subject(self, subject: str, dry_run: bool) -> None: else: version_list = self.schema_registry_client.delete_subject(subject) log.info( - f"Schema Deletion: deleted {len(version_list)} versions for subject {subject}." 
+ f"Schema Deletion: deleted {len(version_list)} versions for subject {subject}.", ) diff --git a/kpops/component_handlers/schema_handler/schema_provider.py b/kpops/component_handlers/schema_handler/schema_provider.py index 2b93bf943..ba7990ce1 100644 --- a/kpops/component_handlers/schema_handler/schema_provider.py +++ b/kpops/component_handlers/schema_handler/schema_provider.py @@ -13,6 +13,6 @@ class SchemaProvider(ABC): @abstractmethod def provide_schema( - self, schema_class: str, models: dict[ModelName, ModelVersion] + self, schema_class: str, models: dict[ModelName, ModelVersion], ) -> Schema: ... diff --git a/kpops/component_handlers/topic/handler.py b/kpops/component_handlers/topic/handler.py index afc4f6e77..b436b20e3 100644 --- a/kpops/component_handlers/topic/handler.py +++ b/kpops/component_handlers/topic/handler.py @@ -35,10 +35,10 @@ def create_topics(self, to_section: ToSection, dry_run: bool) -> None: try: self.proxy_wrapper.get_topic(topic_name=topic_name) topic_config_in_cluster = self.proxy_wrapper.get_topic_config( - topic_name=topic_name + topic_name=topic_name, ) differences = self.__get_topic_config_diff( - topic_config_in_cluster, topic_config.configs + topic_config_in_cluster, topic_config.configs, ) if differences: @@ -46,11 +46,11 @@ def create_topics(self, to_section: ToSection, dry_run: bool) -> None: for difference in differences: if difference.diff_type is DiffType.REMOVE: json_body.append( - {"name": difference.key, "operation": "DELETE"} + {"name": difference.key, "operation": "DELETE"}, ) elif config_value := difference.change.new_value: json_body.append( - {"name": difference.key, "value": config_value} + {"name": difference.key, "value": config_value}, ) self.proxy_wrapper.batch_alter_topic_config( topic_name=topic_name, @@ -59,7 +59,7 @@ def create_topics(self, to_section: ToSection, dry_run: bool) -> None: else: log.info( - f"Topic Creation: config of topic {topic_name} didn't change. Skipping update." + f"Topic Creation: config of topic {topic_name} didn't change. Skipping update.", ) except TopicNotFoundException: self.proxy_wrapper.create_topic(topic_spec=topic_spec) @@ -74,15 +74,15 @@ def delete_topics(self, to_section: ToSection, dry_run: bool) -> None: self.proxy_wrapper.delete_topic(topic_name=topic_name) except TopicNotFoundException: log.warning( - f"Topic Deletion: topic {topic_name} does not exist in the cluster and cannot be deleted. Skipping." + f"Topic Deletion: topic {topic_name} does not exist in the cluster and cannot be deleted. 
Skipping.", ) @staticmethod def __get_topic_config_diff( - cluster_config: TopicConfigResponse, current_config: dict + cluster_config: TopicConfigResponse, current_config: dict, ) -> list[Diff]: comparable_in_cluster_config_dict, _ = parse_rest_proxy_topic_config( - cluster_config + cluster_config, ) return list(Diff.from_dicts(comparable_in_cluster_config_dict, current_config)) @@ -97,10 +97,10 @@ def __dry_run_topic_creation( topic_name = topic_in_cluster.topic_name if topic_config: topic_config_in_cluster = self.proxy_wrapper.get_topic_config( - topic_name=topic_name + topic_name=topic_name, ) in_cluster_config, new_config = parse_and_compare_topic_configs( - topic_config_in_cluster, topic_config.configs + topic_config_in_cluster, topic_config.configs, ) if diff := render_diff(in_cluster_config, new_config): log.info(f"Config changes for topic {topic_name}:") @@ -120,13 +120,13 @@ def __dry_run_topic_creation( self.__check_partition_count(topic_in_cluster, topic_spec, effective_config) self.__check_replication_factor( - topic_in_cluster, topic_spec, effective_config + topic_in_cluster, topic_spec, effective_config, ) except TopicNotFoundException: log.info( greenify( - f"Topic Creation: {topic_name} does not exist in the cluster. Creating topic." - ) + f"Topic Creation: {topic_name} does not exist in the cluster. Creating topic.", + ), ) log.debug(f"POST /clusters/{self.proxy_wrapper.cluster_id}/topics HTTP/1.1") log.debug(f"Host: {self.proxy_wrapper.host}") @@ -145,11 +145,11 @@ def __check_partition_count( topic_spec.partitions_count or int(broker_config["num.partitions"]) ): log.debug( - f"Topic Creation: partition count of topic {topic_name} did not change. Current partitions count {partition_count}. Updating configs." + f"Topic Creation: partition count of topic {topic_name} did not change. Current partitions count {partition_count}. Updating configs.", ) else: raise TopicTransactionError( - f"Topic Creation: partition count of topic {topic_name} changed! Partitions count of topic {topic_name} is {partition_count}. The given partitions count {topic_spec.partitions_count}." + f"Topic Creation: partition count of topic {topic_name} changed! Partitions count of topic {topic_name} is {partition_count}. The given partitions count {topic_spec.partitions_count}.", ) @staticmethod @@ -165,11 +165,11 @@ def __check_replication_factor( or int(broker_config["default.replication.factor"]) ): log.debug( - f"Topic Creation: replication factor of topic {topic_name} did not change. Current replication factor {replication_factor}. Updating configs." + f"Topic Creation: replication factor of topic {topic_name} did not change. Current replication factor {replication_factor}. Updating configs.", ) else: raise TopicTransactionError( - f"Topic Creation: replication factor of topic {topic_name} changed! Replication factor of topic {topic_name} is {replication_factor}. The given replication count {topic_spec.replication_factor}." + f"Topic Creation: replication factor of topic {topic_name} changed! Replication factor of topic {topic_name} is {replication_factor}. The given replication count {topic_spec.replication_factor}.", ) def __dry_run_topic_deletion(self, topic_name: str) -> None: @@ -177,15 +177,15 @@ def __dry_run_topic_deletion(self, topic_name: str) -> None: topic_in_cluster = self.proxy_wrapper.get_topic(topic_name=topic_name) log.info( magentaify( - f"Topic Deletion: topic {topic_in_cluster.topic_name} exists in the cluster. Deleting topic." 
- ) + f"Topic Deletion: topic {topic_in_cluster.topic_name} exists in the cluster. Deleting topic.", + ), ) log.debug( - f"DELETE /clusters/{self.proxy_wrapper.cluster_id}/topics HTTP/1.1" + f"DELETE /clusters/{self.proxy_wrapper.cluster_id}/topics HTTP/1.1", ) except TopicNotFoundException: log.warning( - f"Topic Deletion: topic {topic_name} does not exist in the cluster and cannot be deleted. Skipping." + f"Topic Deletion: topic {topic_name} does not exist in the cluster and cannot be deleted. Skipping.", ) log.debug(f"Host: {self.proxy_wrapper.host}") log.debug(HEADERS) diff --git a/kpops/component_handlers/topic/proxy_wrapper.py b/kpops/component_handlers/topic/proxy_wrapper.py index 88fc6e310..1a36e4b50 100644 --- a/kpops/component_handlers/topic/proxy_wrapper.py +++ b/kpops/component_handlers/topic/proxy_wrapper.py @@ -26,7 +26,7 @@ class ProxyWrapper: def __init__(self, pipeline_config: PipelineConfig) -> None: if not pipeline_config.kafka_rest_host: raise ValueError( - "The Kafka REST Proxy host is not set. Please set the host in the config.yaml using the kafka_rest_host property or set the environemt variable KPOPS_REST_PROXY_HOST." + "The Kafka REST Proxy host is not set. Please set the host in the config.yaml using the kafka_rest_host property or set the environemt variable KPOPS_REST_PROXY_HOST.", ) self._host = pipeline_config.kafka_rest_host diff --git a/kpops/component_handlers/topic/utils.py b/kpops/component_handlers/topic/utils.py index 70f71d0b3..0b5576d1f 100644 --- a/kpops/component_handlers/topic/utils.py +++ b/kpops/component_handlers/topic/utils.py @@ -6,17 +6,17 @@ def parse_and_compare_topic_configs( - topic_config_in_cluster: TopicConfigResponse, topic_config: dict + topic_config_in_cluster: TopicConfigResponse, topic_config: dict, ) -> tuple[dict, dict]: comparable_in_cluster_config_dict, default_configs = parse_rest_proxy_topic_config( - topic_config_in_cluster + topic_config_in_cluster, ) cluster_defaults_overwrite = set(topic_config.keys()) - set( - comparable_in_cluster_config_dict.keys() + comparable_in_cluster_config_dict.keys(), ) config_overwrites = set(comparable_in_cluster_config_dict.keys()) - set( - topic_config.keys() + topic_config.keys(), ) populate_default_configs( cluster_defaults_overwrite, diff --git a/kpops/component_handlers/utils/exception.py b/kpops/component_handlers/utils/exception.py index fe906190f..00bdca315 100644 --- a/kpops/component_handlers/utils/exception.py +++ b/kpops/component_handlers/utils/exception.py @@ -11,7 +11,7 @@ def __init__(self, response: httpx.Response) -> None: self.error_msg = "Something went wrong!" try: log.error( - f"The request responded with the code {self.error_code}. Error body: {response.json()}" + f"The request responded with the code {self.error_code}. 
Error body: {response.json()}", ) response.raise_for_status() except httpx.HTTPError as e: diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index a02cc1417..e789182ff 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -93,17 +93,17 @@ def extend_with_defaults(self, **kwargs) -> dict: config: PipelineConfig = kwargs["config"] log.debug( typer.style( - "Enriching component of type ", fg=typer.colors.GREEN, bold=False + "Enriching component of type ", fg=typer.colors.GREEN, bold=False, ) + typer.style( - kwargs.get("type"), fg=typer.colors.GREEN, bold=True, underline=True - ) + kwargs.get("type"), fg=typer.colors.GREEN, bold=True, underline=True, + ), ) main_default_file_path, environment_default_file_path = get_defaults_file_paths( - config + config, ) defaults = load_defaults( - self.__class__, main_default_file_path, environment_default_file_path + self.__class__, main_default_file_path, environment_default_file_path, ) kwargs = update_nested(kwargs, defaults) return kwargs @@ -166,13 +166,13 @@ def defaults_from_yaml(path: Path, key: str) -> dict: content = load_yaml_file(path, substitution=ENV) if not isinstance(content, dict): raise TypeError( - "Default files should be structured as map ([app type] -> [default config]" + "Default files should be structured as map ([app type] -> [default config]", ) value = content.get(key) if value is None: return {} log.debug( - f"\tFound defaults for component type {typer.style(key, bold=True, fg=typer.colors.MAGENTA)} in file: {path}" + f"\tFound defaults for component type {typer.style(key, bold=True, fg=typer.colors.MAGENTA)} in file: {path}", ) return value @@ -189,11 +189,11 @@ def get_defaults_file_paths(config: PipelineConfig) -> tuple[Path, Path]: """ defaults_dir = Path(config.defaults_path).resolve() main_default_file_path = defaults_dir / Path( - config.defaults_filename_prefix + config.defaults_filename_prefix, ).with_suffix(".yaml") environment_default_file_path = defaults_dir / Path( - f"{config.defaults_filename_prefix}_{config.environment}" + f"{config.defaults_filename_prefix}_{config.environment}", ).with_suffix(".yaml") return main_default_file_path, environment_default_file_path diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index c6919bf3e..c217a3c92 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -30,7 +30,7 @@ class KafkaStreamsConfig(BaseModel): brokers: str = Field(default=..., description=describe_attr("brokers", __doc__)) schema_registry_url: str | None = Field( - default=None, description=describe_attr("schema_registry_url", __doc__) + default=None, description=describe_attr("schema_registry_url", __doc__), ) class Config(CamelCaseConfig, DescConfig): @@ -45,10 +45,10 @@ class KafkaAppConfig(KubernetesAppConfig): """ streams: KafkaStreamsConfig = Field( - default=..., description=describe_attr("streams", __doc__) + default=..., description=describe_attr("streams", __doc__), ) name_override: str | None = Field( - default=None, description=describe_attr("name_override", __doc__) + default=None, description=describe_attr("name_override", __doc__), ) @@ -89,12 +89,12 @@ def clean_up_helm_chart(self) -> str: def deploy(self, dry_run: bool) -> None: if self.to: self.handlers.topic_handler.create_topics( - to_section=self.to, 
dry_run=dry_run + to_section=self.to, dry_run=dry_run, ) if self.handlers.schema_handler: self.handlers.schema_handler.submit_schemas( - to_section=self.to, dry_run=dry_run + to_section=self.to, dry_run=dry_run, ) super().deploy(dry_run) @@ -113,7 +113,7 @@ def _run_clean_up_job( """ suffix = "-clean" clean_up_release_name = trim_release_name( - self.helm_release_name + suffix, suffix + self.helm_release_name + suffix, suffix, ) log.info(f"Uninstall old cleanup job for {clean_up_release_name}") @@ -122,7 +122,7 @@ def _run_clean_up_job( log.info(f"Init cleanup job for {clean_up_release_name}") stdout = self.__install_clean_up_job( - clean_up_release_name, suffix, values, dry_run + clean_up_release_name, suffix, values, dry_run, ) if dry_run: diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py index b63aaebda..2d6c8c8bc 100644 --- a/kpops/components/base_components/kafka_connector.py +++ b/kpops/components/base_components/kafka_connector.py @@ -65,7 +65,7 @@ class KafkaConnector(PipelineComponent, ABC): description=describe_attr("repo_config", __doc__), ) version: str | None = Field( - default="1.0.4", description=describe_attr("version", __doc__) + default="1.0.4", description=describe_attr("version", __doc__), ) resetter_values: dict = Field( default_factory=dict, @@ -138,12 +138,12 @@ def template_flags(self) -> HelmTemplateFlags: def deploy(self, dry_run: bool) -> None: if self.to: self.handlers.topic_handler.create_topics( - to_section=self.to, dry_run=dry_run + to_section=self.to, dry_run=dry_run, ) if self.handlers.schema_handler: self.handlers.schema_handler.submit_schemas( - to_section=self.to, dry_run=dry_run + to_section=self.to, dry_run=dry_run, ) self.handlers.connector_handler.create_connector(self.app, dry_run=dry_run) @@ -151,7 +151,7 @@ def deploy(self, dry_run: bool) -> None: @override def destroy(self, dry_run: bool) -> None: self.handlers.connector_handler.destroy_connector( - self.full_name, dry_run=dry_run + self.full_name, dry_run=dry_run, ) @override @@ -159,7 +159,7 @@ def clean(self, dry_run: bool) -> None: if self.to: if self.handlers.schema_handler: self.handlers.schema_handler.delete_schemas( - to_section=self.to, dry_run=dry_run + to_section=self.to, dry_run=dry_run, ) self.handlers.topic_handler.delete_topics(self.to, dry_run=dry_run) @@ -181,22 +181,22 @@ def _run_connect_resetter( """ log.info( magentaify( - f"Connector Cleanup: uninstalling cleanup job Helm release from previous runs for {self.full_name}" - ) + f"Connector Cleanup: uninstalling cleanup job Helm release from previous runs for {self.full_name}", + ), ) self.__uninstall_connect_resetter(self._resetter_release_name, dry_run) log.info( magentaify( - f"Connector Cleanup: deploy Connect {self._connector_type.value} resetter for {self.full_name}" - ) + f"Connector Cleanup: deploy Connect {self._connector_type.value} resetter for {self.full_name}", + ), ) stdout = self.__install_connect_resetter(dry_run, **kwargs) if dry_run: self.dry_run_handler.print_helm_diff( - stdout, self._resetter_release_name, log + stdout, self._resetter_release_name, log, ) if not retain_clean_jobs: @@ -359,7 +359,7 @@ def clean(self, dry_run: bool) -> None: self.__run_kafka_connect_resetter(dry_run, delete_consumer_group=True) def __run_kafka_connect_resetter( - self, dry_run: bool, delete_consumer_group: bool + self, dry_run: bool, delete_consumer_group: bool, ) -> None: """Run the connector resetter. 
diff --git a/kpops/components/base_components/kubernetes_app.py b/kpops/components/base_components/kubernetes_app.py index 57c51a187..b2676da7c 100644 --- a/kpops/components/base_components/kubernetes_app.py +++ b/kpops/components/base_components/kubernetes_app.py @@ -25,7 +25,7 @@ log = logging.getLogger("KubernetesAppComponent") KUBERNETES_NAME_CHECK_PATTERN = re.compile( - r"^(?![0-9]+$)(?!.*-$)(?!-)[a-z0-9-.]{1,253}(? str: def helm_chart(self) -> str: """Return component's Helm chart.""" raise NotImplementedError( - f"Please implement the helm_chart property of the {self.__module__} module." + f"Please implement the helm_chart property of the {self.__module__} module.", ) @property @@ -171,7 +171,7 @@ def print_helm_diff(self, stdout: str) -> None: :param stdout: The output of a Helm command that installs or upgrades the release """ current_release = list( - self.helm.get_manifest(self.helm_release_name, self.namespace) + self.helm.get_manifest(self.helm_release_name, self.namespace), ) if current_release: log.info(f"Helm release {self.helm_release_name} already exists") diff --git a/kpops/components/base_components/models/from_section.py b/kpops/components/base_components/models/from_section.py index fdef7782f..aea159eb2 100644 --- a/kpops/components/base_components/models/from_section.py +++ b/kpops/components/base_components/models/from_section.py @@ -27,7 +27,7 @@ class FromTopic(BaseModel): """ type: InputTopicTypes | None = Field( - default=None, description=describe_attr("type", __doc__) + default=None, description=describe_attr("type", __doc__), ) role: str | None = Field(default=None, description=describe_attr("role", __doc__)) diff --git a/kpops/components/base_components/models/to_section.py b/kpops/components/base_components/models/to_section.py index c10f27c23..00393ee4e 100644 --- a/kpops/components/base_components/models/to_section.py +++ b/kpops/components/base_components/models/to_section.py @@ -31,7 +31,7 @@ class TopicConfig(BaseModel): """ type: OutputTopicTypes | None = Field( - default=None, title="Topic type", description=describe_attr("type", __doc__) + default=None, title="Topic type", description=describe_attr("type", __doc__), ) key_schema: str | None = Field( default=None, @@ -54,7 +54,7 @@ class TopicConfig(BaseModel): description=describe_attr("replication_factor", __doc__), ) configs: dict[str, str | int] = Field( - default={}, description=describe_attr("configs", __doc__) + default={}, description=describe_attr("configs", __doc__), ) role: str | None = Field(default=None, description=describe_attr("role", __doc__)) @@ -79,10 +79,10 @@ class ToSection(BaseModel): """ topics: dict[TopicName, TopicConfig] = Field( - default={}, description=describe_attr("topics", __doc__) + default={}, description=describe_attr("topics", __doc__), ) models: dict[ModelName, ModelVersion] = Field( - default={}, description=describe_attr("models", __doc__) + default={}, description=describe_attr("models", __doc__), ) class Config(DescConfig): diff --git a/kpops/components/streams_bootstrap/producer/model.py b/kpops/components/streams_bootstrap/producer/model.py index 8af1a68c6..ad948bfcc 100644 --- a/kpops/components/streams_bootstrap/producer/model.py +++ b/kpops/components/streams_bootstrap/producer/model.py @@ -15,10 +15,10 @@ class ProducerStreamsConfig(KafkaStreamsConfig): """ extra_output_topics: dict[str, str] = Field( - default={}, description=describe_attr("extra_output_topics", __doc__) + default={}, description=describe_attr("extra_output_topics", __doc__), ) 
output_topic: str | None = Field( - default=None, description=describe_attr("output_topic", __doc__) + default=None, description=describe_attr("output_topic", __doc__), ) @@ -29,7 +29,7 @@ class ProducerValues(KafkaAppConfig): """ streams: ProducerStreamsConfig = Field( - default=..., description=describe_attr("streams", __doc__) + default=..., description=describe_attr("streams", __doc__), ) class Config(BaseConfig): diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index ece5c042b..28ec5059e 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -27,28 +27,28 @@ class StreamsConfig(KafkaStreamsConfig): """ input_topics: list[str] = Field( - default=[], description=describe_attr("input_topics", __doc__) + default=[], description=describe_attr("input_topics", __doc__), ) input_pattern: str | None = Field( - default=None, description=describe_attr("input_pattern", __doc__) + default=None, description=describe_attr("input_pattern", __doc__), ) extra_input_topics: dict[str, list[str]] = Field( - default={}, description=describe_attr("extra_input_topics", __doc__) + default={}, description=describe_attr("extra_input_topics", __doc__), ) extra_input_patterns: dict[str, str] = Field( - default={}, description=describe_attr("extra_input_patterns", __doc__) + default={}, description=describe_attr("extra_input_patterns", __doc__), ) extra_output_topics: dict[str, str] = Field( - default={}, description=describe_attr("extra_output_topics", __doc__) + default={}, description=describe_attr("extra_output_topics", __doc__), ) output_topic: str | None = Field( - default=None, description=describe_attr("output_topic", __doc__) + default=None, description=describe_attr("output_topic", __doc__), ) error_topic: str | None = Field( - default=None, description=describe_attr("error_topic", __doc__) + default=None, description=describe_attr("error_topic", __doc__), ) config: dict[str, str] = Field( - default={}, description=describe_attr("config", __doc__) + default={}, description=describe_attr("config", __doc__), ) def add_input_topics(self, topics: list[str]) -> None: @@ -69,7 +69,7 @@ def add_extra_input_topics(self, role: str, topics: list[str]) -> None: :param role: Topic role """ self.extra_input_topics[role] = deduplicate( - self.extra_input_topics.get(role, []) + topics + self.extra_input_topics.get(role, []) + topics, ) @override diff --git a/kpops/pipeline_generator/pipeline.py b/kpops/pipeline_generator/pipeline.py index 7b3515e1c..0ed15e9b1 100644 --- a/kpops/pipeline_generator/pipeline.py +++ b/kpops/pipeline_generator/pipeline.py @@ -64,13 +64,13 @@ def validate_unique_names(self) -> None: duplicates = [name for name, count in Counter(step_names).items() if count > 1] if duplicates: raise ValidationError( - f"step names should be unique. duplicate step names: {', '.join(duplicates)}" + f"step names should be unique. 
duplicate step names: {', '.join(duplicates)}", ) @staticmethod def _populate_component_name(component: PipelineComponent) -> None: # TODO: remove with suppress( - AttributeError # Some components like Kafka Connect do not have a name_override attribute + AttributeError, # Some components like Kafka Connect do not have a name_override attribute ): if (app := getattr(component, "app")) and app.name_override is None: app.name_override = component.full_name @@ -88,7 +88,7 @@ def create_env_components_index( for component in environment_components: if "type" not in component or "name" not in component: raise ValueError( - "To override components per environment, every component should at least have a type and a name." + "To override components per environment, every component should at least have a type and a name.", ) index[component["name"]] = component return index @@ -138,14 +138,14 @@ def load_from_yaml( main_content = load_yaml_file(path, substitution=ENV) if not isinstance(main_content, list): raise TypeError( - f"The pipeline definition {path} should contain a list of components" + f"The pipeline definition {path} should contain a list of components", ) env_content = [] if (env_file := Pipeline.pipeline_filename_environment(path, config)).exists(): env_content = load_yaml_file(env_file, substitution=ENV) if not isinstance(env_content, list): raise TypeError( - f"The pipeline definition {env_file} should contain a list of components" + f"The pipeline definition {env_file} should contain a list of components", ) pipeline = cls(main_content, env_content, registry, config, handlers) @@ -165,20 +165,20 @@ def parse_components(self, component_list: list[dict]) -> None: component_type: str = component_data["type"] except KeyError as ke: raise ValueError( - "Every component must have a type defined, this component does not have one." + "Every component must have a type defined, this component does not have one.", ) from ke component_class = self.registry[component_type] self.apply_component(component_class, component_data) except Exception as ex: # noqa: BLE001 if "name" in component_data: raise ParsingException( - f"Error enriching {component_data['type']} component {component_data['name']}" + f"Error enriching {component_data['type']} component {component_data['name']}", ) from ex else: raise ParsingException() from ex def apply_component( - self, component_class: type[PipelineComponent], component_data: dict + self, component_class: type[PipelineComponent], component_data: dict, ) -> None: """Instantiate, enrich and inflate pipeline component. 
@@ -205,14 +205,14 @@ def apply_component( from_topic, ) in enriched_component.from_.components.items(): original_from_component = self.components.find( - original_from_component_name + original_from_component_name, ) inflated_from_component = original_from_component.inflate()[-1] resolved_from_component = self.components.find( - inflated_from_component.name + inflated_from_component.name, ) enriched_component.weave_from_topics( - resolved_from_component.to, from_topic + resolved_from_component.to, from_topic, ) elif self.components: # read from previous component @@ -260,7 +260,7 @@ def print_yaml(self, substitution: dict | None = None) -> None: theme="ansi_dark", ) Console( - width=1000 # HACK: overwrite console width to avoid truncating output + width=1000, # HACK: overwrite console width to avoid truncating output ).print(syntax) def __iter__(self) -> Iterator[PipelineComponent]: @@ -269,8 +269,8 @@ def __iter__(self) -> Iterator[PipelineComponent]: def __str__(self) -> str: return yaml.dump( json.loads( # HACK: serialize types on Pydantic model export, which are not serialized by .dict(); e.g. pathlib.Path - self.components.json(exclude_none=True, by_alias=True) - ) + self.components.json(exclude_none=True, by_alias=True), + ), ) def __len__(self) -> int: @@ -296,14 +296,14 @@ def substitute_in_component(self, component_as_dict: dict) -> dict: substitution_hardcoded, ) substitution = generate_substitution( - json.loads(config.json()), existing_substitution=component_substitution + json.loads(config.json()), existing_substitution=component_substitution, ) return json.loads( substitute_nested( json.dumps(component_as_dict), **update_nested_pair(substitution, ENV), - ) + ), ) def validate(self) -> None: diff --git a/kpops/utils/dict_differ.py b/kpops/utils/dict_differ.py index dbc53f67c..da47bd620 100644 --- a/kpops/utils/dict_differ.py +++ b/kpops/utils/dict_differ.py @@ -51,7 +51,7 @@ class Diff(Generic[T]): @staticmethod def from_dicts( - d1: dict, d2: dict, ignore: set[str] | None = None + d1: dict, d2: dict, ignore: set[str] | None = None, ) -> Iterator[Diff]: for diff_type, keys, changes in diff(d1, d2, ignore=ignore): if not isinstance(changes, list): @@ -86,8 +86,8 @@ def render_diff(d1: dict, d2: dict, ignore: set[str] | None = None) -> str | Non differ.compare( to_yaml(d1) if d1 else "", to_yaml(d2_filtered) if d2_filtered else "", - ) - ) + ), + ), ) diff --git a/kpops/utils/dict_ops.py b/kpops/utils/dict_ops.py index 94c9003f4..0f4643043 100644 --- a/kpops/utils/dict_ops.py +++ b/kpops/utils/dict_ops.py @@ -47,7 +47,7 @@ def update_nested(*argv: dict) -> dict: def flatten_mapping( - nested_mapping: Mapping[str, Any], prefix: str | None = None, separator: str = "_" + nested_mapping: Mapping[str, Any], prefix: str | None = None, separator: str = "_", ) -> dict[str, Any]: """Flattens a Mapping. 
diff --git a/kpops/utils/environment.py b/kpops/utils/environment.py index 0ed7ae920..b1b2271b4 100644 --- a/kpops/utils/environment.py +++ b/kpops/utils/environment.py @@ -13,7 +13,7 @@ def __init__(self, mapping=None, /, **kwargs) -> None: mapping = {} if kwargs: mapping.update( - {transformation(key): value for key, value in kwargs.items()} + {transformation(key): value for key, value in kwargs.items()}, ) super().__init__(mapping) diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index f202d0706..51c64ce2c 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -37,7 +37,7 @@ def field_schema(field: ModelField, **kwargs: Any) -> Any: def _is_valid_component( - defined_component_types: set[str], component: type[PipelineComponent] + defined_component_types: set[str], component: type[PipelineComponent], ) -> bool: """Check whether a PipelineComponent subclass has a valid definition for the schema generation. @@ -56,7 +56,7 @@ def _is_valid_component( def _add_components( - components_module: str, components: tuple[type[PipelineComponent]] | None = None + components_module: str, components: tuple[type[PipelineComponent]] | None = None, ) -> tuple[type[PipelineComponent]]: """Add components to a components tuple. @@ -82,7 +82,7 @@ def _add_components( def gen_pipeline_schema( - components_module: str | None = None, include_stock_components: bool = True + components_module: str | None = None, include_stock_components: bool = True, ) -> None: """Generate a json schema from the models of pipeline components. @@ -123,7 +123,7 @@ def gen_pipeline_schema( ) AnnotatedPipelineComponents = Annotated[ - PipelineComponents, Field(discriminator="type") + PipelineComponents, Field(discriminator="type"), ] schema = schema_json_of( @@ -139,6 +139,6 @@ def gen_pipeline_schema( def gen_config_schema() -> None: """Generate a json schema from the model of pipeline config.""" schema = schema_json_of( - PipelineConfig, title="KPOps config schema", indent=4, sort_keys=True + PipelineConfig, title="KPOps config schema", indent=4, sort_keys=True, ) print(schema) diff --git a/kpops/utils/yaml_loading.py b/kpops/utils/yaml_loading.py index cb9536200..9dc53c1ab 100644 --- a/kpops/utils/yaml_loading.py +++ b/kpops/utils/yaml_loading.py @@ -9,7 +9,7 @@ def generate_hashkey( - file_path: Path, substitution: Mapping[str, Any] | None = None + file_path: Path, substitution: Mapping[str, Any] | None = None, ) -> tuple: if substitution is None: substitution = {} @@ -18,7 +18,7 @@ def generate_hashkey( @cached(cache={}, key=generate_hashkey) def load_yaml_file( - file_path: Path, *, substitution: Mapping[str, Any] | None = None + file_path: Path, *, substitution: Mapping[str, Any] | None = None, ) -> dict | list[dict]: with open(file_path) as yaml_file: return yaml.load(substitute(yaml_file.read(), substitution), Loader=yaml.Loader) @@ -71,6 +71,6 @@ def substitute_nested(input: str, **kwargs) -> str: old_str, new_str = new_str, substitute(new_str, kwargs) if new_str != old_str: raise ValueError( - "An infinite loop condition detected. Check substitution variables." + "An infinite loop condition detected. 
Check substitution variables.", ) return old_str diff --git a/tests/cli/resources/module.py b/tests/cli/resources/module.py index 3956eedf2..4014d6ec4 100644 --- a/tests/cli/resources/module.py +++ b/tests/cli/resources/module.py @@ -9,6 +9,6 @@ class CustomSchemaProvider(SchemaProvider): def provide_schema( - self, schema_class: str, models: dict[ModelName, ModelVersion] + self, schema_class: str, models: dict[ModelName, ModelVersion], ) -> Schema: return AvroSchema() diff --git a/tests/cli/test_pipeline_steps.py b/tests/cli/test_pipeline_steps.py index a09d7b064..1d1cafbf1 100644 --- a/tests/cli/test_pipeline_steps.py +++ b/tests/cli/test_pipeline_steps.py @@ -45,7 +45,7 @@ def log_info(mocker: MockerFixture) -> MagicMock: def tests_filter_steps_to_apply(log_info: MagicMock, pipeline: Pipeline): filtered_steps = get_steps_to_apply( - pipeline, "example2,example3", FilterType.INCLUDE + pipeline, "example2,example3", FilterType.INCLUDE, ) assert len(filtered_steps) == 2 @@ -54,7 +54,7 @@ def tests_filter_steps_to_apply(log_info: MagicMock, pipeline: Pipeline): assert log_info.call_count == 1 log_info.assert_any_call( - "The following steps are included:\n['example2', 'example3']" + "The following steps are included:\n['example2', 'example3']", ) filtered_steps = get_steps_to_apply(pipeline, None, FilterType.INCLUDE) @@ -66,7 +66,7 @@ def tests_filter_steps_to_apply(log_info: MagicMock, pipeline: Pipeline): def tests_filter_steps_to_exclude(log_info: MagicMock, pipeline: Pipeline): filtered_steps = get_steps_to_apply( - pipeline, "example2,example3", FilterType.EXCLUDE + pipeline, "example2,example3", FilterType.EXCLUDE, ) assert len(filtered_steps) == 1 diff --git a/tests/cli/test_schema_generation.py b/tests/cli/test_schema_generation.py index 5223c4c21..fe66a5990 100644 --- a/tests/cli/test_schema_generation.py +++ b/tests/cli/test_schema_generation.py @@ -75,7 +75,7 @@ class SubPipelineComponentCorrectDocstr(SubPipelineComponent): """ example_attr: str = Field( - default=..., description=describe_attr("example_attr", __doc__) + default=..., description=describe_attr("example_attr", __doc__), ) @@ -83,7 +83,7 @@ class SubPipelineComponentCorrectDocstr(SubPipelineComponent): @pytest.mark.filterwarnings( - "ignore:handlers", "ignore:config", "ignore:enrich", "ignore:validate" + "ignore:handlers", "ignore:config", "ignore:enrich", "ignore:validate", ) class TestGenSchema: def test_gen_pipeline_schema_no_modules(self, caplog: pytest.LogCaptureFixture): @@ -101,7 +101,7 @@ def test_gen_pipeline_schema_no_modules(self, caplog: pytest.LogCaptureFixture): "root", logging.WARNING, "No components are provided, no schema is generated.", - ) + ), ] assert result.exit_code == 0 diff --git a/tests/component_handlers/helm_wrapper/test_dry_run_handler.py b/tests/component_handlers/helm_wrapper/test_dry_run_handler.py index 20c02f50d..df44f3e1e 100644 --- a/tests/component_handlers/helm_wrapper/test_dry_run_handler.py +++ b/tests/component_handlers/helm_wrapper/test_dry_run_handler.py @@ -15,13 +15,13 @@ class TestDryRunHandler: @pytest.fixture def helm_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch( - "kpops.component_handlers.helm_wrapper.dry_run_handler.Helm" + "kpops.component_handlers.helm_wrapper.dry_run_handler.Helm", ).return_value @pytest.fixture def helm_diff_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch( - "kpops.component_handlers.helm_wrapper.dry_run_handler.HelmDiff" + "kpops.component_handlers.helm_wrapper.dry_run_handler.HelmDiff", ).return_value 
def test_should_print_helm_diff_when_release_is_new( @@ -42,7 +42,7 @@ def test_should_print_helm_diff_when_release_is_new( dry_run_handler.print_helm_diff("A test stdout", "a-release-name", log) helm_mock.get_manifest.assert_called_once_with( - "a-release-name", "test-namespace" + "a-release-name", "test-namespace", ) assert "Helm release a-release-name does not exist" in caplog.text mock_load_manifest.assert_called_once_with("A test stdout") @@ -55,7 +55,7 @@ def test_should_print_helm_diff_when_release_exists( caplog: LogCaptureFixture, ): helm_mock.get_manifest.return_value = iter( - [HelmTemplate("path.yaml", {"a": 1})] + [HelmTemplate("path.yaml", {"a": 1})], ) mock_load_manifest = mocker.patch( "kpops.component_handlers.helm_wrapper.dry_run_handler.Helm.load_manifest", @@ -67,7 +67,7 @@ def test_should_print_helm_diff_when_release_exists( dry_run_handler.print_helm_diff("A test stdout", "a-release-name", log) helm_mock.get_manifest.assert_called_once_with( - "a-release-name", "test-namespace" + "a-release-name", "test-namespace", ) assert "Helm release a-release-name already exists" in caplog.text mock_load_manifest.assert_called_once_with("A test stdout") diff --git a/tests/component_handlers/helm_wrapper/test_helm_diff.py b/tests/component_handlers/helm_wrapper/test_helm_diff.py index 15a58a023..fc423cf20 100644 --- a/tests/component_handlers/helm_wrapper/test_helm_diff.py +++ b/tests/component_handlers/helm_wrapper/test_helm_diff.py @@ -24,7 +24,7 @@ def test_diff(): HelmTemplate("a.yaml", {"a": 2}), HelmTemplate("c.yaml", {"c": 1}), ], - ) + ), ) == [ Change( old_value={"a": 1}, @@ -42,7 +42,7 @@ def test_diff(): # test no current release assert list( - helm_diff.calculate_changes((), [HelmTemplate("a.yaml", {"a": 1})]) + helm_diff.calculate_changes((), [HelmTemplate("a.yaml", {"a": 1})]), ) == [ Change( old_value={}, diff --git a/tests/component_handlers/helm_wrapper/test_helm_wrapper.py b/tests/component_handlers/helm_wrapper/test_helm_wrapper.py index de23dca8e..1afdb7885 100644 --- a/tests/component_handlers/helm_wrapper/test_helm_wrapper.py +++ b/tests/component_handlers/helm_wrapper/test_helm_wrapper.py @@ -44,7 +44,7 @@ def mock_get_version(self, mocker: MockerFixture) -> MagicMock: return mock_get_version def test_should_call_run_command_method_when_helm_install_with_defaults( - self, run_command: MagicMock, mock_get_version: MagicMock + self, run_command: MagicMock, mock_get_version: MagicMock, ): helm_wrapper = Helm(helm_config=HelmConfig()) @@ -74,7 +74,7 @@ def test_should_call_run_command_method_when_helm_install_with_defaults( ) def test_should_include_configured_tls_parameters_on_add_when_version_is_old( - self, run_command: MagicMock, mocker: MockerFixture + self, run_command: MagicMock, mocker: MockerFixture, ): mock_get_version = mocker.patch.object(Helm, "get_version") mock_get_version.return_value = Version(major=3, minor=6, patch=0) @@ -104,7 +104,7 @@ def test_should_include_configured_tls_parameters_on_add_when_version_is_old( ] def test_should_include_configured_tls_parameters_on_add_when_version_is_new( - self, run_command: MagicMock, mock_get_version: MagicMock + self, run_command: MagicMock, mock_get_version: MagicMock, ): helm = Helm(HelmConfig()) @@ -132,7 +132,7 @@ def test_should_include_configured_tls_parameters_on_add_when_version_is_new( ] def test_should_include_configured_tls_parameters_on_update( - self, run_command: MagicMock, mock_get_version: MagicMock + self, run_command: MagicMock, mock_get_version: MagicMock, ): helm_wrapper = 
Helm(helm_config=HelmConfig()) helm_wrapper.upgrade_install( @@ -168,7 +168,7 @@ def test_should_include_configured_tls_parameters_on_update( ) def test_should_call_run_command_method_when_helm_install_with_non_defaults( - self, run_command: MagicMock, mock_get_version: MagicMock + self, run_command: MagicMock, mock_get_version: MagicMock, ): helm_wrapper = Helm(helm_config=HelmConfig()) helm_wrapper.upgrade_install( @@ -213,7 +213,7 @@ def test_should_call_run_command_method_when_helm_install_with_non_defaults( ) def test_should_call_run_command_method_when_uninstalling_streams_app( - self, run_command: MagicMock, mock_get_version: MagicMock + self, run_command: MagicMock, mock_get_version: MagicMock, ): helm_wrapper = Helm(helm_config=HelmConfig()) helm_wrapper.uninstall( @@ -240,11 +240,11 @@ def test_should_log_warning_when_release_not_found( ) log_warning_mock.assert_called_once_with( - "Release with name test-release not found. Could not uninstall app." + "Release with name test-release not found. Could not uninstall app.", ) def test_should_call_run_command_method_when_installing_streams_app__with_dry_run( - self, run_command: MagicMock, mock_get_version: MagicMock + self, run_command: MagicMock, mock_get_version: MagicMock, ): helm_wrapper = Helm(helm_config=HelmConfig()) @@ -267,7 +267,7 @@ def test_should_call_run_command_method_when_installing_streams_app__with_dry_ru def test_validate_console_output(self): with pytest.raises(RuntimeError): Helm.parse_helm_command_stderr_output( - "A specific\n eRrOr was found in this line" + "A specific\n eRrOr was found in this line", ) with pytest.raises(ReleaseNotFoundException): Helm.parse_helm_command_stderr_output("New \nmessage\n ReLease: noT foUnD") @@ -275,13 +275,13 @@ def test_validate_console_output(self): Helm.parse_helm_command_stderr_output("This is \njust WaRnIng nothing more") except RuntimeError as e: pytest.fail( - f"validate_console_output() raised RuntimeError unexpectedly!\nError message: {e}" + f"validate_console_output() raised RuntimeError unexpectedly!\nError message: {e}", ) try: Helm.parse_helm_command_stderr_output("This is \njust WaRnIng nothing more") except ReleaseNotFoundException: pytest.fail( - f"validate_console_output() raised ReleaseNotFoundException unexpectedly!\nError message: {ReleaseNotFoundException}" + f"validate_console_output() raised ReleaseNotFoundException unexpectedly!\nError message: {ReleaseNotFoundException}", ) def test_helm_template_load(self): @@ -294,7 +294,7 @@ def test_helm_template_load(self): metadata: labels: foo: bar - """ + """, ) helm_template = HelmTemplate.load("test2.yaml", stdout) @@ -317,7 +317,7 @@ def test_load_manifest_with_no_notes(self): --- # Source: chart/templates/test3b.yaml foo: bar - """ + """, ) helm_templates = list(Helm.load_manifest(stdout)) assert len(helm_templates) == 2 @@ -334,7 +334,7 @@ def test_raise_parse_error_when_helm_content_is_invalid(self): """ --- # Resource: chart/templates/test1.yaml - """ + """, ) with pytest.raises(ParseError, match="Not a valid Helm template source"): helm_template = list(Helm.load_manifest(stdout)) @@ -386,7 +386,7 @@ def test_load_manifest(self): NOTES: test - """ + """, ) helm_templates = list(Helm.load_manifest(stdout)) assert len(helm_templates) == 2 @@ -399,7 +399,7 @@ def test_load_manifest(self): assert helm_templates[1].template == {"foo": "bar"} def test_helm_get_manifest( - self, run_command: MagicMock, mock_get_version: MagicMock + self, run_command: MagicMock, mock_get_version: MagicMock, ): helm_wrapper = 
Helm(helm_config=HelmConfig()) run_command.return_value = dedent( @@ -409,10 +409,10 @@ def test_helm_get_manifest( data: - a: 1 - b: 2 - """ + """, ) helm_templates = list( - helm_wrapper.get_manifest("test-release", "test-namespace") + helm_wrapper.get_manifest("test-release", "test-namespace"), ) run_command.assert_called_once_with( command=[ @@ -432,7 +432,7 @@ def test_helm_get_manifest( assert helm_wrapper.get_manifest("test-release", "test-namespace") == () def test_should_call_run_command_method_when_helm_template_with_optional_args( - self, run_command: MagicMock, mock_get_version: MagicMock + self, run_command: MagicMock, mock_get_version: MagicMock, ): helm_wrapper = Helm(helm_config=HelmConfig()) @@ -470,7 +470,7 @@ def test_should_call_run_command_method_when_helm_template_with_optional_args( ) def test_should_call_run_command_method_when_helm_template_without_optional_args( - self, run_command: MagicMock, mock_get_version: MagicMock + self, run_command: MagicMock, mock_get_version: MagicMock, ): helm_wrapper = Helm(helm_config=HelmConfig()) @@ -526,7 +526,7 @@ def test_should_call_helm_version( assert helm._version == expected_version def test_should_raise_exception_if_helm_version_is_old( - self, run_command: MagicMock + self, run_command: MagicMock, ): run_command.return_value = "v2.9.0+gc9f554d" with pytest.raises( @@ -536,10 +536,10 @@ def test_should_raise_exception_if_helm_version_is_old( Helm(helm_config=HelmConfig()) def test_should_raise_exception_if_helm_version_cannot_be_parsed( - self, run_command: MagicMock + self, run_command: MagicMock, ): run_command.return_value = "123" with pytest.raises( - RuntimeError, match="Could not parse the Helm version.\n\nHelm output:\n123" + RuntimeError, match="Could not parse the Helm version.\n\nHelm output:\n123", ): Helm(helm_config=HelmConfig()) diff --git a/tests/component_handlers/helm_wrapper/test_utils.py b/tests/component_handlers/helm_wrapper/test_utils.py index eef6ca14f..8f40b0c5d 100644 --- a/tests/component_handlers/helm_wrapper/test_utils.py +++ b/tests/component_handlers/helm_wrapper/test_utils.py @@ -12,7 +12,7 @@ def test_trim_release_name_with_suffix(): def test_trim_release_name_without_suffix(): name = trim_release_name( - "example-component-name-too-long-fake-fakefakefakefakefake" + "example-component-name-too-long-fake-fakefakefakefakefake", ) assert name == "example-component-name-too-long-fake-fakefakefakefak" assert len(name) == 52 diff --git a/tests/component_handlers/kafka_connect/test_connect_handler.py b/tests/component_handlers/kafka_connect/test_connect_handler.py index a5a1f3246..ff6b7068c 100644 --- a/tests/component_handlers/kafka_connect/test_connect_handler.py +++ b/tests/component_handlers/kafka_connect/test_connect_handler.py @@ -25,25 +25,25 @@ class TestConnectorHandler: @pytest.fixture def log_info_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch( - "kpops.component_handlers.kafka_connect.kafka_connect_handler.log.info" + "kpops.component_handlers.kafka_connect.kafka_connect_handler.log.info", ) @pytest.fixture def log_warning_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch( - "kpops.component_handlers.kafka_connect.kafka_connect_handler.log.warning" + "kpops.component_handlers.kafka_connect.kafka_connect_handler.log.warning", ) @pytest.fixture def log_error_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch( - "kpops.component_handlers.kafka_connect.kafka_connect_handler.log.error" + 
"kpops.component_handlers.kafka_connect.kafka_connect_handler.log.error", ) @pytest.fixture def renderer_diff_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch( - "kpops.component_handlers.kafka_connect.kafka_connect_handler.render_diff" + "kpops.component_handlers.kafka_connect.kafka_connect_handler.render_diff", ) @staticmethod @@ -59,7 +59,7 @@ def connector_config(self) -> KafkaConnectorConfig: **{ "connector.class": "com.bakdata.connect.TestConnector", "name": CONNECTOR_NAME, - } + }, ) def test_should_create_connector_in_dry_run( @@ -75,15 +75,15 @@ def test_should_create_connector_in_dry_run( handler.create_connector(connector_config, dry_run=True) connector_wrapper.get_connector.assert_called_once_with(CONNECTOR_NAME) connector_wrapper.validate_connector_config.assert_called_once_with( - connector_config + connector_config, ) assert log_info_mock.mock_calls == [ mock.call.log_info( - f"Connector Creation: connector {CONNECTOR_NAME} already exists." + f"Connector Creation: connector {CONNECTOR_NAME} already exists.", ), mock.call.log_info( - f"Connector Creation: connector config for {CONNECTOR_NAME} is valid!" + f"Connector Creation: connector config for {CONNECTOR_NAME} is valid!", ), ] @@ -109,10 +109,10 @@ def test_should_log_correct_message_when_create_connector_and_connector_not_exis assert log_info_mock.mock_calls == [ mock.call( - f"Connector Creation: connector {CONNECTOR_NAME} does not exist. Creating connector with config:\n\x1b[32m+ connector.class: org.apache.kafka.connect.file.FileStreamSinkConnector\n\x1b[0m\x1b[32m+ name: {CONNECTOR_NAME}\n\x1b[0m\x1b[32m+ tasks.max: '1'\n\x1b[0m\x1b[32m+ topics: {TOPIC_NAME}\n\x1b[0m" + f"Connector Creation: connector {CONNECTOR_NAME} does not exist. Creating connector with config:\n\x1b[32m+ connector.class: org.apache.kafka.connect.file.FileStreamSinkConnector\n\x1b[0m\x1b[32m+ name: {CONNECTOR_NAME}\n\x1b[0m\x1b[32m+ tasks.max: '1'\n\x1b[0m\x1b[32m+ topics: {TOPIC_NAME}\n\x1b[0m", ), mock.call( - f"Connector Creation: connector config for {CONNECTOR_NAME} is valid!" + f"Connector Creation: connector config for {CONNECTOR_NAME} is valid!", ), ] @@ -134,7 +134,7 @@ def test_should_log_correct_message_when_create_connector_and_connector_exists_i "tasks": [], } connector_wrapper.get_connector.return_value = KafkaConnectResponse( - **actual_response + **actual_response, ) configs = { @@ -147,23 +147,23 @@ def test_should_log_correct_message_when_create_connector_and_connector_exists_i handler.create_connector(connector_config, dry_run=True) connector_wrapper.get_connector.assert_called_once_with(CONNECTOR_NAME) connector_wrapper.validate_connector_config.assert_called_once_with( - connector_config + connector_config, ) assert log_info_mock.mock_calls == [ mock.call( - f"Connector Creation: connector {CONNECTOR_NAME} already exists." + f"Connector Creation: connector {CONNECTOR_NAME} already exists.", ), mock.call( - f"Updating config:\n connector.class: org.apache.kafka.connect.file.FileStreamSinkConnector\n name: {CONNECTOR_NAME}\n\x1b[31m- tasks.max: '1'\n\x1b[0m\x1b[33m? ^\n\x1b[0m\x1b[32m+ tasks.max: '2'\n\x1b[0m\x1b[33m? ^\n\x1b[0m topics: {TOPIC_NAME}\n" + f"Updating config:\n connector.class: org.apache.kafka.connect.file.FileStreamSinkConnector\n name: {CONNECTOR_NAME}\n\x1b[31m- tasks.max: '1'\n\x1b[0m\x1b[33m? ^\n\x1b[0m\x1b[32m+ tasks.max: '2'\n\x1b[0m\x1b[33m? ^\n\x1b[0m topics: {TOPIC_NAME}\n", ), mock.call( - f"Connector Creation: connector config for {CONNECTOR_NAME} is valid!" 
+ f"Connector Creation: connector config for {CONNECTOR_NAME} is valid!", ), ] def test_should_log_invalid_config_when_create_connector_in_dry_run( - self, connector_config: KafkaConnectorConfig, renderer_diff_mock: MagicMock + self, connector_config: KafkaConnectorConfig, renderer_diff_mock: MagicMock, ): connector_wrapper = MagicMock() @@ -184,11 +184,11 @@ def test_should_log_invalid_config_when_create_connector_in_dry_run( handler.create_connector(connector_config, dry_run=True) connector_wrapper.validate_connector_config.assert_called_once_with( - connector_config + connector_config, ) def test_should_call_update_connector_config_when_connector_exists_not_dry_run( - self, connector_config: KafkaConnectorConfig + self, connector_config: KafkaConnectorConfig, ): connector_wrapper = MagicMock() handler = self.connector_handler(connector_wrapper) @@ -201,7 +201,7 @@ def test_should_call_update_connector_config_when_connector_exists_not_dry_run( ] def test_should_call_create_connector_when_connector_does_not_exists_not_dry_run( - self, connector_config: KafkaConnectorConfig + self, connector_config: KafkaConnectorConfig, ): connector_wrapper = MagicMock() @@ -224,8 +224,8 @@ def test_should_print_correct_log_when_destroying_connector_in_dry_run( log_info_mock.assert_called_once_with( magentaify( - f"Connector Destruction: connector {CONNECTOR_NAME} already exists. Deleting connector." - ) + f"Connector Destruction: connector {CONNECTOR_NAME} already exists. Deleting connector.", + ), ) def test_should_print_correct_warning_log_when_destroying_connector_and_connector_exists_in_dry_run( @@ -240,7 +240,7 @@ def test_should_print_correct_warning_log_when_destroying_connector_and_connecto handler.destroy_connector(CONNECTOR_NAME, dry_run=True) log_warning_mock.assert_called_once_with( - f"Connector Destruction: connector {CONNECTOR_NAME} does not exist and cannot be deleted. Skipping." + f"Connector Destruction: connector {CONNECTOR_NAME} does not exist and cannot be deleted. Skipping.", ) def test_should_call_delete_connector_when_destroying_existing_connector_not_dry_run( @@ -267,5 +267,5 @@ def test_should_print_correct_warning_log_when_destroying_connector_and_connecto handler.destroy_connector(CONNECTOR_NAME, dry_run=False) log_warning_mock.assert_called_once_with( - f"Connector Destruction: the connector {CONNECTOR_NAME} does not exist. Skipping." + f"Connector Destruction: the connector {CONNECTOR_NAME} does not exist. 
Skipping.", ) diff --git a/tests/component_handlers/kafka_connect/test_connect_wrapper.py b/tests/component_handlers/kafka_connect/test_connect_wrapper.py index 3db9c090f..d4dd13664 100644 --- a/tests/component_handlers/kafka_connect/test_connect_wrapper.py +++ b/tests/component_handlers/kafka_connect/test_connect_wrapper.py @@ -40,7 +40,7 @@ def connector_config(self) -> KafkaConnectorConfig: **{ "connector.class": "com.bakdata.connect.TestConnector", "name": "test-connector", - } + }, ) def test_should_through_exception_when_host_is_not_set(self): @@ -58,7 +58,7 @@ def test_should_through_exception_when_host_is_not_set(self): @patch("httpx.post") def test_should_create_post_requests_for_given_connector_configuration( - self, mock_post: MagicMock + self, mock_post: MagicMock, ): configs = { "connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector", @@ -84,7 +84,7 @@ def test_should_create_post_requests_for_given_connector_configuration( ) def test_should_return_correct_response_when_connector_created( - self, httpx_mock: HTTPXMock, connector_config: KafkaConnectorConfig + self, httpx_mock: HTTPXMock, connector_config: KafkaConnectorConfig, ): actual_response = { "name": "hdfs-sink-connector", @@ -135,7 +135,7 @@ def test_should_raise_connector_exists_exception_when_connector_exists( ) log_warning.assert_called_with( - "Rebalancing in progress while creating a connector... Retrying..." + "Rebalancing in progress while creating a connector... Retrying...", ) @patch("httpx.get") @@ -152,7 +152,7 @@ def test_should_create_correct_get_connector_request(self, mock_get: MagicMock): @pytest.mark.flaky(reruns=5, condition=sys.platform.startswith("win32")) @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.info") def test_should_return_correct_response_when_getting_connector( - self, log_info: MagicMock, httpx_mock: HTTPXMock + self, log_info: MagicMock, httpx_mock: HTTPXMock, ): connector_name = "test-connector" @@ -187,7 +187,7 @@ def test_should_return_correct_response_when_getting_connector( @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.info") def test_should_raise_connector_not_found_when_getting_connector( - self, log_info: MagicMock, httpx_mock: HTTPXMock + self, log_info: MagicMock, httpx_mock: HTTPXMock, ): connector_name = "test-connector" @@ -202,12 +202,12 @@ def test_should_raise_connector_not_found_when_getting_connector( self.connect_wrapper.get_connector(connector_name) log_info.assert_called_once_with( - f"The named connector {connector_name} does not exists." + f"The named connector {connector_name} does not exists.", ) @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.warning") def test_should_raise_rebalance_in_progress_when_getting_connector( - self, log_warning: MagicMock, httpx_mock: HTTPXMock + self, log_warning: MagicMock, httpx_mock: HTTPXMock, ): connector_name = "test-connector" @@ -225,7 +225,7 @@ def test_should_raise_rebalance_in_progress_when_getting_connector( ) log_warning.assert_called_with( - "Rebalancing in progress while getting a connector... Retrying..." + "Rebalancing in progress while getting a connector... 
Retrying...", ) @patch("httpx.put") @@ -243,7 +243,7 @@ def test_should_create_correct_update_connector_request(self, mock_put: MagicMoc } with pytest.raises(KafkaConnectError): self.connect_wrapper.update_connector_config( - KafkaConnectorConfig(**configs) + KafkaConnectorConfig(**configs), ) mock_put.assert_called_with( @@ -287,11 +287,11 @@ def test_should_return_correct_response_when_update_connector( status_code=200, ) expected_response = self.connect_wrapper.update_connector_config( - connector_config + connector_config, ) assert KafkaConnectResponse(**actual_response) == expected_response log_info.assert_called_once_with( - f"Config for connector {connector_name} updated." + f"Config for connector {connector_name} updated.", ) @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.info") @@ -329,7 +329,7 @@ def test_should_return_correct_response_when_update_connector_created( status_code=201, ) expected_response = self.connect_wrapper.update_connector_config( - connector_config + connector_config, ) assert KafkaConnectResponse(**actual_response) == expected_response log_info.assert_called_once_with(f"Connector {connector_name} created.") @@ -357,12 +357,12 @@ def test_should_raise_connector_exists_exception_when_update_connector( ) log_warning.assert_called_with( - "Rebalancing in progress while updating a connector... Retrying..." + "Rebalancing in progress while updating a connector... Retrying...", ) @patch("httpx.delete") def test_should_create_correct_delete_connector_request( - self, mock_delete: MagicMock + self, mock_delete: MagicMock, ): connector_name = "test-connector" with pytest.raises(KafkaConnectError): @@ -375,7 +375,7 @@ def test_should_create_correct_delete_connector_request( @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.info") def test_should_return_correct_response_when_deleting_connector( - self, log_info: MagicMock, httpx_mock: HTTPXMock + self, log_info: MagicMock, httpx_mock: HTTPXMock, ): connector_name = "test-connector" @@ -410,7 +410,7 @@ def test_should_return_correct_response_when_deleting_connector( @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.info") def test_should_raise_connector_not_found_when_deleting_connector( - self, log_info: MagicMock, httpx_mock: HTTPXMock + self, log_info: MagicMock, httpx_mock: HTTPXMock, ): connector_name = "test-connector" @@ -425,12 +425,12 @@ def test_should_raise_connector_not_found_when_deleting_connector( self.connect_wrapper.delete_connector(connector_name) log_info.assert_called_once_with( - f"The named connector {connector_name} does not exists." + f"The named connector {connector_name} does not exists.", ) @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.warning") def test_should_raise_rebalance_in_progress_when_deleting_connector( - self, log_warning: MagicMock, httpx_mock: HTTPXMock + self, log_warning: MagicMock, httpx_mock: HTTPXMock, ): connector_name = "test-connector" @@ -448,12 +448,12 @@ def test_should_raise_rebalance_in_progress_when_deleting_connector( ) log_warning.assert_called_with( - "Rebalancing in progress while deleting a connector... Retrying..." + "Rebalancing in progress while deleting a connector... 
Retrying...", ) @patch("httpx.put") def test_should_create_correct_validate_connector_config_request( - self, mock_put: MagicMock + self, mock_put: MagicMock, ): connector_config = KafkaConnectorConfig( **{ @@ -461,7 +461,7 @@ def test_should_create_correct_validate_connector_config_request( "name": "FileStreamSinkConnector", "tasks.max": "1", "topics": "test-topic", - } + }, ) with pytest.raises(KafkaConnectError): self.connect_wrapper.validate_connector_config(connector_config) @@ -474,7 +474,7 @@ def test_should_create_correct_validate_connector_config_request( @patch("httpx.put") def test_should_create_correct_validate_connector_config_and_name_gets_added( - self, mock_put: MagicMock + self, mock_put: MagicMock, ): connector_name = "FileStreamSinkConnector" configs = { @@ -485,7 +485,7 @@ def test_should_create_correct_validate_connector_config_and_name_gets_added( } with pytest.raises(KafkaConnectError): self.connect_wrapper.validate_connector_config( - KafkaConnectorConfig(**configs) + KafkaConnectorConfig(**configs), ) mock_put.assert_called_with( @@ -514,9 +514,9 @@ def test_should_parse_validate_connector_config(self, httpx_mock: HTTPXMock): "topics": "test-topic", } errors = self.connect_wrapper.validate_connector_config( - KafkaConnectorConfig(**configs) + KafkaConnectorConfig(**configs), ) assert errors == [ - "Found error for field file: Missing required configuration 'file' which has no default value." + "Found error for field file: Missing required configuration 'file' which has no default value.", ] diff --git a/tests/component_handlers/schema_handler/resources/module.py b/tests/component_handlers/schema_handler/resources/module.py index 8c7168efa..4223179d3 100644 --- a/tests/component_handlers/schema_handler/resources/module.py +++ b/tests/component_handlers/schema_handler/resources/module.py @@ -9,6 +9,6 @@ class CustomSchemaProvider(SchemaProvider): def provide_schema( - self, schema_class: str, models: dict[ModelName, ModelVersion] + self, schema_class: str, models: dict[ModelName, ModelVersion], ) -> Schema: return AvroSchema({}) diff --git a/tests/component_handlers/schema_handler/test_schema_handler.py b/tests/component_handlers/schema_handler/test_schema_handler.py index ccea021c6..bd5815b12 100644 --- a/tests/component_handlers/schema_handler/test_schema_handler.py +++ b/tests/component_handlers/schema_handler/test_schema_handler.py @@ -28,28 +28,28 @@ @pytest.fixture(autouse=True) def log_info_mock(mocker: MockerFixture) -> MagicMock: return mocker.patch( - "kpops.component_handlers.schema_handler.schema_handler.log.info" + "kpops.component_handlers.schema_handler.schema_handler.log.info", ) @pytest.fixture(autouse=True) def log_debug_mock(mocker: MockerFixture) -> MagicMock: return mocker.patch( - "kpops.component_handlers.schema_handler.schema_handler.log.debug" + "kpops.component_handlers.schema_handler.schema_handler.log.debug", ) @pytest.fixture(autouse=False) def find_class_mock(mocker: MockerFixture) -> MagicMock: return mocker.patch( - "kpops.component_handlers.schema_handler.schema_handler.find_class" + "kpops.component_handlers.schema_handler.schema_handler.find_class", ) @pytest.fixture(autouse=True) def schema_registry_mock(mocker: MockerFixture) -> MagicMock: schema_registry_mock = mocker.patch( - "kpops.component_handlers.schema_handler.schema_handler.SchemaRegistryClient" + "kpops.component_handlers.schema_handler.schema_handler.SchemaRegistryClient", ) return schema_registry_mock.return_value @@ -96,16 +96,16 @@ def 
test_should_lazy_load_schema_provider(find_class_mock: MagicMock): schema_registry_url="http://localhost:8081", ) schema_handler = SchemaHandler.load_schema_handler( - TEST_SCHEMA_PROVIDER_MODULE, config_enable + TEST_SCHEMA_PROVIDER_MODULE, config_enable, ) assert schema_handler is not None schema_handler.schema_provider.provide_schema( - "com.bakdata.kpops.test.SchemaHandlerTest", {} + "com.bakdata.kpops.test.SchemaHandlerTest", {}, ) schema_handler.schema_provider.provide_schema( - "com.bakdata.kpops.test.SomeOtherSchemaClass", {} + "com.bakdata.kpops.test.SomeOtherSchemaClass", {}, ) find_class_mock.assert_called_once_with(TEST_SCHEMA_PROVIDER_MODULE, SchemaProvider) @@ -113,12 +113,12 @@ def test_should_lazy_load_schema_provider(find_class_mock: MagicMock): def test_should_raise_value_error_if_schema_provider_class_not_found(): schema_handler = SchemaHandler( - url="http://mock:8081", components_module=NON_EXISTING_PROVIDER_MODULE + url="http://mock:8081", components_module=NON_EXISTING_PROVIDER_MODULE, ) with pytest.raises(ValueError) as value_error: schema_handler.schema_provider.provide_schema( - "com.bakdata.kpops.test.SchemaHandlerTest", {} + "com.bakdata.kpops.test.SchemaHandlerTest", {}, ) assert ( @@ -140,14 +140,14 @@ def test_should_raise_value_error_when_schema_provider_is_called_and_components_ schema_handler = SchemaHandler.load_schema_handler(None, config_enable) assert schema_handler is not None schema_handler.schema_provider.provide_schema( - "com.bakdata.kpops.test.SchemaHandlerTest", {} + "com.bakdata.kpops.test.SchemaHandlerTest", {}, ) with pytest.raises(ValueError) as value_error: schema_handler = SchemaHandler.load_schema_handler("", config_enable) assert schema_handler is not None schema_handler.schema_provider.provide_schema( - "com.bakdata.kpops.test.SchemaHandlerTest", {} + "com.bakdata.kpops.test.SchemaHandlerTest", {}, ) assert ( @@ -157,10 +157,10 @@ def test_should_raise_value_error_when_schema_provider_is_called_and_components_ def test_should_log_info_when_submit_schemas_that_not_exists_and_dry_run_true( - to_section: ToSection, log_info_mock: MagicMock, schema_registry_mock: MagicMock + to_section: ToSection, log_info_mock: MagicMock, schema_registry_mock: MagicMock, ): schema_handler = SchemaHandler( - url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE + url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_registry_mock.get_versions.return_value = [] @@ -168,7 +168,7 @@ def test_should_log_info_when_submit_schemas_that_not_exists_and_dry_run_true( schema_handler.submit_schemas(to_section, True) log_info_mock.assert_called_once_with( - greenify("Schema Submission: The subject topic-X-value will be submitted.") + greenify("Schema Submission: The subject topic-X-value will be submitted."), ) schema_registry_mock.register.assert_not_called() @@ -180,7 +180,7 @@ def test_should_log_info_when_submit_schemas_that_exists_and_dry_run_true( schema_registry_mock: MagicMock, ): schema_handler = SchemaHandler( - url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE + url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_registry_mock.get_versions.return_value = [1, 2, 3] @@ -190,7 +190,7 @@ def test_should_log_info_when_submit_schemas_that_exists_and_dry_run_true( schema_handler.submit_schemas(to_section, True) log_info_mock.assert_called_once_with( - f"Schema Submission: compatible schema for topic-X-value with model {topic_config.value_schema}." 
+ f"Schema Submission: compatible schema for topic-X-value with model {topic_config.value_schema}.", ) schema_registry_mock.register.assert_not_called() @@ -202,7 +202,7 @@ def test_should_raise_exception_when_submit_schema_that_exists_and_not_compatibl ): schema_provider = TestSchemaProvider() schema_handler = SchemaHandler( - url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE + url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_class = "com.bakdata.kpops.test.SchemaHandlerTest" @@ -242,7 +242,7 @@ def test_should_log_debug_when_submit_schema_that_exists_and_registered_under_ve ): schema_provider = TestSchemaProvider() schema_handler = SchemaHandler( - url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE + url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_class = "com.bakdata.kpops.test.SchemaHandlerTest" schema = schema_provider.provide_schema(schema_class, {}) @@ -255,13 +255,13 @@ def test_should_log_debug_when_submit_schema_that_exists_and_registered_under_ve assert log_info_mock.mock_calls == [ mock.call( - f"Schema Submission: compatible schema for topic-X-value with model {topic_config.value_schema}." + f"Schema Submission: compatible schema for topic-X-value with model {topic_config.value_schema}.", ), ] assert log_debug_mock.mock_calls == [ mock.call( - f"Schema Submission: schema was already submitted for the subject topic-X-value as version {registered_version.schema}. Therefore, the specified schema must be compatible." + f"Schema Submission: schema was already submitted for the subject topic-X-value as version {registered_version.schema}. Therefore, the specified schema must be compatible.", ), ] @@ -278,7 +278,7 @@ def test_should_submit_non_existing_schema_when_not_dry( schema_class = "com.bakdata.kpops.test.SchemaHandlerTest" schema = schema_provider.provide_schema(schema_class, {}) schema_handler = SchemaHandler( - url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE + url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_registry_mock.get_versions.return_value = [] @@ -287,12 +287,12 @@ def test_should_submit_non_existing_schema_when_not_dry( subject = "topic-X-value" log_info_mock.assert_called_once_with( - f"Schema Submission: schema submitted for {subject} with model {topic_config.value_schema}." 
+ f"Schema Submission: schema submitted for {subject} with model {topic_config.value_schema}.", ) schema_registry_mock.get_versions.assert_not_called() schema_registry_mock.register.assert_called_once_with( - subject=subject, schema=schema + subject=subject, schema=schema, ) @@ -302,7 +302,7 @@ def test_should_log_correct_message_when_delete_schemas_and_in_dry_run( schema_registry_mock: MagicMock, ): schema_handler = SchemaHandler( - url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE + url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_registry_mock.get_versions.return_value = [] @@ -310,17 +310,17 @@ def test_should_log_correct_message_when_delete_schemas_and_in_dry_run( schema_handler.delete_schemas(to_section, True) log_info_mock.assert_called_once_with( - magentaify("Schema Deletion: will delete subject topic-X-value.") + magentaify("Schema Deletion: will delete subject topic-X-value."), ) schema_registry_mock.delete_subject.assert_not_called() def test_should_delete_schemas_when_not_in_dry_run( - to_section: ToSection, schema_registry_mock: MagicMock + to_section: ToSection, schema_registry_mock: MagicMock, ): schema_handler = SchemaHandler( - url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE + url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_registry_mock.get_versions.return_value = [] diff --git a/tests/component_handlers/topic/test_proxy_wrapper.py b/tests/component_handlers/topic/test_proxy_wrapper.py index 7b587ecb3..c8fb3e94e 100644 --- a/tests/component_handlers/topic/test_proxy_wrapper.py +++ b/tests/component_handlers/topic/test_proxy_wrapper.py @@ -32,12 +32,12 @@ def log_debug_mock(self, mocker: MockerFixture) -> MagicMock: @pytest.fixture(autouse=True) def setup(self, httpx_mock: HTTPXMock): config = PipelineConfig( - defaults_path=DEFAULTS_PATH, environment="development", kafka_rest_host=HOST + defaults_path=DEFAULTS_PATH, environment="development", kafka_rest_host=HOST, ) self.proxy_wrapper = ProxyWrapper(pipeline_config=config) with open( - DEFAULTS_PATH / "kafka_rest_proxy_responses" / "cluster-info.json" + DEFAULTS_PATH / "kafka_rest_proxy_responses" / "cluster-info.json", ) as f: cluster_response = json.load(f) @@ -62,7 +62,7 @@ def test_should_raise_exception_when_host_is_not_set(self): @patch("httpx.post") def test_should_create_topic_with_all_topic_configuration( - self, mock_post: MagicMock + self, mock_post: MagicMock, ): topic_spec = { "topic_name": "topic-X", @@ -128,7 +128,7 @@ def test_should_call_batch_alter_topic_config(self, mock_put: MagicMock): "data": [ {"name": "cleanup.policy", "operation": "DELETE"}, {"name": "compression.type", "value": "gzip"}, - ] + ], }, ) @@ -155,7 +155,7 @@ def test_should_call_get_broker_config(self, mock_get: MagicMock): ) def test_should_log_topic_creation( - self, log_info_mock: MagicMock, httpx_mock: HTTPXMock + self, log_info_mock: MagicMock, httpx_mock: HTTPXMock, ): topic_spec = { "topic_name": "topic-X", @@ -178,7 +178,7 @@ def test_should_log_topic_creation( log_info_mock.assert_called_once_with("Topic topic-X created.") def test_should_log_topic_deletion( - self, log_info_mock: MagicMock, httpx_mock: HTTPXMock + self, log_info_mock: MagicMock, httpx_mock: HTTPXMock, ): topic_name = "topic-X" @@ -225,7 +225,7 @@ def test_should_get_topic(self, log_debug_mock: MagicMock, httpx_mock: HTTPXMock assert get_topic_response == topic_response def test_should_rais_topic_not_found_exception_get_topic( - self, log_debug_mock: 
MagicMock, httpx_mock: HTTPXMock + self, log_debug_mock: MagicMock, httpx_mock: HTTPXMock, ): topic_name = "topic-X" @@ -244,7 +244,7 @@ def test_should_rais_topic_not_found_exception_get_topic( log_debug_mock.assert_any_call("Topic topic-X not found.") def test_should_log_reset_default_topic_config_when_deleted( - self, log_info_mock: MagicMock, httpx_mock: HTTPXMock + self, log_info_mock: MagicMock, httpx_mock: HTTPXMock, ): topic_name = "topic-X" config_name = "cleanup.policy" @@ -263,5 +263,5 @@ def test_should_log_reset_default_topic_config_when_deleted( ) log_info_mock.assert_called_once_with( - f"Config of topic {topic_name} was altered." + f"Config of topic {topic_name} was altered.", ) diff --git a/tests/component_handlers/topic/test_topic_handler.py b/tests/component_handlers/topic/test_topic_handler.py index c53a7a60d..a64a239a9 100644 --- a/tests/component_handlers/topic/test_topic_handler.py +++ b/tests/component_handlers/topic/test_topic_handler.py @@ -52,17 +52,17 @@ def log_error_mock(self, mocker: MockerFixture) -> MagicMock: @pytest.fixture(autouse=True) def get_topic_response_mock(self) -> MagicMock: with open( - DEFAULTS_PATH / "kafka_rest_proxy_responses/get_topic_response.json" + DEFAULTS_PATH / "kafka_rest_proxy_responses/get_topic_response.json", ) as f: response = json.load(f) with open( - DEFAULTS_PATH / "kafka_rest_proxy_responses/broker_response.json" + DEFAULTS_PATH / "kafka_rest_proxy_responses/broker_response.json", ) as f: broker_response = json.load(f) with open( - DEFAULTS_PATH / "kafka_rest_proxy_responses/topic_config_response.json" + DEFAULTS_PATH / "kafka_rest_proxy_responses/topic_config_response.json", ) as f: response_topic_config = json.load(f) @@ -70,19 +70,19 @@ def get_topic_response_mock(self) -> MagicMock: wrapper.get_topic.return_value = TopicResponse(**response) wrapper.get_broker_config.return_value = BrokerConfigResponse(**broker_response) wrapper.get_topic_config.return_value = TopicConfigResponse( - **response_topic_config + **response_topic_config, ) return wrapper @pytest.fixture(autouse=True) def get_default_topic_response_mock(self) -> MagicMock: with open( - DEFAULTS_PATH / "kafka_rest_proxy_responses/get_default_topic_response.json" + DEFAULTS_PATH / "kafka_rest_proxy_responses/get_default_topic_response.json", ) as f: response = json.load(f) with open( - DEFAULTS_PATH / "kafka_rest_proxy_responses/broker_response.json" + DEFAULTS_PATH / "kafka_rest_proxy_responses/broker_response.json", ) as f: broker_response = json.load(f) @@ -120,7 +120,7 @@ def test_should_call_create_topic_with_dry_run_false(self): wrapper.__dry_run_topic_creation.assert_not_called() def test_should_call_update_topic_config_when_topic_exists_and_with_dry_run_false( - self, get_topic_response_mock: MagicMock + self, get_topic_response_mock: MagicMock, ): wrapper = get_topic_response_mock topic_handler = TopicHandler(proxy_wrapper=wrapper) @@ -146,7 +146,7 @@ def test_should_call_update_topic_config_when_topic_exists_and_with_dry_run_fals wrapper.__dry_run_topic_creation.assert_not_called() def test_should_update_topic_config_when_one_config_changed( - self, log_info_mock: MagicMock, get_topic_response_mock: MagicMock + self, log_info_mock: MagicMock, get_topic_response_mock: MagicMock, ): wrapper = get_topic_response_mock @@ -168,7 +168,7 @@ def test_should_update_topic_config_when_one_config_changed( ) def test_should_not_update_topic_config_when_config_not_changed( - self, log_info_mock: MagicMock, get_topic_response_mock: MagicMock + self, log_info_mock: 
MagicMock, get_topic_response_mock: MagicMock, ): wrapper = get_topic_response_mock @@ -186,11 +186,11 @@ def test_should_not_update_topic_config_when_config_not_changed( wrapper.batch_alter_topic_config.assert_not_called() log_info_mock.assert_called_once_with( - "Topic Creation: config of topic topic-X didn't change. Skipping update." + "Topic Creation: config of topic topic-X didn't change. Skipping update.", ) def test_should_not_update_topic_config_when_config_not_changed_and_not_ordered( - self, log_info_mock: MagicMock, get_topic_response_mock: MagicMock + self, log_info_mock: MagicMock, get_topic_response_mock: MagicMock, ): wrapper = get_topic_response_mock topic_handler = TopicHandler(proxy_wrapper=wrapper) @@ -207,11 +207,11 @@ def test_should_not_update_topic_config_when_config_not_changed_and_not_ordered( wrapper.batch_alter_topic_config.assert_not_called() log_info_mock.assert_called_once_with( - "Topic Creation: config of topic topic-X didn't change. Skipping update." + "Topic Creation: config of topic topic-X didn't change. Skipping update.", ) def test_should_call_reset_topic_config_when_topic_exists_dry_run_false_and_topic_configs_change( - self, get_topic_response_mock: MagicMock + self, get_topic_response_mock: MagicMock, ): wrapper = get_topic_response_mock @@ -251,7 +251,7 @@ def test_should_not_call_create_topics_with_dry_run_true_and_topic_not_exists(se wrapper.create_topic.assert_not_called() def test_should_print_message_with_dry_run_true_and_topic_not_exists( - self, log_info_mock: MagicMock + self, log_info_mock: MagicMock, ): wrapper = MagicMock() wrapper.get_topic.side_effect = TopicNotFoundException() @@ -271,8 +271,8 @@ def test_should_print_message_with_dry_run_true_and_topic_not_exists( log_info_mock.assert_called_once_with( greenify( - "Topic Creation: topic-X does not exist in the cluster. Creating topic." - ) + "Topic Creation: topic-X does not exist in the cluster. Creating topic.", + ), ) def test_should_print_message_if_dry_run_and_topic_exists_with_same_partition_count_and_replication_factor( @@ -295,19 +295,19 @@ def test_should_print_message_if_dry_run_and_topic_exists_with_same_partition_co topic_handler.create_topics(to_section=to_section, dry_run=True) wrapper.get_topic_config.assert_called_once() # dry run requests the config to create the diff assert log_info_mock.mock_calls == [ - mock.call("Topic Creation: topic-X already exists in cluster.") + mock.call("Topic Creation: topic-X already exists in cluster."), ] assert log_debug_mock.mock_calls == [ mock.call("HTTP/1.1 400 Bad Request"), mock.call({"Content-Type": "application/json"}), mock.call( - {"error_code": 40002, "message": "Topic 'topic-X' already exists."} + {"error_code": 40002, "message": "Topic 'topic-X' already exists."}, ), mock.call( - "Topic Creation: partition count of topic topic-X did not change. Current partitions count 10. Updating configs." + "Topic Creation: partition count of topic topic-X did not change. Current partitions count 10. Updating configs.", ), mock.call( - "Topic Creation: replication factor of topic topic-X did not change. Current replication factor 3. Updating configs." + "Topic Creation: replication factor of topic topic-X did not change. Current replication factor 3. 
Updating configs.", ), ] @@ -331,7 +331,7 @@ def test_should_print_message_if_dry_run_and_topic_exists_with_default_partition assert log_info_mock.mock_calls == [ mock.call("Config changes for topic topic-X:"), mock.call( - "\n\x1b[32m+ cleanup.policy: compact\n\x1b[0m\x1b[32m+ compression.type: gzip\n\x1b[0m" + "\n\x1b[32m+ cleanup.policy: compact\n\x1b[0m\x1b[32m+ compression.type: gzip\n\x1b[0m", ), mock.call("Topic Creation: topic-X already exists in cluster."), ] @@ -339,18 +339,18 @@ def test_should_print_message_if_dry_run_and_topic_exists_with_default_partition mock.call("HTTP/1.1 400 Bad Request"), mock.call({"Content-Type": "application/json"}), mock.call( - {"error_code": 40002, "message": "Topic 'topic-X' already exists."} + {"error_code": 40002, "message": "Topic 'topic-X' already exists."}, ), mock.call( - "Topic Creation: partition count of topic topic-X did not change. Current partitions count 1. Updating configs." + "Topic Creation: partition count of topic topic-X did not change. Current partitions count 1. Updating configs.", ), mock.call( - "Topic Creation: replication factor of topic topic-X did not change. Current replication factor 1. Updating configs." + "Topic Creation: replication factor of topic topic-X did not change. Current replication factor 1. Updating configs.", ), ] def test_should_exit_if_dry_run_and_topic_exists_different_partition_count( - self, get_topic_response_mock: MagicMock + self, get_topic_response_mock: MagicMock, ): wrapper = get_topic_response_mock @@ -372,7 +372,7 @@ def test_should_exit_if_dry_run_and_topic_exists_different_partition_count( wrapper.get_topic_config.assert_called_once() # dry run requests the config to create the diff def test_should_exit_if_dry_run_and_topic_exists_different_replication_factor( - self, get_topic_response_mock: MagicMock + self, get_topic_response_mock: MagicMock, ): wrapper = get_topic_response_mock @@ -394,7 +394,7 @@ def test_should_exit_if_dry_run_and_topic_exists_different_replication_factor( wrapper.get_topic_config.assert_called_once() # dry run requests the config to create the diff def test_should_log_correct_message_when_delete_existing_topic_dry_run( - self, log_info_mock: MagicMock, get_topic_response_mock: MagicMock + self, log_info_mock: MagicMock, get_topic_response_mock: MagicMock, ): wrapper = get_topic_response_mock @@ -413,12 +413,12 @@ def test_should_log_correct_message_when_delete_existing_topic_dry_run( wrapper.get_topic.assert_called_once_with(topic_name="topic-X") log_info_mock.assert_called_once_with( magentaify( - "Topic Deletion: topic topic-X exists in the cluster. Deleting topic." - ) + "Topic Deletion: topic topic-X exists in the cluster. Deleting topic.", + ), ) def test_should_log_correct_message_when_delete_non_existing_topic_dry_run( - self, log_warning_mock: MagicMock + self, log_warning_mock: MagicMock, ): wrapper = MagicMock() wrapper.get_topic.side_effect = TopicNotFoundException @@ -437,7 +437,7 @@ def test_should_log_correct_message_when_delete_non_existing_topic_dry_run( wrapper.get_topic.assert_called_once_with(topic_name="topic-X") log_warning_mock.assert_called_once_with( - "Topic Deletion: topic topic-X does not exist in the cluster and cannot be deleted. Skipping." + "Topic Deletion: topic topic-X does not exist in the cluster and cannot be deleted. 
Skipping.", ) def test_should_call_delete_topic_not_dry_run(self): @@ -460,7 +460,7 @@ def test_should_call_delete_topic_not_dry_run(self): ] def test_should_print_correct_warning_when_deleting_topic_that_does_not_exists_not_dry_run( - self, log_warning_mock: MagicMock + self, log_warning_mock: MagicMock, ): wrapper = MagicMock() topic_handler = TopicHandler(proxy_wrapper=wrapper) @@ -478,5 +478,5 @@ def test_should_print_correct_warning_when_deleting_topic_that_does_not_exists_n wrapper.get_topic.assert_called_once_with(topic_name="topic-X") log_warning_mock.assert_called_once_with( - "Topic Deletion: topic topic-X does not exist in the cluster and cannot be deleted. Skipping." + "Topic Deletion: topic topic-X does not exist in the cluster and cannot be deleted. Skipping.", ) diff --git a/tests/component_handlers/topic/test_utils.py b/tests/component_handlers/topic/test_utils.py index b5f0133ca..0d3bd1170 100644 --- a/tests/component_handlers/topic/test_utils.py +++ b/tests/component_handlers/topic/test_utils.py @@ -86,7 +86,7 @@ "name": "log.flush.interval.messages", "source": "DEFAULT_CONFIG", "value": "9223372036854775807", - } + }, ], "topic_name": "fake", "value": "9223372036854775807", @@ -108,7 +108,7 @@ "name": "flush.ms", "source": "DEFAULT_CONFIG", "value": "9223372036854775807", - } + }, ], "topic_name": "fake", "value": "9223372036854775807", @@ -247,7 +247,7 @@ def test_get_effective_config(): ], }, ], - } + }, ) effective_config = get_effective_config( diff --git a/tests/components/test_base_defaults_component.py b/tests/components/test_base_defaults_component.py index 7b25e5f74..6e3e3d570 100644 --- a/tests/components/test_base_defaults_component.py +++ b/tests/components/test_base_defaults_component.py @@ -77,7 +77,7 @@ class TestBaseDefaultsComponent: ], ) def test_load_defaults( - self, component_class: type[BaseDefaultsComponent], defaults: dict + self, component_class: type[BaseDefaultsComponent], defaults: dict, ): assert ( load_defaults(component_class, DEFAULTS_PATH / "defaults.yaml") == defaults @@ -105,7 +105,7 @@ def test_load_defaults( ], ) def test_load_defaults_with_environment( - self, component_class: type[BaseDefaultsComponent], defaults: dict + self, component_class: type[BaseDefaultsComponent], defaults: dict, ): assert ( load_defaults( @@ -117,7 +117,7 @@ def test_load_defaults_with_environment( ) def test_inherit_defaults( - self, config: PipelineConfig, handlers: ComponentHandlers + self, config: PipelineConfig, handlers: ComponentHandlers, ): component = Child(config=config, handlers=handlers) @@ -125,7 +125,7 @@ def test_inherit_defaults( component.name == "fake-child-name" ), "Child default should overwrite parent default" assert component.nice == { - "fake-value": "fake" + "fake-value": "fake", }, "Field introduce by child should be added" assert ( component.value == 2.0 @@ -148,7 +148,7 @@ def test_inherit(self, config: PipelineConfig, handlers: ComponentHandlers): component.name == "name-defined-in-pipeline_generator" ), "Kwargs should should overwrite all other values" assert component.nice == { - "fake-value": "fake" + "fake-value": "fake", }, "Field introduce by child should be added" assert ( component.value == 2.0 @@ -161,7 +161,7 @@ def test_inherit(self, config: PipelineConfig, handlers: ComponentHandlers): ), "Defaults in code should be kept for parents" def test_multiple_generations( - self, config: PipelineConfig, handlers: ComponentHandlers + self, config: PipelineConfig, handlers: ComponentHandlers, ): component = 
GrandChild(config=config, handlers=handlers) @@ -169,7 +169,7 @@ def test_multiple_generations( component.name == "fake-child-name" ), "Child default should overwrite parent default" assert component.nice == { - "fake-value": "fake" + "fake-value": "fake", }, "Field introduce by child should be added" assert ( component.value == 2.0 @@ -183,11 +183,11 @@ def test_multiple_generations( assert component.grand_child == "grand-child-value" def test_env_var_substitution( - self, config: PipelineConfig, handlers: ComponentHandlers + self, config: PipelineConfig, handlers: ComponentHandlers, ): ENV["pipeline_name"] = str(DEFAULTS_PATH) component = EnvVarTest(config=config, handlers=handlers) assert component.name == str( - DEFAULTS_PATH + DEFAULTS_PATH, ), "Environment variables should be substituted" diff --git a/tests/components/test_kafka_app.py b/tests/components/test_kafka_app.py index c6527c00c..aae796153 100644 --- a/tests/components/test_kafka_app.py +++ b/tests/components/test_kafka_app.py @@ -80,7 +80,7 @@ def test_should_deploy_kafka_app( ) helm_upgrade_install = mocker.patch.object(kafka_app.helm, "upgrade_install") print_helm_diff = mocker.patch.object( - kafka_app.dry_run_handler, "print_helm_diff" + kafka_app.dry_run_handler, "print_helm_diff", ) mocker.patch.object( KafkaApp, diff --git a/tests/components/test_kafka_connector.py b/tests/components/test_kafka_connector.py index 912f449fb..e22d26d52 100644 --- a/tests/components/test_kafka_connector.py +++ b/tests/components/test_kafka_connector.py @@ -42,13 +42,13 @@ def handlers(self) -> ComponentHandlers: @pytest.fixture(autouse=True) def helm_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch( - "kpops.components.base_components.kafka_connector.Helm" + "kpops.components.base_components.kafka_connector.Helm", ).return_value @pytest.fixture def dry_run_handler(self, mocker: MockerFixture) -> MagicMock: return mocker.patch( - "kpops.components.base_components.kafka_connector.DryRunHandler" + "kpops.components.base_components.kafka_connector.DryRunHandler", ).return_value @pytest.fixture @@ -57,7 +57,7 @@ def connector_config(self) -> KafkaConnectorConfig: **{ "connector.class": CONNECTOR_CLASS, "name": CONNECTOR_FULL_NAME, - } + }, ) def test_connector_config_name_override( @@ -85,7 +85,7 @@ def test_connector_config_name_override( assert connector.app.name == CONNECTOR_FULL_NAME with pytest.raises( - ValueError, match="Connector name should be the same as component name" + ValueError, match="Connector name should be the same as component name", ): KafkaConnector( name=CONNECTOR_NAME, @@ -96,7 +96,7 @@ def test_connector_config_name_override( ) with pytest.raises( - ValueError, match="Connector name should be the same as component name" + ValueError, match="Connector name should be the same as component name", ): KafkaConnector( name=CONNECTOR_NAME, diff --git a/tests/components/test_kafka_sink_connector.py b/tests/components/test_kafka_sink_connector.py index 91760e90c..a0650c633 100644 --- a/tests/components/test_kafka_sink_connector.py +++ b/tests/components/test_kafka_sink_connector.py @@ -55,9 +55,9 @@ def connector( to=ToSection( topics={ TopicName("${output_topic_name}"): TopicConfig( - type=OutputTopicTypes.OUTPUT, partitions_count=10 + type=OutputTopicTypes.OUTPUT, partitions_count=10, ), - } + }, ), ) @@ -73,7 +73,7 @@ def test_connector_config_parsing( config=config, handlers=handlers, app=KafkaConnectorConfig( - **{**connector_config.dict(), "topics": topic_name} + **{**connector_config.dict(), 
"topics": topic_name}, ), namespace="test-namespace", ) @@ -85,7 +85,7 @@ def test_connector_config_parsing( config=config, handlers=handlers, app=KafkaConnectorConfig( - **{**connector_config.dict(), "topics.regex": topic_pattern} + **{**connector_config.dict(), "topics.regex": topic_pattern}, ), namespace="test-namespace", ) @@ -109,7 +109,7 @@ def test_from_section_parsing_input_topic( topics={ topic1: FromTopic(type=InputTopicTypes.INPUT), topic2: FromTopic(type=InputTopicTypes.INPUT), - } + }, ), ) assert getattr(connector.app, "topics") == f"{topic1},{topic2}" @@ -132,7 +132,7 @@ def test_from_section_parsing_input_pattern( app=connector_config, namespace="test-namespace", from_=FromSection( # pyright: ignore[reportGeneralTypeIssues] wrong diagnostic when using TopicName as topics key type - topics={topic_pattern: FromTopic(type=InputTopicTypes.PATTERN)} + topics={topic_pattern: FromTopic(type=InputTopicTypes.PATTERN)}, ), ) assert getattr(connector.app, "topics.regex") == topic_pattern @@ -143,10 +143,10 @@ def test_deploy_order( mocker: MockerFixture, ): mock_create_topics = mocker.patch.object( - connector.handlers.topic_handler, "create_topics" + connector.handlers.topic_handler, "create_topics", ) mock_create_connector = mocker.patch.object( - connector.handlers.connector_handler, "create_connector" + connector.handlers.connector_handler, "create_connector", ) mock = mocker.MagicMock() @@ -164,13 +164,13 @@ def test_destroy( mocker: MockerFixture, ): mock_destroy_connector = mocker.patch.object( - connector.handlers.connector_handler, "destroy_connector" + connector.handlers.connector_handler, "destroy_connector", ) connector.destroy(dry_run=True) mock_destroy_connector.assert_called_once_with( - CONNECTOR_FULL_NAME, dry_run=True + CONNECTOR_FULL_NAME, dry_run=True, ) def test_reset_when_dry_run_is_true( @@ -191,10 +191,10 @@ def test_reset_when_dry_run_is_false( mocker: MockerFixture, ): mock_delete_topics = mocker.patch.object( - connector.handlers.topic_handler, "delete_topics" + connector.handlers.topic_handler, "delete_topics", ) mock_clean_connector = mocker.patch.object( - connector.handlers.connector_handler, "clean_connector" + connector.handlers.connector_handler, "clean_connector", ) mock = mocker.MagicMock() mock.attach_mock(mock_clean_connector, "mock_clean_connector") @@ -264,10 +264,10 @@ def test_clean_when_dry_run_is_false( mocker: MockerFixture, ): mock_delete_topics = mocker.patch.object( - connector.handlers.topic_handler, "delete_topics" + connector.handlers.topic_handler, "delete_topics", ) mock_clean_connector = mocker.patch.object( - connector.handlers.connector_handler, "clean_connector" + connector.handlers.connector_handler, "clean_connector", ) mock = mocker.MagicMock() @@ -281,13 +281,13 @@ def test_clean_when_dry_run_is_false( assert log_info_mock.mock_calls == [ call.log_info( magentaify( - f"Connector Cleanup: uninstalling cleanup job Helm release from previous runs for {CONNECTOR_FULL_NAME}" - ) + f"Connector Cleanup: uninstalling cleanup job Helm release from previous runs for {CONNECTOR_FULL_NAME}", + ), ), call.log_info( magentaify( - f"Connector Cleanup: deploy Connect {KafkaConnectorType.SINK.value} resetter for {CONNECTOR_FULL_NAME}" - ) + f"Connector Cleanup: deploy Connect {KafkaConnectorType.SINK.value} resetter for {CONNECTOR_FULL_NAME}", + ), ), call.log_info(magentaify("Connector Cleanup: uninstall Kafka Resetter.")), ] @@ -369,10 +369,10 @@ def test_clean_without_to_when_dry_run_is_false( ) mock_delete_topics = mocker.patch.object( - 
connector.handlers.topic_handler, "delete_topics" + connector.handlers.topic_handler, "delete_topics", ) mock_clean_connector = mocker.patch.object( - connector.handlers.connector_handler, "clean_connector" + connector.handlers.connector_handler, "clean_connector", ) mock = mocker.MagicMock() mock.attach_mock(mock_delete_topics, "mock_delete_topics") diff --git a/tests/components/test_kafka_source_connector.py b/tests/components/test_kafka_source_connector.py index db9a2dd77..18548df34 100644 --- a/tests/components/test_kafka_source_connector.py +++ b/tests/components/test_kafka_source_connector.py @@ -48,9 +48,9 @@ def connector( to=ToSection( topics={ TopicName("${output_topic_name}"): TopicConfig( - type=OutputTopicTypes.OUTPUT, partitions_count=10 + type=OutputTopicTypes.OUTPUT, partitions_count=10, ), - } + }, ), offset_topic="kafka-connect-offsets", ) @@ -71,9 +71,9 @@ def test_from_section_raises_exception( from_=FromSection( # pyright: ignore[reportGeneralTypeIssues] wrong diagnostic when using TopicName as topics key type topics={ TopicName("connector-topic"): FromTopic( - type=InputTopicTypes.INPUT + type=InputTopicTypes.INPUT, ), - } + }, ), ) @@ -83,11 +83,11 @@ def test_deploy_order( mocker: MockerFixture, ): mock_create_topics = mocker.patch.object( - connector.handlers.topic_handler, "create_topics" + connector.handlers.topic_handler, "create_topics", ) mock_create_connector = mocker.patch.object( - connector.handlers.connector_handler, "create_connector" + connector.handlers.connector_handler, "create_connector", ) mock = mocker.MagicMock() @@ -108,13 +108,13 @@ def test_destroy( assert connector.handlers.connector_handler mock_destroy_connector = mocker.patch.object( - connector.handlers.connector_handler, "destroy_connector" + connector.handlers.connector_handler, "destroy_connector", ) connector.destroy(dry_run=True) mock_destroy_connector.assert_called_once_with( - CONNECTOR_FULL_NAME, dry_run=True + CONNECTOR_FULL_NAME, dry_run=True, ) def test_reset_when_dry_run_is_true( @@ -137,10 +137,10 @@ def test_reset_when_dry_run_is_false( ): assert connector.handlers.connector_handler mock_delete_topics = mocker.patch.object( - connector.handlers.topic_handler, "delete_topics" + connector.handlers.topic_handler, "delete_topics", ) mock_clean_connector = mocker.spy( - connector.handlers.connector_handler, "clean_connector" + connector.handlers.connector_handler, "clean_connector", ) mock = mocker.MagicMock() @@ -210,10 +210,10 @@ def test_clean_when_dry_run_is_false( assert connector.handlers.connector_handler mock_delete_topics = mocker.patch.object( - connector.handlers.topic_handler, "delete_topics" + connector.handlers.topic_handler, "delete_topics", ) mock_clean_connector = mocker.spy( - connector.handlers.connector_handler, "clean_connector" + connector.handlers.connector_handler, "clean_connector", ) mock = mocker.MagicMock() @@ -286,10 +286,10 @@ def test_clean_without_to_when_dry_run_is_false( assert connector.handlers.connector_handler mock_delete_topics = mocker.patch.object( - connector.handlers.topic_handler, "delete_topics" + connector.handlers.topic_handler, "delete_topics", ) mock_clean_connector = mocker.spy( - connector.handlers.connector_handler, "clean_connector" + connector.handlers.connector_handler, "clean_connector", ) mock = mocker.MagicMock() diff --git a/tests/components/test_kubernetes_app.py b/tests/components/test_kubernetes_app.py index 46eb9795d..cc2b4d275 100644 --- a/tests/components/test_kubernetes_app.py +++ 
b/tests/components/test_kubernetes_app.py @@ -46,7 +46,7 @@ def handlers(self) -> ComponentHandlers: @pytest.fixture def helm_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch( - "kpops.components.base_components.kubernetes_app.Helm" + "kpops.components.base_components.kubernetes_app.Helm", ).return_value @pytest.fixture @@ -113,7 +113,7 @@ def test_should_lazy_load_helm_wrapper_and_call_repo_add_when_implemented( app_value: KubernetesTestValue, ): repo_config = HelmRepoConfig( - repository_name="test-repo", url="https://test.com/charts/" + repository_name="test-repo", url="https://test.com/charts/", ) kubernetes_app = KubernetesApp( name="test-kubernetes-app", @@ -211,7 +211,7 @@ def test_should_call_helm_uninstall_when_destroying_kubernetes_app( kubernetes_app.destroy(True) helm_mock.uninstall.assert_called_once_with( - "test-namespace", "${pipeline_name}-test-kubernetes-app", True + "test-namespace", "${pipeline_name}-test-kubernetes-app", True, ) log_info_mock.assert_called_once_with(magentaify(stdout)) @@ -224,7 +224,7 @@ def test_should_raise_value_error_when_name_is_not_valid( repo_config: HelmRepoConfig, ): with pytest.raises( - ValueError, match=r"The component name .* is invalid for Kubernetes." + ValueError, match=r"The component name .* is invalid for Kubernetes.", ): KubernetesApp( name="Not-Compatible*", @@ -236,7 +236,7 @@ def test_should_raise_value_error_when_name_is_not_valid( ) with pytest.raises( - ValueError, match=r"The component name .* is invalid for Kubernetes." + ValueError, match=r"The component name .* is invalid for Kubernetes.", ): KubernetesApp( name="snake_case*", diff --git a/tests/components/test_producer_app.py b/tests/components/test_producer_app.py index 56d52a68b..216202b60 100644 --- a/tests/components/test_producer_app.py +++ b/tests/components/test_producer_app.py @@ -42,7 +42,7 @@ def config(self) -> PipelineConfig: @pytest.fixture def producer_app( - self, config: PipelineConfig, handlers: ComponentHandlers + self, config: PipelineConfig, handlers: ComponentHandlers, ) -> ProducerApp: return ProducerApp( name=self.PRODUCER_APP_NAME, @@ -58,9 +58,9 @@ def producer_app( "to": { "topics": { "${output_topic_name}": TopicConfig( - type=OutputTopicTypes.OUTPUT, partitions_count=10 + type=OutputTopicTypes.OUTPUT, partitions_count=10, ), - } + }, }, }, ) @@ -79,20 +79,20 @@ def test_output_topics(self, config: PipelineConfig, handlers: ComponentHandlers "to": { "topics": { "${output_topic_name}": TopicConfig( - type=OutputTopicTypes.OUTPUT, partitions_count=10 + type=OutputTopicTypes.OUTPUT, partitions_count=10, ), "extra-topic-1": TopicConfig( role="first-extra-topic", partitions_count=10, ), - } + }, }, }, ) assert producer_app.app.streams.output_topic == "${output_topic_name}" assert producer_app.app.streams.extra_output_topics == { - "first-extra-topic": "extra-topic-1" + "first-extra-topic": "extra-topic-1", } def test_deploy_order_when_dry_run_is_false( @@ -101,11 +101,11 @@ def test_deploy_order_when_dry_run_is_false( mocker: MockerFixture, ): mock_create_topics = mocker.patch.object( - producer_app.handlers.topic_handler, "create_topics" + producer_app.handlers.topic_handler, "create_topics", ) mock_helm_upgrade_install = mocker.patch.object( - producer_app.helm, "upgrade_install" + producer_app.helm, "upgrade_install", ) mock = mocker.MagicMock() @@ -150,7 +150,7 @@ def test_destroy( producer_app.destroy(dry_run=True) mock_helm_uninstall.assert_called_once_with( - "test-namespace", "${pipeline_name}-" + self.PRODUCER_APP_NAME, 
True + "test-namespace", "${pipeline_name}-" + self.PRODUCER_APP_NAME, True, ) def test_should_not_reset_producer_app( @@ -159,11 +159,11 @@ def test_should_not_reset_producer_app( mocker: MockerFixture, ): mock_helm_upgrade_install = mocker.patch.object( - producer_app.helm, "upgrade_install" + producer_app.helm, "upgrade_install", ) mock_helm_uninstall = mocker.patch.object(producer_app.helm, "uninstall") mock_helm_print_helm_diff = mocker.patch.object( - producer_app.dry_run_handler, "print_helm_diff" + producer_app.dry_run_handler, "print_helm_diff", ) mock = mocker.MagicMock() @@ -205,10 +205,10 @@ def test_should_not_reset_producer_app( ] def test_should_clean_producer_app_and_deploy_clean_up_job_and_delete_clean_up_with_dry_run_false( - self, mocker: MockerFixture, producer_app: ProducerApp + self, mocker: MockerFixture, producer_app: ProducerApp, ): mock_helm_upgrade_install = mocker.patch.object( - producer_app.helm, "upgrade_install" + producer_app.helm, "upgrade_install", ) mock_helm_uninstall = mocker.patch.object(producer_app.helm, "uninstall") diff --git a/tests/components/test_streams_app.py b/tests/components/test_streams_app.py index dce2c7e96..8cc46d538 100644 --- a/tests/components/test_streams_app.py +++ b/tests/components/test_streams_app.py @@ -47,7 +47,7 @@ def config(self) -> PipelineConfig: @pytest.fixture def streams_app( - self, config: PipelineConfig, handlers: ComponentHandlers + self, config: PipelineConfig, handlers: ComponentHandlers, ) -> StreamsApp: return StreamsApp( name=self.STREAMS_APP_NAME, @@ -61,9 +61,9 @@ def streams_app( "to": { "topics": { "${output_topic_name}": TopicConfig( - type=OutputTopicTypes.OUTPUT, partitions_count=10 + type=OutputTopicTypes.OUTPUT, partitions_count=10, ), - } + }, }, }, ) @@ -91,7 +91,7 @@ def test_set_topics(self, config: PipelineConfig, handlers: ComponentHandlers): "type": "pattern", "role": "another-pattern", }, - } + }, }, }, ) @@ -102,7 +102,7 @@ def test_set_topics(self, config: PipelineConfig, handlers: ComponentHandlers): assert streams_app.app.streams.input_topics == ["example-input", "b", "a"] assert streams_app.app.streams.input_pattern == ".*" assert streams_app.app.streams.extra_input_patterns == { - "another-pattern": "example.*" + "another-pattern": "example.*", } helm_values = streams_app.to_helm_values() @@ -113,7 +113,7 @@ def test_set_topics(self, config: PipelineConfig, handlers: ComponentHandlers): assert "extraInputPatterns" in streams_config def test_no_empty_input_topic( - self, config: PipelineConfig, handlers: ComponentHandlers + self, config: PipelineConfig, handlers: ComponentHandlers, ): streams_app = StreamsApp( name=self.STREAMS_APP_NAME, @@ -127,7 +127,7 @@ def test_no_empty_input_topic( "from": { "topics": { ".*": {"type": "pattern"}, - } + }, }, }, ) @@ -160,8 +160,8 @@ def test_should_validate(self, config: PipelineConfig, handlers: ComponentHandle "topic-input": { "type": "input", "role": "role", - } - } + }, + }, }, }, ) @@ -182,14 +182,14 @@ def test_should_validate(self, config: PipelineConfig, handlers: ComponentHandle "topic-input": { "type": "error", "role": "role", - } - } + }, + }, }, }, ) def test_set_streams_output_from_to( - self, config: PipelineConfig, handlers: ComponentHandlers + self, config: PipelineConfig, handlers: ComponentHandlers, ): streams_app = StreamsApp( name=self.STREAMS_APP_NAME, @@ -203,10 +203,10 @@ def test_set_streams_output_from_to( "to": { "topics": { "${output_topic_name}": TopicConfig( - type=OutputTopicTypes.OUTPUT, partitions_count=10 + 
type=OutputTopicTypes.OUTPUT, partitions_count=10, ), "${error_topic_name}": TopicConfig( - type=OutputTopicTypes.ERROR, partitions_count=10 + type=OutputTopicTypes.ERROR, partitions_count=10, ), "extra-topic-1": TopicConfig( role="first-extra-topic", @@ -216,7 +216,7 @@ def test_set_streams_output_from_to( role="second-extra-topic", partitions_count=10, ), - } + }, }, }, ) @@ -228,7 +228,7 @@ def test_set_streams_output_from_to( assert streams_app.app.streams.error_topic == "${error_topic_name}" def test_weave_inputs_from_prev_component( - self, config: PipelineConfig, handlers: ComponentHandlers + self, config: PipelineConfig, handlers: ComponentHandlers, ): streams_app = StreamsApp( name=self.STREAMS_APP_NAME, @@ -246,19 +246,19 @@ def test_weave_inputs_from_prev_component( ToSection( topics={ TopicName("prev-output-topic"): TopicConfig( - type=OutputTopicTypes.OUTPUT, partitions_count=10 + type=OutputTopicTypes.OUTPUT, partitions_count=10, ), TopicName("b"): TopicConfig( - type=OutputTopicTypes.OUTPUT, partitions_count=10 + type=OutputTopicTypes.OUTPUT, partitions_count=10, ), TopicName("a"): TopicConfig( - type=OutputTopicTypes.OUTPUT, partitions_count=10 + type=OutputTopicTypes.OUTPUT, partitions_count=10, ), TopicName("prev-error-topic"): TopicConfig( - type=OutputTopicTypes.ERROR, partitions_count=10 + type=OutputTopicTypes.ERROR, partitions_count=10, ), - } - ) + }, + ), ) assert streams_app.app.streams.input_topics == ["prev-output-topic", "b", "a"] @@ -281,10 +281,10 @@ def test_deploy_order_when_dry_run_is_false( "to": { "topics": { "${output_topic_name}": TopicConfig( - type=OutputTopicTypes.OUTPUT, partitions_count=10 + type=OutputTopicTypes.OUTPUT, partitions_count=10, ), "${error_topic_name}": TopicConfig( - type=OutputTopicTypes.ERROR, partitions_count=10 + type=OutputTopicTypes.ERROR, partitions_count=10, ), "extra-topic-1": TopicConfig( role="first-extra-topic", @@ -294,15 +294,15 @@ def test_deploy_order_when_dry_run_is_false( role="second-extra-topic", partitions_count=10, ), - } + }, }, }, ) mock_create_topics = mocker.patch.object( - streams_app.handlers.topic_handler, "create_topics" + streams_app.handlers.topic_handler, "create_topics", ) mock_helm_upgrade_install = mocker.patch.object( - streams_app.helm, "upgrade_install" + streams_app.helm, "upgrade_install", ) mock = mocker.MagicMock() @@ -328,7 +328,7 @@ def test_deploy_order_when_dry_run_is_false( }, "outputTopic": "${output_topic_name}", "errorTopic": "${error_topic_name}", - } + }, }, HelmUpgradeInstallFlags( create_namespace=False, @@ -351,14 +351,14 @@ def test_destroy(self, streams_app: StreamsApp, mocker: MockerFixture): streams_app.destroy(dry_run=True) mock_helm_uninstall.assert_called_once_with( - "test-namespace", "${pipeline_name}-" + self.STREAMS_APP_NAME, True + "test-namespace", "${pipeline_name}-" + self.STREAMS_APP_NAME, True, ) def test_reset_when_dry_run_is_false( - self, streams_app: StreamsApp, mocker: MockerFixture + self, streams_app: StreamsApp, mocker: MockerFixture, ): mock_helm_upgrade_install = mocker.patch.object( - streams_app.helm, "upgrade_install" + streams_app.helm, "upgrade_install", ) mock_helm_uninstall = mocker.patch.object(streams_app.helm, "uninstall") @@ -402,7 +402,7 @@ def test_should_clean_streams_app_and_deploy_clean_up_job_and_delete_clean_up( mocker: MockerFixture, ): mock_helm_upgrade_install = mocker.patch.object( - streams_app.helm, "upgrade_install" + streams_app.helm, "upgrade_install", ) mock_helm_uninstall = mocker.patch.object(streams_app.helm, 
"uninstall") diff --git a/tests/pipeline/test_components/components.py b/tests/pipeline/test_components/components.py index cb58d19f0..0432a2184 100644 --- a/tests/pipeline/test_components/components.py +++ b/tests/pipeline/test_components/components.py @@ -51,12 +51,12 @@ def inflate(self) -> list[PipelineComponent]: to=ToSection( topics={ TopicName("${component_type}"): TopicConfig( - type=OutputTopicTypes.OUTPUT + type=OutputTopicTypes.OUTPUT, ), TopicName("${component_name}"): TopicConfig( - type=None, role="test" + type=None, role="test", ), - } + }, ), ) inflate_steps.append(kafka_connector) @@ -67,9 +67,9 @@ def inflate(self) -> list[PipelineComponent]: to=ToSection( # type: ignore topics={ TopicName( - f"{self.full_name}-" + "${component_name}" - ): TopicConfig(type=OutputTopicTypes.OUTPUT) - } + f"{self.full_name}-" + "${component_name}", + ): TopicConfig(type=OutputTopicTypes.OUTPUT), + }, ).dict(), ) inflate_steps.append(streams_app) @@ -79,7 +79,7 @@ def inflate(self) -> list[PipelineComponent]: class TestSchemaProvider(SchemaProvider): def provide_schema( - self, schema_class: str, models: dict[ModelName, ModelVersion] + self, schema_class: str, models: dict[ModelName, ModelVersion], ) -> Schema: schema = { "type": "record", diff --git a/tests/pipeline/test_components_without_schema_handler/components.py b/tests/pipeline/test_components_without_schema_handler/components.py index d5684178c..9ea414a9d 100644 --- a/tests/pipeline/test_components_without_schema_handler/components.py +++ b/tests/pipeline/test_components_without_schema_handler/components.py @@ -33,7 +33,7 @@ def inflate(self) -> list[PipelineComponent]: **{ "topics": topic_name, "transforms.changeTopic.replacement": f"{topic_name}-index-v1", - } + }, ), ) inflate_steps.append(kafka_connector) diff --git a/tests/pipeline/test_pipeline.py b/tests/pipeline/test_pipeline.py index af9cde479..41208322b 100644 --- a/tests/pipeline/test_pipeline.py +++ b/tests/pipeline/test_pipeline.py @@ -49,7 +49,7 @@ def test_load_pipeline(self, snapshot: SnapshotTest): snapshot.assert_match(enriched_pipeline, "test-pipeline") def test_generate_with_steps_flag_should_write_log_warning( - self, caplog: pytest.LogCaptureFixture + self, caplog: pytest.LogCaptureFixture, ): result = runner.invoke( app, @@ -73,7 +73,7 @@ def test_generate_with_steps_flag_should_write_log_warning( logging.WARNING, "The following flags are considered only when `--template` is set: \n \ '--steps'", - ) + ), ] assert result.exit_code == 0 diff --git a/tests/utils/test_dict_ops.py b/tests/utils/test_dict_ops.py index 1ea410770..e9a02fe5b 100644 --- a/tests/utils/test_dict_ops.py +++ b/tests/utils/test_dict_ops.py @@ -70,7 +70,7 @@ class SimpleModel(BaseModel): }, }, problems=99, - ).json() + ).json(), ) existing_substitution = { "key1": "Everything", diff --git a/tests/utils/test_diff.py b/tests/utils/test_diff.py index f2ffeac88..81b66b2cd 100644 --- a/tests/utils/test_diff.py +++ b/tests/utils/test_diff.py @@ -186,7 +186,7 @@ def test_render_diff(d1: dict, d2: dict, ignore: set[str] | None, output: str | diff_type=DiffType.CHANGE, key="a.b", change=Change(old_value=1, new_value=2), - ) + ), ], ), ], diff --git a/tests/utils/test_environment.py b/tests/utils/test_environment.py index 09bbb75de..88e84707a 100644 --- a/tests/utils/test_environment.py +++ b/tests/utils/test_environment.py @@ -91,7 +91,7 @@ def test_windows_behaviour_keys_transformation(system, fake_environment_windows) @patch("platform.system") def 
test_windows_behaviour_keys_transformation_as_kwargs( - system, fake_environment_windows + system, fake_environment_windows, ): system.return_value = "Windows" environment = Environment(**fake_environment_windows) From c7b325193f3f403467d37408fcfd78abc2b535ae Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 00:07:36 +0300 Subject: [PATCH 20/76] chore(ruff-comprehensions): ignore "C408" on certain lines due to conflict with pyright --- kpops/utils/gen_schema.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index 51c64ce2c..cbcf4beaa 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -69,7 +69,7 @@ def _add_components( :return: Extended tuple """ if components is None: - components = tuple() + components = tuple() # noqa: C408 # Set of existing types, against which to check the new ones defined_component_types = {component.type for component in components} custom_components = ( @@ -95,7 +95,7 @@ def gen_pipeline_schema( log.warning("No components are provided, no schema is generated.") return # Add stock components if enabled - components: tuple[type[PipelineComponent]] = tuple() + components: tuple[type[PipelineComponent]] = tuple() # noqa: C408 if include_stock_components: components = _add_components("kpops.components") # Add custom components if provided From 0c2c7c18432ec97a013645524062999f15bfb6ff Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 00:09:04 +0300 Subject: [PATCH 21/76] ci(ruff-errmsg): autofix "EM" --- kpops/cli/registry.py | 3 ++- kpops/component_handlers/helm_wrapper/helm.py | 6 +++-- .../component_handlers/helm_wrapper/model.py | 3 ++- .../kafka_connect/kafka_connect_handler.py | 3 ++- .../component_handlers/kafka_connect/model.py | 3 ++- .../schema_handler/schema_handler.py | 10 ++++---- kpops/component_handlers/topic/handler.py | 6 +++-- .../component_handlers/topic/proxy_wrapper.py | 3 ++- .../base_defaults_component.py | 3 ++- .../base_components/kafka_connector.py | 6 +++-- .../base_components/kubernetes_app.py | 6 +++-- .../base_components/models/from_section.py | 3 ++- .../base_components/models/to_section.py | 3 ++- .../producer/producer_app.py | 3 ++- kpops/pipeline_generator/pipeline.py | 24 ++++++++++++------- kpops/utils/dict_differ.py | 3 ++- kpops/utils/dict_ops.py | 6 +++-- kpops/utils/gen_schema.py | 6 +++-- kpops/utils/yaml_loading.py | 3 ++- 19 files changed, 68 insertions(+), 35 deletions(-) diff --git a/kpops/cli/registry.py b/kpops/cli/registry.py index fc40f8938..30a9c1205 100644 --- a/kpops/cli/registry.py +++ b/kpops/cli/registry.py @@ -38,8 +38,9 @@ def __getitem__(self, component_type: str) -> type[PipelineComponent]: try: return self._classes[component_type] except KeyError as ke: + msg = f"Could not find a component of type {component_type}" raise ClassNotFoundError( - f"Could not find a component of type {component_type}", + msg, ) from ke diff --git a/kpops/component_handlers/helm_wrapper/helm.py b/kpops/component_handlers/helm_wrapper/helm.py index 0eb519cb6..f717f2e6d 100644 --- a/kpops/component_handlers/helm_wrapper/helm.py +++ b/kpops/component_handlers/helm_wrapper/helm.py @@ -29,8 +29,9 @@ def __init__(self, helm_config: HelmConfig) -> None: self._debug = helm_config.debug self._version = self.get_version() if self._version.major != 3: + msg = f"The supported Helm version is 3.x.x. 
The current Helm version is {self._version.major}.{self._version.minor}.{self._version.patch}" raise RuntimeError( - f"The supported Helm version is 3.x.x. The current Helm version is {self._version.major}.{self._version.minor}.{self._version.patch}", + msg, ) def add_repo( @@ -183,8 +184,9 @@ def get_version(self) -> Version: short_version = self.__execute(command) version_match = re.search(r"^v(\d+(?:\.\d+){0,2})", short_version) if version_match is None: + msg = f"Could not parse the Helm version.\n\nHelm output:\n{short_version}" raise RuntimeError( - f"Could not parse the Helm version.\n\nHelm output:\n{short_version}", + msg, ) version = map(int, version_match.group(1).split(".")) return Version(*version) diff --git a/kpops/component_handlers/helm_wrapper/model.py b/kpops/component_handlers/helm_wrapper/model.py index 93e0116e2..8c6c09c32 100644 --- a/kpops/component_handlers/helm_wrapper/model.py +++ b/kpops/component_handlers/helm_wrapper/model.py @@ -180,7 +180,8 @@ def parse_source(source: str) -> str: # Source: chart/templates/serviceaccount.yaml """ if not source.startswith(HELM_SOURCE_PREFIX): - raise ParseError("Not a valid Helm template source") + msg = "Not a valid Helm template source" + raise ParseError(msg) return source.removeprefix(HELM_SOURCE_PREFIX).strip() @classmethod diff --git a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py index fb81aa411..27aad212f 100644 --- a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py +++ b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py @@ -114,8 +114,9 @@ def __dry_run_connector_creation( errors = self._connect_wrapper.validate_connector_config(connector_config) if len(errors) > 0: formatted_errors = "\n".join(errors) + msg = f"Connector Creation: validating the connector config for connector {connector_name} resulted in the following errors: {formatted_errors}" raise ConnectorStateException( - f"Connector Creation: validating the connector config for connector {connector_name} resulted in the following errors: {formatted_errors}", + msg, ) else: log.info( diff --git a/kpops/component_handlers/kafka_connect/model.py b/kpops/component_handlers/kafka_connect/model.py index 99964d3c5..e83e33e5d 100644 --- a/kpops/component_handlers/kafka_connect/model.py +++ b/kpops/component_handlers/kafka_connect/model.py @@ -31,7 +31,8 @@ def schema_extra(cls, schema: dict[str, Any], model: type[BaseModel]) -> None: @validator("connector_class") def connector_class_must_contain_dot(cls, connector_class: str) -> str: if "." not in connector_class: - raise ValueError(f"Invalid connector class {connector_class}") + msg = f"Invalid connector class {connector_class}" + raise ValueError(msg) return connector_class @property diff --git a/kpops/component_handlers/schema_handler/schema_handler.py b/kpops/component_handlers/schema_handler/schema_handler.py index 8c2065f7a..9bf068438 100644 --- a/kpops/component_handlers/schema_handler/schema_handler.py +++ b/kpops/component_handlers/schema_handler/schema_handler.py @@ -29,15 +29,16 @@ def __init__(self, url: str, components_module: str | None): def schema_provider(self) -> SchemaProvider: try: if not self.components_module: + msg = f"The Schema Registry URL is set but you haven't specified the component module path. Please provide a valid component module path where your {SchemaProvider.__name__} implementation exists." 
raise ValueError( - f"The Schema Registry URL is set but you haven't specified the component module path. Please provide a valid component module path where your {SchemaProvider.__name__} implementation exists.", + msg, ) schema_provider_class = find_class(self.components_module, SchemaProvider) return schema_provider_class() # pyright: ignore[reportGeneralTypeIssues] except ClassNotFoundError as e: + msg = f"No schema provider found in components module {self.components_module}. Please implement the abstract method in {SchemaProvider.__module__}.{SchemaProvider.__name__}." raise ValueError( - f"No schema provider found in components module {self.components_module}. " - f"Please implement the abstract method in {SchemaProvider.__module__}.{SchemaProvider.__name__}.", + msg, ) from e @classmethod @@ -144,8 +145,9 @@ def __check_compatibility( if isinstance(schema, AvroSchema) else str(schema) ) + msg = f"Schema is not compatible for {subject} and model {schema_class}. \n {json.dumps(schema_str, indent=4)}" raise Exception( - f"Schema is not compatible for {subject} and model {schema_class}. \n {json.dumps(schema_str, indent=4)}", + msg, ) else: log.debug( diff --git a/kpops/component_handlers/topic/handler.py b/kpops/component_handlers/topic/handler.py index b436b20e3..38de9c66b 100644 --- a/kpops/component_handlers/topic/handler.py +++ b/kpops/component_handlers/topic/handler.py @@ -148,8 +148,9 @@ def __check_partition_count( f"Topic Creation: partition count of topic {topic_name} did not change. Current partitions count {partition_count}. Updating configs.", ) else: + msg = f"Topic Creation: partition count of topic {topic_name} changed! Partitions count of topic {topic_name} is {partition_count}. The given partitions count {topic_spec.partitions_count}." raise TopicTransactionError( - f"Topic Creation: partition count of topic {topic_name} changed! Partitions count of topic {topic_name} is {partition_count}. The given partitions count {topic_spec.partitions_count}.", + msg, ) @staticmethod @@ -168,8 +169,9 @@ def __check_replication_factor( f"Topic Creation: replication factor of topic {topic_name} did not change. Current replication factor {replication_factor}. Updating configs.", ) else: + msg = f"Topic Creation: replication factor of topic {topic_name} changed! Replication factor of topic {topic_name} is {replication_factor}. The given replication count {topic_spec.replication_factor}." raise TopicTransactionError( - f"Topic Creation: replication factor of topic {topic_name} changed! Replication factor of topic {topic_name} is {replication_factor}. The given replication count {topic_spec.replication_factor}.", + msg, ) def __dry_run_topic_deletion(self, topic_name: str) -> None: diff --git a/kpops/component_handlers/topic/proxy_wrapper.py b/kpops/component_handlers/topic/proxy_wrapper.py index 1a36e4b50..0d7ad2ebb 100644 --- a/kpops/component_handlers/topic/proxy_wrapper.py +++ b/kpops/component_handlers/topic/proxy_wrapper.py @@ -25,8 +25,9 @@ class ProxyWrapper: def __init__(self, pipeline_config: PipelineConfig) -> None: if not pipeline_config.kafka_rest_host: + msg = "The Kafka REST Proxy host is not set. Please set the host in the config.yaml using the kafka_rest_host property or set the environemt variable KPOPS_REST_PROXY_HOST." raise ValueError( - "The Kafka REST Proxy host is not set. 
Please set the host in the config.yaml using the kafka_rest_host property or set the environemt variable KPOPS_REST_PROXY_HOST.", + msg, ) self._host = pipeline_config.kafka_rest_host diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index e789182ff..4cae46ce4 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -165,8 +165,9 @@ def defaults_from_yaml(path: Path, key: str) -> dict: """ content = load_yaml_file(path, substitution=ENV) if not isinstance(content, dict): + msg = "Default files should be structured as map ([app type] -> [default config]" raise TypeError( - "Default files should be structured as map ([app type] -> [default config]", + msg, ) value = content.get(key) if value is None: diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py index 2d6c8c8bc..584096990 100644 --- a/kpops/components/base_components/kafka_connector.py +++ b/kpops/components/base_components/kafka_connector.py @@ -85,7 +85,8 @@ def connector_config_should_have_component_name( component_name = values["prefix"] + values["name"] connector_name: str | None = app.get("name") if connector_name is not None and connector_name != component_name: - raise ValueError("Connector name should be the same as component name") + msg = "Connector name should be the same as component name" + raise ValueError(msg) app["name"] = component_name return app @@ -280,7 +281,8 @@ class KafkaSourceConnector(KafkaConnector): @override def apply_from_inputs(self, name: str, topic: FromTopic) -> NoReturn: - raise NotImplementedError("Kafka source connector doesn't support FromSection") + msg = "Kafka source connector doesn't support FromSection" + raise NotImplementedError(msg) @override def template(self) -> None: diff --git a/kpops/components/base_components/kubernetes_app.py b/kpops/components/base_components/kubernetes_app.py index b2676da7c..021f6d379 100644 --- a/kpops/components/base_components/kubernetes_app.py +++ b/kpops/components/base_components/kubernetes_app.py @@ -96,8 +96,9 @@ def helm_release_name(self) -> str: @property def helm_chart(self) -> str: """Return component's Helm chart.""" + msg = f"Please implement the helm_chart property of the {self.__module__} module." raise NotImplementedError( - f"Please implement the helm_chart property of the {self.__module__} module.", + msg, ) @property @@ -193,7 +194,8 @@ def validate_kubernetes_name(name: str) -> None: :raises ValueError: The component name {name} is invalid for Kubernetes. """ if not bool(KUBERNETES_NAME_CHECK_PATTERN.match(name)): - raise ValueError(f"The component name {name} is invalid for Kubernetes.") + msg = f"The component name {name} is invalid for Kubernetes." 
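# A minimal sketch of the flake8-errmsg ("EM") rewrite this commit applies across the
# code base; `check_positive` and `quantity` are hypothetical names, not taken from kpops:
def check_positive(quantity: int) -> None:
    if quantity <= 0:
        # EM102 flags `raise ValueError(f"quantity must be positive, got {quantity}")`
        # because the f-string would appear twice: once in the traceback's source line
        # and once as the exception message. Binding it to a variable first avoids that.
        msg = f"quantity must be positive, got {quantity}"
        raise ValueError(msg)
# e.g. check_positive(0) raises ValueError("quantity must be positive, got 0")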
+ raise ValueError(msg) @override def dict(self, *, exclude=None, **kwargs) -> dict[str, Any]: diff --git a/kpops/components/base_components/models/from_section.py b/kpops/components/base_components/models/from_section.py index aea159eb2..a53708c94 100644 --- a/kpops/components/base_components/models/from_section.py +++ b/kpops/components/base_components/models/from_section.py @@ -39,7 +39,8 @@ class Config(DescConfig): def extra_topic_role(cls, values: dict[str, Any]) -> dict[str, Any]: """Ensure that cls.role is used correctly, assign type if needed.""" if values["type"] == InputTopicTypes.INPUT and values["role"]: - raise ValueError("Define role only if `type` is `pattern` or `None`") + msg = "Define role only if `type` is `pattern` or `None`" + raise ValueError(msg) return values diff --git a/kpops/components/base_components/models/to_section.py b/kpops/components/base_components/models/to_section.py index 00393ee4e..843321916 100644 --- a/kpops/components/base_components/models/to_section.py +++ b/kpops/components/base_components/models/to_section.py @@ -67,7 +67,8 @@ class Config(DescConfig): def extra_topic_role(cls, values: dict[str, Any]) -> dict[str, Any]: """Ensure that cls.role is used correctly, assign type if needed.""" if values["type"] and values["role"]: - raise ValueError("Define `role` only if `type` is undefined") + msg = "Define `role` only if `type` is undefined" + raise ValueError(msg) return values diff --git a/kpops/components/streams_bootstrap/producer/producer_app.py b/kpops/components/streams_bootstrap/producer/producer_app.py index 3a513c5a5..08e621019 100644 --- a/kpops/components/streams_bootstrap/producer/producer_app.py +++ b/kpops/components/streams_bootstrap/producer/producer_app.py @@ -40,7 +40,8 @@ class ProducerApp(KafkaApp): def apply_to_outputs(self, name: str, topic: TopicConfig) -> None: match topic.type: case OutputTopicTypes.ERROR: - raise ValueError("Producer apps do not support error topics") + msg = "Producer apps do not support error topics" + raise ValueError(msg) case _: super().apply_to_outputs(name, topic) diff --git a/kpops/pipeline_generator/pipeline.py b/kpops/pipeline_generator/pipeline.py index 0ed15e9b1..b11e933fb 100644 --- a/kpops/pipeline_generator/pipeline.py +++ b/kpops/pipeline_generator/pipeline.py @@ -44,7 +44,8 @@ def find(self, component_name: str) -> PipelineComponent: for component in self.components: if component_name == component.name: return component - raise ValueError(f"Component {component_name} not found") + msg = f"Component {component_name} not found" + raise ValueError(msg) def add(self, component: PipelineComponent) -> None: self._populate_component_name(component) @@ -63,8 +64,9 @@ def validate_unique_names(self) -> None: step_names = [component.full_name for component in self.components] duplicates = [name for name, count in Counter(step_names).items() if count > 1] if duplicates: + msg = f"step names should be unique. duplicate step names: {', '.join(duplicates)}" raise ValidationError( - f"step names should be unique. duplicate step names: {', '.join(duplicates)}", + msg, ) @staticmethod @@ -87,8 +89,9 @@ def create_env_components_index( index: dict[str, dict] = {} for component in environment_components: if "type" not in component or "name" not in component: + msg = "To override components per environment, every component should at least have a type and a name." 
raise ValueError( - "To override components per environment, every component should at least have a type and a name.", + msg, ) index[component["name"]] = component return index @@ -137,15 +140,17 @@ def load_from_yaml( main_content = load_yaml_file(path, substitution=ENV) if not isinstance(main_content, list): + msg = f"The pipeline definition {path} should contain a list of components" raise TypeError( - f"The pipeline definition {path} should contain a list of components", + msg, ) env_content = [] if (env_file := Pipeline.pipeline_filename_environment(path, config)).exists(): env_content = load_yaml_file(env_file, substitution=ENV) if not isinstance(env_content, list): + msg = f"The pipeline definition {env_file} should contain a list of components" raise TypeError( - f"The pipeline definition {env_file} should contain a list of components", + msg, ) pipeline = cls(main_content, env_content, registry, config, handlers) @@ -164,15 +169,17 @@ def parse_components(self, component_list: list[dict]) -> None: try: component_type: str = component_data["type"] except KeyError as ke: + msg = "Every component must have a type defined, this component does not have one." raise ValueError( - "Every component must have a type defined, this component does not have one.", + msg, ) from ke component_class = self.registry[component_type] self.apply_component(component_class, component_data) except Exception as ex: # noqa: BLE001 if "name" in component_data: + msg = f"Error enriching {component_data['type']} component {component_data['name']}" raise ParsingException( - f"Error enriching {component_data['type']} component {component_data['name']}", + msg, ) from ex else: raise ParsingException() from ex @@ -336,7 +343,8 @@ def set_pipeline_name_env_vars(base_dir: Path, path: Path) -> None: """ path_without_file = path.resolve().relative_to(base_dir.resolve()).parts[:-1] if not path_without_file: - raise ValueError("The pipeline-base-dir should not equal the pipeline-path") + msg = "The pipeline-base-dir should not equal the pipeline-path" + raise ValueError(msg) pipeline_name = "-".join(path_without_file) ENV["pipeline_name"] = pipeline_name for level, parent in enumerate(path_without_file): diff --git a/kpops/utils/dict_differ.py b/kpops/utils/dict_differ.py index da47bd620..005cd1e71 100644 --- a/kpops/utils/dict_differ.py +++ b/kpops/utils/dict_differ.py @@ -40,7 +40,8 @@ def factory(type: DiffType, change: T | tuple[T, T]) -> Change: return Change(change, None) case DiffType.CHANGE if isinstance(change, tuple): return Change(*change) - raise ValueError(f"{type} is not part of {DiffType}") + msg = f"{type} is not part of {DiffType}" + raise ValueError(msg) @dataclass diff --git a/kpops/utils/dict_ops.py b/kpops/utils/dict_ops.py index 0f4643043..c52ae8e42 100644 --- a/kpops/utils/dict_ops.py +++ b/kpops/utils/dict_ops.py @@ -57,11 +57,13 @@ def flatten_mapping( :returns: "Flattened" mapping in the form of dict """ if not isinstance(nested_mapping, Mapping): - raise TypeError("Argument nested_mapping is not a Mapping") + msg = "Argument nested_mapping is not a Mapping" + raise TypeError(msg) top: dict[str, Any] = {} for key, value in nested_mapping.items(): if not isinstance(key, str): - raise TypeError(f"Argument nested_mapping contains a non-str key: {key}") + msg = f"Argument nested_mapping contains a non-str key: {key}" + raise TypeError(msg) if prefix: key = prefix + separator + key if isinstance(value, Mapping): diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index 
cbcf4beaa..e04259403 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -26,7 +26,8 @@ class SchemaScope(str, Enum): # adapted from https://github.com/tiangolo/fastapi/issues/1378#issuecomment-764966955 def field_schema(field: ModelField, **kwargs: Any) -> Any: if field.field_info.extra.get("hidden_from_schema"): - raise SkipField(f"{field.name} field is being hidden") + msg = f"{field.name} field is being hidden" + raise SkipField(msg) else: return original_field_schema(field, **kwargs) @@ -102,7 +103,8 @@ def gen_pipeline_schema( if components_module: components = _add_components(components_module, components) if not components: - raise RuntimeError("No valid components found.") + msg = "No valid components found." + raise RuntimeError(msg) # Create a type union that will hold the union of all component types PipelineComponents = Union[components] # type: ignore[valid-type] diff --git a/kpops/utils/yaml_loading.py b/kpops/utils/yaml_loading.py index 9dc53c1ab..d8aee8b95 100644 --- a/kpops/utils/yaml_loading.py +++ b/kpops/utils/yaml_loading.py @@ -70,7 +70,8 @@ def substitute_nested(input: str, **kwargs) -> str: steps.add(new_str) old_str, new_str = new_str, substitute(new_str, kwargs) if new_str != old_str: + msg = "An infinite loop condition detected. Check substitution variables." raise ValueError( - "An infinite loop condition detected. Check substitution variables.", + msg, ) return old_str From 0160c08d322227f347177f2bf962efdb40106e5a Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 00:10:45 +0300 Subject: [PATCH 22/76] ci(ruff-pie): autofix --- kpops/components/base_components/base_defaults_component.py | 1 - 1 file changed, 1 deletion(-) diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index 4cae46ce4..1ae6ace3a 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -113,7 +113,6 @@ def _validate_custom(self, **kwargs) -> None: :param kwargs: The init kwargs for the component """ - pass def load_defaults( From dd29e3a03f810cf30b44878885a0695eba6c221e Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 00:15:19 +0300 Subject: [PATCH 23/76] ci(ruff-pie): exclude "PIE804" --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 6ecf2b6d5..e4d18c18c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -94,6 +94,7 @@ ignore = [ "B010", # Do not call setattr with a constant attribute value. 
-- Not always applicable "RUF012", # type class attrs with `ClassVar` -- Too strict/trigger-happy "UP007", # Use X | Y for type annotations -- `typer` doesn't support it + "PIE804", # Unnecessary `dict` kwargs -- Inconvenient to enforce "RET505", # Unnecessary {branch} after return statement -- Lots of false positives "RET506", # Unnecessary {branch} after raise statement -- Lots of false positives "RET507", # Unnecessary {branch} after continue statement -- Lots of false positives From 49e9e4a67e4d5685ce44b889577028886a97cb77 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 00:16:49 +0300 Subject: [PATCH 24/76] ci(ruff-pytest): autofix "PT" --- .../helm_wrapper/test_dry_run_handler.py | 4 ++-- .../helm_wrapper/test_helm_wrapper.py | 8 ++++---- .../kafka_connect/test_connect_handler.py | 10 +++++----- .../kafka_connect/test_connect_wrapper.py | 2 +- tests/components/test_base_defaults_component.py | 4 ++-- tests/components/test_kafka_app.py | 4 ++-- tests/components/test_kafka_connector.py | 8 ++++---- tests/components/test_kafka_sink_connector.py | 4 ++-- tests/components/test_kafka_source_connector.py | 2 +- tests/components/test_kubernetes_app.py | 14 +++++++------- tests/components/test_producer_app.py | 6 +++--- tests/components/test_streams_app.py | 6 +++--- tests/pipeline/test_template.py | 2 +- tests/utils/test_environment.py | 4 ++-- 14 files changed, 39 insertions(+), 39 deletions(-) diff --git a/tests/component_handlers/helm_wrapper/test_dry_run_handler.py b/tests/component_handlers/helm_wrapper/test_dry_run_handler.py index df44f3e1e..c75883743 100644 --- a/tests/component_handlers/helm_wrapper/test_dry_run_handler.py +++ b/tests/component_handlers/helm_wrapper/test_dry_run_handler.py @@ -12,13 +12,13 @@ class TestDryRunHandler: - @pytest.fixture + @pytest.fixture() def helm_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch( "kpops.component_handlers.helm_wrapper.dry_run_handler.Helm", ).return_value - @pytest.fixture + @pytest.fixture() def helm_diff_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch( "kpops.component_handlers.helm_wrapper.dry_run_handler.HelmDiff", diff --git a/tests/component_handlers/helm_wrapper/test_helm_wrapper.py b/tests/component_handlers/helm_wrapper/test_helm_wrapper.py index 1afdb7885..a06740577 100644 --- a/tests/component_handlers/helm_wrapper/test_helm_wrapper.py +++ b/tests/component_handlers/helm_wrapper/test_helm_wrapper.py @@ -29,15 +29,15 @@ def temp_file_mock(self, mocker: MockerFixture) -> MagicMock: temp_file_mock.return_value.__enter__.return_value.name = "values.yaml" return temp_file_mock - @pytest.fixture + @pytest.fixture() def run_command(self, mocker: MockerFixture) -> MagicMock: return mocker.patch.object(Helm, "_Helm__execute") - @pytest.fixture + @pytest.fixture() def log_warning_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch("kpops.component_handlers.helm_wrapper.helm.log.warning") - @pytest.fixture + @pytest.fixture() def mock_get_version(self, mocker: MockerFixture) -> MagicMock: mock_get_version = mocker.patch.object(Helm, "get_version") mock_get_version.return_value = Version(major=3, minor=12, patch=0) @@ -498,7 +498,7 @@ def test_should_call_run_command_method_when_helm_template_without_optional_args ) @pytest.mark.parametrize( - "raw_version, expected_version", + ("raw_version", "expected_version"), [ ("v3.12.0+gc9f554d", Version(3, 12, 0)), ("v3.12.0", Version(3, 12, 0)), diff --git a/tests/component_handlers/kafka_connect/test_connect_handler.py 
b/tests/component_handlers/kafka_connect/test_connect_handler.py index ff6b7068c..172f1dee0 100644 --- a/tests/component_handlers/kafka_connect/test_connect_handler.py +++ b/tests/component_handlers/kafka_connect/test_connect_handler.py @@ -22,25 +22,25 @@ class TestConnectorHandler: - @pytest.fixture + @pytest.fixture() def log_info_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch( "kpops.component_handlers.kafka_connect.kafka_connect_handler.log.info", ) - @pytest.fixture + @pytest.fixture() def log_warning_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch( "kpops.component_handlers.kafka_connect.kafka_connect_handler.log.warning", ) - @pytest.fixture + @pytest.fixture() def log_error_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch( "kpops.component_handlers.kafka_connect.kafka_connect_handler.log.error", ) - @pytest.fixture + @pytest.fixture() def renderer_diff_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch( "kpops.component_handlers.kafka_connect.kafka_connect_handler.render_diff", @@ -53,7 +53,7 @@ def connector_handler(connect_wrapper: MagicMock) -> KafkaConnectHandler: timeout=0, ) - @pytest.fixture + @pytest.fixture() def connector_config(self) -> KafkaConnectorConfig: return KafkaConnectorConfig( **{ diff --git a/tests/component_handlers/kafka_connect/test_connect_wrapper.py b/tests/component_handlers/kafka_connect/test_connect_wrapper.py index d4dd13664..0033c43f2 100644 --- a/tests/component_handlers/kafka_connect/test_connect_wrapper.py +++ b/tests/component_handlers/kafka_connect/test_connect_wrapper.py @@ -34,7 +34,7 @@ def setup(self): ) self.connect_wrapper = ConnectWrapper(host=config.kafka_connect_host) - @pytest.fixture + @pytest.fixture() def connector_config(self) -> KafkaConnectorConfig: return KafkaConnectorConfig( **{ diff --git a/tests/components/test_base_defaults_component.py b/tests/components/test_base_defaults_component.py index 6e3e3d570..6c78d5fa1 100644 --- a/tests/components/test_base_defaults_component.py +++ b/tests/components/test_base_defaults_component.py @@ -37,7 +37,7 @@ class EnvVarTest(BaseDefaultsComponent): name: str | None = None -@pytest.fixture +@pytest.fixture() def config() -> PipelineConfig: return PipelineConfig( defaults_path=DEFAULTS_PATH, @@ -45,7 +45,7 @@ def config() -> PipelineConfig: ) -@pytest.fixture +@pytest.fixture() def handlers() -> ComponentHandlers: return ComponentHandlers( schema_handler=MagicMock(), diff --git a/tests/components/test_kafka_app.py b/tests/components/test_kafka_app.py index aae796153..18de732af 100644 --- a/tests/components/test_kafka_app.py +++ b/tests/components/test_kafka_app.py @@ -17,7 +17,7 @@ class TestKafkaApp: - @pytest.fixture + @pytest.fixture() def config(self) -> PipelineConfig: return PipelineConfig( defaults_path=DEFAULTS_PATH, @@ -25,7 +25,7 @@ def config(self) -> PipelineConfig: helm_diff_config=HelmDiffConfig(), ) - @pytest.fixture + @pytest.fixture() def handlers(self) -> ComponentHandlers: return ComponentHandlers( schema_handler=MagicMock(), diff --git a/tests/components/test_kafka_connector.py b/tests/components/test_kafka_connector.py index e22d26d52..b20e97a63 100644 --- a/tests/components/test_kafka_connector.py +++ b/tests/components/test_kafka_connector.py @@ -18,7 +18,7 @@ class TestKafkaConnector: - @pytest.fixture + @pytest.fixture() def config(self) -> PipelineConfig: return PipelineConfig( defaults_path=DEFAULTS_PATH, @@ -31,7 +31,7 @@ def config(self) -> PipelineConfig: 
helm_diff_config=HelmDiffConfig(), ) - @pytest.fixture + @pytest.fixture() def handlers(self) -> ComponentHandlers: return ComponentHandlers( schema_handler=MagicMock(), @@ -45,13 +45,13 @@ def helm_mock(self, mocker: MockerFixture) -> MagicMock: "kpops.components.base_components.kafka_connector.Helm", ).return_value - @pytest.fixture + @pytest.fixture() def dry_run_handler(self, mocker: MockerFixture) -> MagicMock: return mocker.patch( "kpops.components.base_components.kafka_connector.DryRunHandler", ).return_value - @pytest.fixture + @pytest.fixture() def connector_config(self) -> KafkaConnectorConfig: return KafkaConnectorConfig( **{ diff --git a/tests/components/test_kafka_sink_connector.py b/tests/components/test_kafka_sink_connector.py index a0650c633..a748e8fd7 100644 --- a/tests/components/test_kafka_sink_connector.py +++ b/tests/components/test_kafka_sink_connector.py @@ -35,11 +35,11 @@ class TestKafkaSinkConnector(TestKafkaConnector): - @pytest.fixture + @pytest.fixture() def log_info_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch("kpops.components.base_components.kafka_connector.log.info") - @pytest.fixture + @pytest.fixture() def connector( self, config: PipelineConfig, diff --git a/tests/components/test_kafka_source_connector.py b/tests/components/test_kafka_source_connector.py index 18548df34..a366e736a 100644 --- a/tests/components/test_kafka_source_connector.py +++ b/tests/components/test_kafka_source_connector.py @@ -32,7 +32,7 @@ class TestKafkaSourceConnector(TestKafkaConnector): - @pytest.fixture + @pytest.fixture() def connector( self, config: PipelineConfig, diff --git a/tests/components/test_kubernetes_app.py b/tests/components/test_kubernetes_app.py index cc2b4d275..f06ff44ac 100644 --- a/tests/components/test_kubernetes_app.py +++ b/tests/components/test_kubernetes_app.py @@ -27,7 +27,7 @@ class KubernetesTestValue(KubernetesAppConfig): class TestKubernetesApp: - @pytest.fixture + @pytest.fixture() def config(self) -> PipelineConfig: return PipelineConfig( defaults_path=DEFAULTS_PATH, @@ -35,7 +35,7 @@ def config(self) -> PipelineConfig: helm_diff_config=HelmDiffConfig(), ) - @pytest.fixture + @pytest.fixture() def handlers(self) -> ComponentHandlers: return ComponentHandlers( schema_handler=MagicMock(), @@ -43,25 +43,25 @@ def handlers(self) -> ComponentHandlers: topic_handler=MagicMock(), ) - @pytest.fixture + @pytest.fixture() def helm_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch( "kpops.components.base_components.kubernetes_app.Helm", ).return_value - @pytest.fixture + @pytest.fixture() def log_info_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch("kpops.components.base_components.kubernetes_app.log.info") - @pytest.fixture + @pytest.fixture() def app_value(self) -> KubernetesTestValue: return KubernetesTestValue(**{"name_override": "test-value"}) - @pytest.fixture + @pytest.fixture() def repo_config(self) -> HelmRepoConfig: return HelmRepoConfig(repository_name="test", url="https://bakdata.com") - @pytest.fixture + @pytest.fixture() def kubernetes_app( self, config: PipelineConfig, diff --git a/tests/components/test_producer_app.py b/tests/components/test_producer_app.py index 216202b60..a0da3e909 100644 --- a/tests/components/test_producer_app.py +++ b/tests/components/test_producer_app.py @@ -21,7 +21,7 @@ class TestProducerApp: PRODUCER_APP_NAME = "test-producer-app-with-long-name-0123456789abcdefghijklmnop" PRODUCER_APP_CLEAN_NAME = "test-producer-app-with-long-n-clean" - @pytest.fixture + 
@pytest.fixture() def handlers(self) -> ComponentHandlers: return ComponentHandlers( schema_handler=MagicMock(), @@ -29,7 +29,7 @@ def handlers(self) -> ComponentHandlers: topic_handler=MagicMock(), ) - @pytest.fixture + @pytest.fixture() def config(self) -> PipelineConfig: return PipelineConfig( defaults_path=DEFAULTS_PATH, @@ -40,7 +40,7 @@ def config(self) -> PipelineConfig: ), ) - @pytest.fixture + @pytest.fixture() def producer_app( self, config: PipelineConfig, handlers: ComponentHandlers, ) -> ProducerApp: diff --git a/tests/components/test_streams_app.py b/tests/components/test_streams_app.py index 8cc46d538..e2ede73bb 100644 --- a/tests/components/test_streams_app.py +++ b/tests/components/test_streams_app.py @@ -25,7 +25,7 @@ class TestStreamsApp: STREAMS_APP_NAME = "test-streams-app-with-long-name-0123456789abcdefghijklmnop" STREAMS_APP_CLEAN_NAME = "test-streams-app-with-long-na-clean" - @pytest.fixture + @pytest.fixture() def handlers(self) -> ComponentHandlers: return ComponentHandlers( schema_handler=MagicMock(), @@ -33,7 +33,7 @@ def handlers(self) -> ComponentHandlers: topic_handler=MagicMock(), ) - @pytest.fixture + @pytest.fixture() def config(self) -> PipelineConfig: return PipelineConfig( defaults_path=DEFAULTS_PATH, @@ -45,7 +45,7 @@ def config(self) -> PipelineConfig: helm_diff_config=HelmDiffConfig(), ) - @pytest.fixture + @pytest.fixture() def streams_app( self, config: PipelineConfig, handlers: ComponentHandlers, ) -> StreamsApp: diff --git a/tests/pipeline/test_template.py b/tests/pipeline/test_template.py index cd4436b7a..a43fbec5b 100644 --- a/tests/pipeline/test_template.py +++ b/tests/pipeline/test_template.py @@ -15,7 +15,7 @@ class TestTemplate: - @pytest.fixture + @pytest.fixture() def run_command(self, mocker: MockerFixture) -> MagicMock: return mocker.patch.object(Helm, "_Helm__execute") diff --git a/tests/utils/test_environment.py b/tests/utils/test_environment.py index 88e84707a..7cd5a2430 100644 --- a/tests/utils/test_environment.py +++ b/tests/utils/test_environment.py @@ -5,12 +5,12 @@ from kpops.utils.environment import Environment -@pytest.fixture +@pytest.fixture() def fake_environment_windows(): return {"MY": "fake", "ENVIRONMENT": "here"} -@pytest.fixture +@pytest.fixture() def fake_environment_linux(): return {"my": "fake", "environment": "here"} From b93c3d8bf91a4d6bf6ef41b5d2982387dd948537 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 00:59:18 +0300 Subject: [PATCH 25/76] chore(ruff-pytest): manually fix code flagged by "PT" --- tests/cli/test_schema_generation.py | 3 +- tests/compiler/test_pipeline_name.py | 2 +- .../helm_wrapper/test_helm_wrapper.py | 3 +- .../kafka_connect/test_connect_wrapper.py | 2 +- .../schema_handler/test_schema_handler.py | 52 +++++++++---------- .../topic/test_proxy_wrapper.py | 8 +-- .../topic/test_topic_handler.py | 4 +- tests/components/test_streams_app.py | 4 +- tests/pipeline/test_pipeline.py | 3 +- 9 files changed, 35 insertions(+), 46 deletions(-) diff --git a/tests/cli/test_schema_generation.py b/tests/cli/test_schema_generation.py index fe66a5990..daf474b7c 100644 --- a/tests/cli/test_schema_generation.py +++ b/tests/cli/test_schema_generation.py @@ -107,7 +107,7 @@ def test_gen_pipeline_schema_no_modules(self, caplog: pytest.LogCaptureFixture): def test_gen_pipeline_schema_no_components(self): with pytest.raises(RuntimeError, match="^No valid components found.$"): - result = runner.invoke( + runner.invoke( app, [ "schema", @@ -117,7 +117,6 @@ def 
test_gen_pipeline_schema_no_components(self): ], catch_exceptions=False, ) - assert result.exit_code == 1 def test_gen_pipeline_schema_only_stock_module(self): result = runner.invoke( diff --git a/tests/compiler/test_pipeline_name.py b/tests/compiler/test_pipeline_name.py index 7a07c1a12..fa2d3407a 100644 --- a/tests/compiler/test_pipeline_name.py +++ b/tests/compiler/test_pipeline_name.py @@ -50,7 +50,7 @@ def test_should_set_pipeline_name_with_absolute_base_dir(): def test_should_not_set_pipeline_name_with_the_same_base_dir(): - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="The pipeline-base-dir should not equal the pipeline-path"): Pipeline.set_pipeline_name_env_vars(PIPELINE_PATH, PIPELINE_PATH) diff --git a/tests/component_handlers/helm_wrapper/test_helm_wrapper.py b/tests/component_handlers/helm_wrapper/test_helm_wrapper.py index a06740577..d838f530c 100644 --- a/tests/component_handlers/helm_wrapper/test_helm_wrapper.py +++ b/tests/component_handlers/helm_wrapper/test_helm_wrapper.py @@ -337,8 +337,7 @@ def test_raise_parse_error_when_helm_content_is_invalid(self): """, ) with pytest.raises(ParseError, match="Not a valid Helm template source"): - helm_template = list(Helm.load_manifest(stdout)) - assert len(helm_template) == 0 + list(Helm.load_manifest(stdout)) def test_load_manifest(self): stdout = dedent( diff --git a/tests/component_handlers/kafka_connect/test_connect_wrapper.py b/tests/component_handlers/kafka_connect/test_connect_wrapper.py index 0033c43f2..714b263d5 100644 --- a/tests/component_handlers/kafka_connect/test_connect_wrapper.py +++ b/tests/component_handlers/kafka_connect/test_connect_wrapper.py @@ -26,7 +26,7 @@ class TestConnectorApiWrapper: @pytest.fixture(autouse=True) - def setup(self): + def _setup(self): config = PipelineConfig( defaults_path=DEFAULTS_PATH, environment="development", diff --git a/tests/component_handlers/schema_handler/test_schema_handler.py b/tests/component_handlers/schema_handler/test_schema_handler.py index bd5815b12..51ff483ca 100644 --- a/tests/component_handlers/schema_handler/test_schema_handler.py +++ b/tests/component_handlers/schema_handler/test_schema_handler.py @@ -116,45 +116,42 @@ def test_should_raise_value_error_if_schema_provider_class_not_found(): url="http://mock:8081", components_module=NON_EXISTING_PROVIDER_MODULE, ) - with pytest.raises(ValueError) as value_error: + with pytest.raises( + ValueError, + match="No schema provider found in components module pydantic.main. " + "Please implement the abstract method in " + f"{SchemaProvider.__module__}.{SchemaProvider.__name__}." + ): schema_handler.schema_provider.provide_schema( "com.bakdata.kpops.test.SchemaHandlerTest", {}, ) - assert ( - str(value_error.value) - == "No schema provider found in components module pydantic.main. " - "Please implement the abstract method in " - f"{SchemaProvider.__module__}.{SchemaProvider.__name__}." 
- ) - - -def test_should_raise_value_error_when_schema_provider_is_called_and_components_module_is_empty(): +@pytest.mark.parametrize( + ("components_module"), + [ + pytest.param( + None, + id="components_module = None", + ), + pytest.param( + "", + id="components_module = ''", + ), + ] +) +def test_should_raise_value_error_when_schema_provider_is_called_and_components_module_is_empty(components_module): config_enable = PipelineConfig( defaults_path=Path("fake"), environment="development", schema_registry_url="http://localhost:8081", ) - - with pytest.raises(ValueError): - schema_handler = SchemaHandler.load_schema_handler(None, config_enable) - assert schema_handler is not None - schema_handler.schema_provider.provide_schema( - "com.bakdata.kpops.test.SchemaHandlerTest", {}, - ) - - with pytest.raises(ValueError) as value_error: - schema_handler = SchemaHandler.load_schema_handler("", config_enable) - assert schema_handler is not None + schema_handler = SchemaHandler.load_schema_handler(components_module, config_enable) + assert schema_handler is not None + with pytest.raises(ValueError, match="The Schema Registry URL is set but you haven't specified the component module path. Please provide a valid component module path where your SchemaProvider implementation exists."): schema_handler.schema_provider.provide_schema( "com.bakdata.kpops.test.SchemaHandlerTest", {}, ) - assert ( - str(value_error.value) - == "The Schema Registry URL is set but you haven't specified the component module path. Please provide a valid component module path where your SchemaProvider implementation exists." - ) - def test_should_log_info_when_submit_schemas_that_not_exists_and_dry_run_true( to_section: ToSection, log_info_mock: MagicMock, schema_registry_mock: MagicMock, @@ -210,10 +207,9 @@ def test_should_raise_exception_when_submit_schema_that_exists_and_not_compatibl schema_registry_mock.check_version.return_value = None schema_registry_mock.test_compatibility.return_value = False - with pytest.raises(Exception) as exception: + with pytest.raises(Exception, match="Schema is not compatible for") as exception: schema_handler.submit_schemas(to_section, True) - assert "Schema is not compatible for" in str(exception.value) EXPECTED_SCHEMA = { "type": "record", "name": "KPOps.Employee", diff --git a/tests/component_handlers/topic/test_proxy_wrapper.py b/tests/component_handlers/topic/test_proxy_wrapper.py index c8fb3e94e..f7142f496 100644 --- a/tests/component_handlers/topic/test_proxy_wrapper.py +++ b/tests/component_handlers/topic/test_proxy_wrapper.py @@ -30,7 +30,7 @@ def log_debug_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch("kpops.component_handlers.topic.proxy_wrapper.log.debug") @pytest.fixture(autouse=True) - def setup(self, httpx_mock: HTTPXMock): + def _setup(self, httpx_mock: HTTPXMock): config = PipelineConfig( defaults_path=DEFAULTS_PATH, environment="development", kafka_rest_host=HOST, ) @@ -53,12 +53,8 @@ def setup(self, httpx_mock: HTTPXMock): def test_should_raise_exception_when_host_is_not_set(self): config = PipelineConfig(defaults_path=DEFAULTS_PATH, environment="development") config.kafka_rest_host = None - with pytest.raises(ValueError) as exception: + with pytest.raises(ValueError, match="The Kafka REST Proxy host is not set. Please set the host in the config.yaml using the kafka_rest_host property or set the environemt variable KPOPS_REST_PROXY_HOST."): ProxyWrapper(pipeline_config=config) - assert ( - str(exception.value) - == "The Kafka REST Proxy host is not set. 
Please set the host in the config.yaml using the kafka_rest_host property or set the environemt variable KPOPS_REST_PROXY_HOST." - ) @patch("httpx.post") def test_should_create_topic_with_all_topic_configuration( diff --git a/tests/component_handlers/topic/test_topic_handler.py b/tests/component_handlers/topic/test_topic_handler.py index a64a239a9..9678cd86f 100644 --- a/tests/component_handlers/topic/test_topic_handler.py +++ b/tests/component_handlers/topic/test_topic_handler.py @@ -369,7 +369,7 @@ def test_should_exit_if_dry_run_and_topic_exists_different_partition_count( match="Topic Creation: partition count of topic topic-X changed! Partitions count of topic topic-X is 10. The given partitions count 200.", ): topic_handler.create_topics(to_section=to_section, dry_run=True) - wrapper.get_topic_config.assert_called_once() # dry run requests the config to create the diff + wrapper.get_topic_config.assert_called_once() # dry run requests the config to create the diff def test_should_exit_if_dry_run_and_topic_exists_different_replication_factor( self, get_topic_response_mock: MagicMock, @@ -391,7 +391,7 @@ def test_should_exit_if_dry_run_and_topic_exists_different_replication_factor( match="Topic Creation: replication factor of topic topic-X changed! Replication factor of topic topic-X is 3. The given replication count 300.", ): topic_handler.create_topics(to_section=to_section, dry_run=True) - wrapper.get_topic_config.assert_called_once() # dry run requests the config to create the diff + wrapper.get_topic_config.assert_called_once() # dry run requests the config to create the diff def test_should_log_correct_message_when_delete_existing_topic_dry_run( self, log_info_mock: MagicMock, get_topic_response_mock: MagicMock, diff --git a/tests/components/test_streams_app.py b/tests/components/test_streams_app.py index e2ede73bb..1fa29ca9a 100644 --- a/tests/components/test_streams_app.py +++ b/tests/components/test_streams_app.py @@ -145,7 +145,7 @@ def test_no_empty_input_topic( def test_should_validate(self, config: PipelineConfig, handlers: ComponentHandlers): # An exception should be raised when both role and type are defined and type is input - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Define role only if `type` is `pattern` or `None`"): StreamsApp( name=self.STREAMS_APP_NAME, config=config, @@ -167,7 +167,7 @@ def test_should_validate(self, config: PipelineConfig, handlers: ComponentHandle ) # An exception should be raised when both role and type are defined and type is error - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Define `role` only if `type` is undefined"): StreamsApp( name=self.STREAMS_APP_NAME, config=config, diff --git a/tests/pipeline/test_pipeline.py b/tests/pipeline/test_pipeline.py index 41208322b..1a105bec4 100644 --- a/tests/pipeline/test_pipeline.py +++ b/tests/pipeline/test_pipeline.py @@ -3,7 +3,6 @@ import pytest import yaml -from pytest import MonkeyPatch from snapshottest.module import SnapshotTest from typer.testing import CliRunner @@ -461,7 +460,7 @@ def test_default_config(self, snapshot: SnapshotTest): def test_env_vars_precedence_over_config( self, - monkeypatch: MonkeyPatch, + monkeypatch: pytest.MonkeyPatch, snapshot: SnapshotTest, ): monkeypatch.setenv(name="KPOPS_KAFKA_BROKERS", value="env_broker") From 9a8678a810337a6131670b1dfba80dcefe0576fc Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 01:00:54 +0300 Subject: [PATCH 26/76] ci(ruff-raise): autofix "RSE" --- kpops/cli/main.py 
| 2 +- kpops/component_handlers/helm_wrapper/helm.py | 2 +- kpops/component_handlers/kafka_connect/connect_wrapper.py | 4 ++-- kpops/component_handlers/topic/proxy_wrapper.py | 4 ++-- kpops/components/base_components/kafka_app.py | 2 +- kpops/pipeline_generator/pipeline.py | 2 +- 6 files changed, 8 insertions(+), 8 deletions(-) diff --git a/kpops/cli/main.py b/kpops/cli/main.py index 540e4d5c1..314290506 100644 --- a/kpops/cli/main.py +++ b/kpops/cli/main.py @@ -372,7 +372,7 @@ def clean( def version_callback(show_version: bool) -> None: if show_version: typer.echo(f"KPOps {__version__}") - raise typer.Exit() + raise typer.Exit @app.callback() diff --git a/kpops/component_handlers/helm_wrapper/helm.py b/kpops/component_handlers/helm_wrapper/helm.py index f717f2e6d..821e26649 100644 --- a/kpops/component_handlers/helm_wrapper/helm.py +++ b/kpops/component_handlers/helm_wrapper/helm.py @@ -235,7 +235,7 @@ def parse_helm_command_stderr_output(stderr: str) -> None: for line in stderr.splitlines(): lower = line.lower() if "release: not found" in lower: - raise ReleaseNotFoundException() + raise ReleaseNotFoundException elif "error" in lower: raise RuntimeError(stderr) elif "warning" in lower: diff --git a/kpops/component_handlers/kafka_connect/connect_wrapper.py b/kpops/component_handlers/kafka_connect/connect_wrapper.py index 7f81abb56..df1a36bd7 100644 --- a/kpops/component_handlers/kafka_connect/connect_wrapper.py +++ b/kpops/component_handlers/kafka_connect/connect_wrapper.py @@ -79,7 +79,7 @@ def get_connector(self, connector_name: str) -> KafkaConnectResponse: return KafkaConnectResponse(**response.json()) elif response.status_code == httpx.codes.NOT_FOUND: log.info(f"The named connector {connector_name} does not exists.") - raise ConnectorNotFoundException() + raise ConnectorNotFoundException elif response.status_code == httpx.codes.CONFLICT: log.warning( "Rebalancing in progress while getting a connector... Retrying...", @@ -170,7 +170,7 @@ def delete_connector(self, connector_name: str) -> None: return elif response.status_code == httpx.codes.NOT_FOUND: log.info(f"The named connector {connector_name} does not exists.") - raise ConnectorNotFoundException() + raise ConnectorNotFoundException elif response.status_code == httpx.codes.CONFLICT: log.warning( "Rebalancing in progress while deleting a connector... 
Retrying...", diff --git a/kpops/component_handlers/topic/proxy_wrapper.py b/kpops/component_handlers/topic/proxy_wrapper.py index 0d7ad2ebb..9eb706b96 100644 --- a/kpops/component_handlers/topic/proxy_wrapper.py +++ b/kpops/component_handlers/topic/proxy_wrapper.py @@ -122,7 +122,7 @@ def get_topic(self, topic_name: str) -> TopicResponse: ): log.debug(f"Topic {topic_name} not found.") log.debug(response.json()) - raise TopicNotFoundException() + raise TopicNotFoundException raise KafkaRestProxyError(response) @@ -153,7 +153,7 @@ def get_topic_config(self, topic_name: str) -> TopicConfigResponse: ): log.debug(f"Configs for {topic_name} not found.") log.debug(response.json()) - raise TopicNotFoundException() + raise TopicNotFoundException raise KafkaRestProxyError(response) diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index c217a3c92..e078c63cf 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -83,7 +83,7 @@ class KafkaApp(KubernetesApp, ABC): @property def clean_up_helm_chart(self) -> str: """Helm chart used to destroy and clean this component.""" - raise NotImplementedError() + raise NotImplementedError @override def deploy(self, dry_run: bool) -> None: diff --git a/kpops/pipeline_generator/pipeline.py b/kpops/pipeline_generator/pipeline.py index b11e933fb..8dfbddf24 100644 --- a/kpops/pipeline_generator/pipeline.py +++ b/kpops/pipeline_generator/pipeline.py @@ -182,7 +182,7 @@ def parse_components(self, component_list: list[dict]) -> None: msg, ) from ex else: - raise ParsingException() from ex + raise ParsingException from ex def apply_component( self, component_class: type[PipelineComponent], component_data: dict, From f90dd22bbd64823471395123105995d2cf20e5d9 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 01:02:25 +0300 Subject: [PATCH 27/76] ci(ruff-return): autofix "RET" --- kpops/component_handlers/kafka_connect/timeout.py | 3 +-- kpops/components/base_components/base_defaults_component.py | 3 +-- kpops/components/base_components/kafka_connector.py | 3 +-- kpops/pipeline_generator/pipeline.py | 3 +-- 4 files changed, 4 insertions(+), 8 deletions(-) diff --git a/kpops/component_handlers/kafka_connect/timeout.py b/kpops/component_handlers/kafka_connect/timeout.py index cc9b0d127..8320df648 100644 --- a/kpops/component_handlers/kafka_connect/timeout.py +++ b/kpops/component_handlers/kafka_connect/timeout.py @@ -26,8 +26,7 @@ async def main_supervisor(func: Callable[..., T], secs: int) -> T: loop = asyncio.get_event_loop() try: - complete = loop.run_until_complete(main_supervisor(func, secs)) - return complete + return loop.run_until_complete(main_supervisor(func, secs)) except TimeoutError: log.error( f"Kafka Connect operation {func.__name__} timed out after {secs} seconds. 
To increase the duration, set the `timeout` option in config.yaml.", diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index 1ae6ace3a..54ff0ea64 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -105,8 +105,7 @@ def extend_with_defaults(self, **kwargs) -> dict: defaults = load_defaults( self.__class__, main_default_file_path, environment_default_file_path, ) - kwargs = update_nested(kwargs, defaults) - return kwargs + return update_nested(kwargs, defaults) def _validate_custom(self, **kwargs) -> None: """Run custom validation on component. diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py index 584096990..f092979c6 100644 --- a/kpops/components/base_components/kafka_connector.py +++ b/kpops/components/base_components/kafka_connector.py @@ -106,8 +106,7 @@ def helm(self) -> Helm: def _resetter_release_name(self) -> str: suffix = "-clean" clean_up_release_name = self.full_name + suffix - trimmed_name = trim_release_name(clean_up_release_name, suffix) - return trimmed_name + return trim_release_name(clean_up_release_name, suffix) @property def _resetter_helm_chart(self) -> str: diff --git a/kpops/pipeline_generator/pipeline.py b/kpops/pipeline_generator/pipeline.py index 8dfbddf24..4475acf16 100644 --- a/kpops/pipeline_generator/pipeline.py +++ b/kpops/pipeline_generator/pipeline.py @@ -153,8 +153,7 @@ def load_from_yaml( msg, ) - pipeline = cls(main_content, env_content, registry, config, handlers) - return pipeline + return cls(main_content, env_content, registry, config, handlers) def parse_components(self, component_list: list[dict]) -> None: """Instantiate, enrich and inflate a list of components. 
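Note on the two preceding autofix patches: the RSE fix drops redundant call parentheses when an exception class is raised bare (likely rule RSE102), and the RET fix returns an expression directly instead of binding it to a throwaway name first (likely rule RET504). A minimal standalone sketch of both patterns, illustrative only and not taken from the diffs above:

    def get_broker(config: dict[str, str]) -> str:
        if "brokers" not in config:
            # before: raise KeyError()  -- RSE: parentheses on the raised class are unnecessary
            raise KeyError
        # before: value = config["brokers"]; return value  -- RET: return the expression directly
        return config["brokers"]

    assert get_broker({"brokers": "localhost:9092"}) == "localhost:9092"
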
From 68b824b287fcf3bee03bae694077f1f9c1814118 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 01:06:47 +0300 Subject: [PATCH 28/76] ci(ruff-simplify): autofix "SIM" --- kpops/component_handlers/topic/handler.py | 2 +- tests/compiler/test_pipeline_name.py | 36 +++++++++++------------ tests/components/test_kubernetes_app.py | 4 +-- 3 files changed, 21 insertions(+), 21 deletions(-) diff --git a/kpops/component_handlers/topic/handler.py b/kpops/component_handlers/topic/handler.py index 38de9c66b..1307717c9 100644 --- a/kpops/component_handlers/topic/handler.py +++ b/kpops/component_handlers/topic/handler.py @@ -65,7 +65,7 @@ def create_topics(self, to_section: ToSection, dry_run: bool) -> None: self.proxy_wrapper.create_topic(topic_spec=topic_spec) def delete_topics(self, to_section: ToSection, dry_run: bool) -> None: - for topic_name in to_section.topics.keys(): + for topic_name in to_section.topics: if dry_run: self.__dry_run_topic_deletion(topic_name=topic_name) else: diff --git a/tests/compiler/test_pipeline_name.py b/tests/compiler/test_pipeline_name.py index fa2d3407a..979c781e2 100644 --- a/tests/compiler/test_pipeline_name.py +++ b/tests/compiler/test_pipeline_name.py @@ -14,39 +14,39 @@ def test_should_set_pipeline_name_with_default_base_dir(): Pipeline.set_pipeline_name_env_vars(DEFAULT_BASE_DIR, PIPELINE_PATH) - assert "some-random-path-for-testing" == ENV["pipeline_name"] - assert "some" == ENV["pipeline_name_0"] - assert "random" == ENV["pipeline_name_1"] - assert "path" == ENV["pipeline_name_2"] - assert "for" == ENV["pipeline_name_3"] - assert "testing" == ENV["pipeline_name_4"] + assert ENV["pipeline_name"] == "some-random-path-for-testing" + assert ENV["pipeline_name_0"] == "some" + assert ENV["pipeline_name_1"] == "random" + assert ENV["pipeline_name_2"] == "path" + assert ENV["pipeline_name_3"] == "for" + assert ENV["pipeline_name_4"] == "testing" def test_should_set_pipeline_name_with_specific_relative_base_dir(): Pipeline.set_pipeline_name_env_vars(Path("./some/random/path"), PIPELINE_PATH) - assert "for-testing" == ENV["pipeline_name"] - assert "for" == ENV["pipeline_name_0"] - assert "testing" == ENV["pipeline_name_1"] + assert ENV["pipeline_name"] == "for-testing" + assert ENV["pipeline_name_0"] == "for" + assert ENV["pipeline_name_1"] == "testing" def test_should_set_pipeline_name_with_specific_absolute_base_dir(): Pipeline.set_pipeline_name_env_vars(Path("some/random/path"), PIPELINE_PATH) - assert "for-testing" == ENV["pipeline_name"] - assert "for" == ENV["pipeline_name_0"] - assert "testing" == ENV["pipeline_name_1"] + assert ENV["pipeline_name"] == "for-testing" + assert ENV["pipeline_name_0"] == "for" + assert ENV["pipeline_name_1"] == "testing" def test_should_set_pipeline_name_with_absolute_base_dir(): Pipeline.set_pipeline_name_env_vars(Path.cwd(), PIPELINE_PATH) - assert "some-random-path-for-testing" == ENV["pipeline_name"] - assert "some" == ENV["pipeline_name_0"] - assert "random" == ENV["pipeline_name_1"] - assert "path" == ENV["pipeline_name_2"] - assert "for" == ENV["pipeline_name_3"] - assert "testing" == ENV["pipeline_name_4"] + assert ENV["pipeline_name"] == "some-random-path-for-testing" + assert ENV["pipeline_name_0"] == "some" + assert ENV["pipeline_name_1"] == "random" + assert ENV["pipeline_name_2"] == "path" + assert ENV["pipeline_name_3"] == "for" + assert ENV["pipeline_name_4"] == "testing" def test_should_not_set_pipeline_name_with_the_same_base_dir(): diff --git a/tests/components/test_kubernetes_app.py 
b/tests/components/test_kubernetes_app.py index f06ff44ac..de8a4723d 100644 --- a/tests/components/test_kubernetes_app.py +++ b/tests/components/test_kubernetes_app.py @@ -195,8 +195,8 @@ def test_should_raise_not_implemented_error_when_helm_chart_is_not_set( kubernetes_app.deploy(True) helm_mock.add_repo.assert_called() assert ( - "Please implement the helm_chart property of the kpops.components.base_components.kubernetes_app module." - == str(error.value) + str(error.value) + == "Please implement the helm_chart property of the kpops.components.base_components.kubernetes_app module." ) def test_should_call_helm_uninstall_when_destroying_kubernetes_app( From 57fd749f6355af6738d2db61daf1df44568493be Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 01:19:21 +0300 Subject: [PATCH 29/76] ci(ruff-type-checking): disable "TCH" temporarily --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index e4d18c18c..d50add060 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -133,7 +133,7 @@ select = [ "RET", # flake8-return "SLOT", # flake8-slots "SIM", # flake8-simplify - "TCH", # flake8-type-checking + # "TCH", # flake8-type-checking, configure correctly and add "ARG", # flake8-unused-arguments "PTH", # flake8-use-pathlib "PGH", # pygrep-hooks From 57d9b501657c59ce2f7a9d13143e3ed25c6c2423 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 01:21:38 +0300 Subject: [PATCH 30/76] ci(ruff): remove "ARG" --- pyproject.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index d50add060..95ea5eecc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -134,7 +134,6 @@ select = [ "SLOT", # flake8-slots "SIM", # flake8-simplify # "TCH", # flake8-type-checking, configure correctly and add - "ARG", # flake8-unused-arguments "PTH", # flake8-use-pathlib "PGH", # pygrep-hooks "PL", # Pylint From 4e2dd33fb0390ce214f0e6dbbb08ec5dd38597fb Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 01:22:49 +0300 Subject: [PATCH 31/76] ci(ruff-pathlib): autofix "PTH" --- kpops/cli/main.py | 2 +- tests/compiler/test_pipeline_name.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/kpops/cli/main.py b/kpops/cli/main.py index 314290506..5fbbe3888 100644 --- a/kpops/cli/main.py +++ b/kpops/cli/main.py @@ -31,7 +31,7 @@ app = dtyper.Typer(pretty_exceptions_enable=False) BASE_DIR_PATH_OPTION: Path = typer.Option( - default=Path("."), + default=Path(), exists=True, dir_okay=True, file_okay=False, diff --git a/tests/compiler/test_pipeline_name.py b/tests/compiler/test_pipeline_name.py index 979c781e2..197c1310c 100644 --- a/tests/compiler/test_pipeline_name.py +++ b/tests/compiler/test_pipeline_name.py @@ -8,7 +8,7 @@ DEFAULTS_PATH = Path(__file__).parent / "resources" PIPELINE_PATH = Path("./some/random/path/for/testing/pipeline.yaml") -DEFAULT_BASE_DIR = Path(".") +DEFAULT_BASE_DIR = Path() def test_should_set_pipeline_name_with_default_base_dir(): From 2d8129e75793af6ce9f3e2d4c44b0db711b3ec18 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 01:27:14 +0300 Subject: [PATCH 32/76] chore(ruff-pathlib): manually fix code flagged by "PTH" --- kpops/cli/registry.py | 3 ++- kpops/utils/yaml_loading.py | 2 +- .../kafka_connect/test_connect_wrapper.py | 4 ++-- .../topic/test_proxy_wrapper.py | 4 ++-- .../topic/test_topic_handler.py | 20 +++++++++---------- 5 files changed, 17 insertions(+), 16 deletions(-) diff --git a/kpops/cli/registry.py 
b/kpops/cli/registry.py index 30a9c1205..8b6c99522 100644 --- a/kpops/cli/registry.py +++ b/kpops/cli/registry.py @@ -3,6 +3,7 @@ import importlib import inspect import os +from pathlib import Path import sys from collections.abc import Iterator from dataclasses import dataclass, field @@ -17,7 +18,7 @@ T = TypeVar("T") ClassDict = dict[str, type[T]] # type -> class -sys.path.append(os.getcwd()) +sys.path.append(str(Path.cwd())) @dataclass diff --git a/kpops/utils/yaml_loading.py b/kpops/utils/yaml_loading.py index d8aee8b95..36848d4d2 100644 --- a/kpops/utils/yaml_loading.py +++ b/kpops/utils/yaml_loading.py @@ -20,7 +20,7 @@ def generate_hashkey( def load_yaml_file( file_path: Path, *, substitution: Mapping[str, Any] | None = None, ) -> dict | list[dict]: - with open(file_path) as yaml_file: + with file_path.open() as yaml_file: return yaml.load(substitute(yaml_file.read(), substitution), Loader=yaml.Loader) diff --git a/tests/component_handlers/kafka_connect/test_connect_wrapper.py b/tests/component_handlers/kafka_connect/test_connect_wrapper.py index 714b263d5..afac5538d 100644 --- a/tests/component_handlers/kafka_connect/test_connect_wrapper.py +++ b/tests/component_handlers/kafka_connect/test_connect_wrapper.py @@ -495,9 +495,9 @@ def test_should_create_correct_validate_connector_config_and_name_gets_added( ) def test_should_parse_validate_connector_config(self, httpx_mock: HTTPXMock): - with open( + with Path( DEFAULTS_PATH / "connect_validation_response.json", - ) as f: + ).open() as f: actual_response = json.load(f) httpx_mock.add_response( method="PUT", diff --git a/tests/component_handlers/topic/test_proxy_wrapper.py b/tests/component_handlers/topic/test_proxy_wrapper.py index f7142f496..73e8aab4d 100644 --- a/tests/component_handlers/topic/test_proxy_wrapper.py +++ b/tests/component_handlers/topic/test_proxy_wrapper.py @@ -36,9 +36,9 @@ def _setup(self, httpx_mock: HTTPXMock): ) self.proxy_wrapper = ProxyWrapper(pipeline_config=config) - with open( + with Path( DEFAULTS_PATH / "kafka_rest_proxy_responses" / "cluster-info.json", - ) as f: + ).open() as f: cluster_response = json.load(f) httpx_mock.add_response( diff --git a/tests/component_handlers/topic/test_topic_handler.py b/tests/component_handlers/topic/test_topic_handler.py index 9678cd86f..6ca8410e2 100644 --- a/tests/component_handlers/topic/test_topic_handler.py +++ b/tests/component_handlers/topic/test_topic_handler.py @@ -51,19 +51,19 @@ def log_error_mock(self, mocker: MockerFixture) -> MagicMock: @pytest.fixture(autouse=True) def get_topic_response_mock(self) -> MagicMock: - with open( + with Path( DEFAULTS_PATH / "kafka_rest_proxy_responses/get_topic_response.json", - ) as f: + ).open() as f: response = json.load(f) - with open( + with Path( DEFAULTS_PATH / "kafka_rest_proxy_responses/broker_response.json", - ) as f: + ).open() as f: broker_response = json.load(f) - with open( + with Path( DEFAULTS_PATH / "kafka_rest_proxy_responses/topic_config_response.json", - ) as f: + ).open() as f: response_topic_config = json.load(f) wrapper = MagicMock() @@ -76,14 +76,14 @@ def get_topic_response_mock(self) -> MagicMock: @pytest.fixture(autouse=True) def get_default_topic_response_mock(self) -> MagicMock: - with open( + with Path( DEFAULTS_PATH / "kafka_rest_proxy_responses/get_default_topic_response.json", - ) as f: + ).open() as f: response = json.load(f) - with open( + with Path( DEFAULTS_PATH / "kafka_rest_proxy_responses/broker_response.json", - ) as f: + ).open() as f: broker_response = json.load(f) wrapper = 
MagicMock() From 07bf0f40872159c9296c45caf7adca2cce21e99b Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 01:33:14 +0300 Subject: [PATCH 33/76] chore(ruff-pygrep): manually fix code flagged by "PGH" --- kpops/components/base_components/base_defaults_component.py | 2 +- kpops/utils/gen_schema.py | 2 +- tests/components/test_kafka_connector.py | 6 +++--- tests/pipeline/test_components/components.py | 5 +++-- 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index 54ff0ea64..61353c895 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -77,7 +77,7 @@ def __init__(self, **kwargs) -> None: self._validate_custom(**kwargs) @cached_classproperty - def type(cls: type[Self]) -> str: # pyright: ignore + def type(cls: type[Self]) -> str: # pyright: ignore[reportGeneralTypeIssues] """Return calling component's type. :returns: Component class name in dash-case diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index e04259403..0b104064c 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -112,7 +112,7 @@ def gen_pipeline_schema( for component in components: component.__fields__["type"] = ModelField( name="type", - type_=Literal[component.type], # type: ignore + type_=Literal[component.type], # type: ignore[reportGeneralTypeIssues] required=False, default=component.type, final=True, diff --git a/tests/components/test_kafka_connector.py b/tests/components/test_kafka_connector.py index b20e97a63..fea299cf2 100644 --- a/tests/components/test_kafka_connector.py +++ b/tests/components/test_kafka_connector.py @@ -79,7 +79,7 @@ def test_connector_config_name_override( name=CONNECTOR_NAME, config=config, handlers=handlers, - app={"connector.class": CONNECTOR_CLASS}, # type: ignore + app={"connector.class": CONNECTOR_CLASS}, # type: ignore[reportGeneralTypeIssues] namespace="test-namespace", ) assert connector.app.name == CONNECTOR_FULL_NAME @@ -91,7 +91,7 @@ def test_connector_config_name_override( name=CONNECTOR_NAME, config=config, handlers=handlers, - app={"connector.class": CONNECTOR_CLASS, "name": "different-name"}, # type: ignore + app={"connector.class": CONNECTOR_CLASS, "name": "different-name"}, # type: ignore[reportGeneralTypeIssues] namespace="test-namespace", ) @@ -102,6 +102,6 @@ def test_connector_config_name_override( name=CONNECTOR_NAME, config=config, handlers=handlers, - app={"connector.class": CONNECTOR_CLASS, "name": ""}, # type: ignore + app={"connector.class": CONNECTOR_CLASS, "name": ""}, # type: ignore[reportGeneralTypeIssues] namespace="test-namespace", ) diff --git a/tests/pipeline/test_components/components.py b/tests/pipeline/test_components/components.py index 0432a2184..ddbefad04 100644 --- a/tests/pipeline/test_components/components.py +++ b/tests/pipeline/test_components/components.py @@ -44,7 +44,8 @@ def inflate(self) -> list[PipelineComponent]: config=self.config, handlers=self.handlers, namespace="example-namespace", - app={ # type: ignore # FIXME + # FIXME + app={ # type: ignore[reportGeneralTypeIssues] "topics": topic_name, "transforms.changeTopic.replacement": f"{topic_name}-index-v1", }, @@ -64,7 +65,7 @@ def inflate(self) -> list[PipelineComponent]: name=f"{self.name}-inflated-streams-app", config=self.config, handlers=self.handlers, - to=ToSection( # type: ignore + to=ToSection( # type: 
ignore[reportGeneralTypeIssues] topics={ TopicName( f"{self.full_name}-" + "${component_name}", From 86643423761e6515658fd5024f5afe5ed7fc1188 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 01:35:07 +0300 Subject: [PATCH 34/76] ci(ruff-pylint): autofix "PL" --- tests/cli/test_schema_generation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/cli/test_schema_generation.py b/tests/cli/test_schema_generation.py index daf474b7c..4c0da3c37 100644 --- a/tests/cli/test_schema_generation.py +++ b/tests/cli/test_schema_generation.py @@ -9,7 +9,7 @@ from snapshottest.module import SnapshotTest from typer.testing import CliRunner -import tests.cli.resources.empty_module as empty_module +from tests.cli.resources import empty_module from kpops.cli.main import app from kpops.components.base_components import PipelineComponent from kpops.utils.docstring import describe_attr From 0d65ea8209acf0f985643eef603f727c2b35b52a Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 01:46:32 +0300 Subject: [PATCH 35/76] ci(ruff): ignore "PLW2901" --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 95ea5eecc..2b667a2d7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -101,6 +101,7 @@ ignore = [ "RET508", # Unnecessary {branch} after break statement -- Lots of false positives "PLR09", # upper bound on number of arguments, functions, etc. -- Inconvenient to enforce "PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable -- Inconvenient to enforce + "PLW2901", # `for` loop variable `{var}` overwritten by assignment target -- Inconvenient to enforce "TRY002", # Create your own exception -- Inconvenient to enforce "TRY003", # Avoid specifying long messages outside the exception class -- Inconvenient to enforce ] From 54d4b4e2d241eaaf2ff8b65286ee240f449bfd29 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 01:46:59 +0300 Subject: [PATCH 36/76] chore(ruff-pylint): manually fix code flagged by "PL" --- hooks/gen_docs/gen_docs_cli_usage.py | 2 +- kpops/component_handlers/helm_wrapper/helm.py | 1 + kpops/utils/dict_differ.py | 6 +++--- kpops/utils/dict_ops.py | 5 ++--- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/hooks/gen_docs/gen_docs_cli_usage.py b/hooks/gen_docs/gen_docs_cli_usage.py index f03c4fd62..469274745 100644 --- a/hooks/gen_docs/gen_docs_cli_usage.py +++ b/hooks/gen_docs/gen_docs_cli_usage.py @@ -21,7 +21,7 @@ "--output", str(PATH_CLI_COMMANDS_DOC), ] - subprocess.run(typer_args) + subprocess.run(typer_args, check=True, capture_output=True) # Replace wrong title in CLI Usage doc with PATH_CLI_COMMANDS_DOC.open("r") as f: diff --git a/kpops/component_handlers/helm_wrapper/helm.py b/kpops/component_handlers/helm_wrapper/helm.py index 821e26649..bc75011eb 100644 --- a/kpops/component_handlers/helm_wrapper/helm.py +++ b/kpops/component_handlers/helm_wrapper/helm.py @@ -214,6 +214,7 @@ def __execute(self, command: list[str]) -> str: log.debug(f"Executing {' '.join(command)}") process = subprocess.run( command, + check=True, capture_output=True, text=True, ) diff --git a/kpops/utils/dict_differ.py b/kpops/utils/dict_differ.py index 005cd1e71..b5760ed33 100644 --- a/kpops/utils/dict_differ.py +++ b/kpops/utils/dict_differ.py @@ -55,9 +55,9 @@ def from_dicts( d1: dict, d2: dict, ignore: set[str] | None = None, ) -> Iterator[Diff]: for diff_type, keys, changes in diff(d1, d2, ignore=ignore): - if not isinstance(changes, list): - 
changes = [("", changes)] - for key, change in changes: + if not isinstance(changes_tmp:=changes, list): + changes_tmp = [("", changes)] + for key, change in changes_tmp: yield Diff( DiffType.from_str(diff_type), Diff.__find_changed_key(keys, key), diff --git a/kpops/utils/dict_ops.py b/kpops/utils/dict_ops.py index c52ae8e42..e4adf81cd 100644 --- a/kpops/utils/dict_ops.py +++ b/kpops/utils/dict_ops.py @@ -20,9 +20,8 @@ def update_nested_pair(original_dict: dict, other_dict: Mapping) -> dict: nested_val = original_dict.get(key, {}) if isinstance(nested_val, dict): original_dict[key] = update_nested_pair(nested_val, value) - else: - if key not in original_dict: - original_dict[key] = value + elif key not in original_dict: + original_dict[key] = value return original_dict From 8be02631164d2502b34f1ecd282d868354227f6d Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 01:49:06 +0300 Subject: [PATCH 37/76] ci(ruff-tryceratops): autofix "TRY" --- kpops/component_handlers/helm_wrapper/helm.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kpops/component_handlers/helm_wrapper/helm.py b/kpops/component_handlers/helm_wrapper/helm.py index bc75011eb..287c250e5 100644 --- a/kpops/component_handlers/helm_wrapper/helm.py +++ b/kpops/component_handlers/helm_wrapper/helm.py @@ -64,7 +64,7 @@ def add_repo( ): log.error(f"Could not add repository {repository_name}. {e}") else: - raise e + raise if self._version.minor > 7: self.__execute(["helm", "repo", "update", repository_name]) From e73f9ab1c80b54ad67c67d726cf34c2c1a92def6 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 01:51:37 +0300 Subject: [PATCH 38/76] chore(ruff-tryceratops): manually fix code flagged by "TRY" --- kpops/component_handlers/helm_wrapper/helm.py | 2 +- kpops/component_handlers/kafka_connect/timeout.py | 2 +- kpops/component_handlers/utils/exception.py | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/kpops/component_handlers/helm_wrapper/helm.py b/kpops/component_handlers/helm_wrapper/helm.py index 287c250e5..0086f3d40 100644 --- a/kpops/component_handlers/helm_wrapper/helm.py +++ b/kpops/component_handlers/helm_wrapper/helm.py @@ -62,7 +62,7 @@ def add_repo( ) is not None ): - log.error(f"Could not add repository {repository_name}. {e}") + log.exception(f"Could not add repository {repository_name}.") else: raise diff --git a/kpops/component_handlers/kafka_connect/timeout.py b/kpops/component_handlers/kafka_connect/timeout.py index 8320df648..8dc7c865a 100644 --- a/kpops/component_handlers/kafka_connect/timeout.py +++ b/kpops/component_handlers/kafka_connect/timeout.py @@ -28,6 +28,6 @@ async def main_supervisor(func: Callable[..., T], secs: int) -> T: try: return loop.run_until_complete(main_supervisor(func, secs)) except TimeoutError: - log.error( + log.exception( f"Kafka Connect operation {func.__name__} timed out after {secs} seconds. To increase the duration, set the `timeout` option in config.yaml.", ) diff --git a/kpops/component_handlers/utils/exception.py b/kpops/component_handlers/utils/exception.py index 00bdca315..5de7f7717 100644 --- a/kpops/component_handlers/utils/exception.py +++ b/kpops/component_handlers/utils/exception.py @@ -10,11 +10,11 @@ def __init__(self, response: httpx.Response) -> None: self.error_code = response.status_code self.error_msg = "Something went wrong!" try: - log.error( + log.exception( f"The request responded with the code {self.error_code}. 
Error body: {response.json()}", ) response.raise_for_status() except httpx.HTTPError as e: self.error_msg = str(e) - log.error(f"More information: {self.error_msg}") + log.exception(f"More information: {self.error_msg}") super().__init__() From 794c60d0e547006bdb93855ed44d215d8d70a757 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 01:53:57 +0300 Subject: [PATCH 39/76] chore(pre-commit): pass pre-commit checks --- docs/docs/schema/config.json | 2 +- docs/docs/schema/pipeline.json | 44 +- kpops/cli/custom_formatter.py | 4 +- kpops/cli/main.py | 55 +- kpops/cli/pipeline_config.py | 3 +- kpops/cli/registry.py | 3 +- kpops/component_handlers/helm_wrapper/helm.py | 3 +- .../helm_wrapper/helm_diff.py | 3 +- .../component_handlers/helm_wrapper/model.py | 25 +- .../kafka_connect/connect_wrapper.py | 21 +- .../kafka_connect/kafka_connect_handler.py | 8 +- .../kafka_connect/timeout.py | 2 +- .../schema_handler/schema_handler.py | 23 +- .../schema_handler/schema_provider.py | 4 +- kpops/component_handlers/topic/handler.py | 13 +- kpops/component_handlers/topic/utils.py | 3 +- .../base_defaults_component.py | 17 +- kpops/components/base_components/kafka_app.py | 23 +- .../base_components/kafka_connector.py | 23 +- .../base_components/kubernetes_app.py | 4 +- .../base_components/models/from_section.py | 3 +- .../base_components/models/to_section.py | 13 +- .../streams_bootstrap/producer/model.py | 9 +- .../streams_bootstrap/streams/model.py | 30 +- kpops/pipeline_generator/pipeline.py | 10 +- kpops/utils/dict_differ.py | 8 +- kpops/utils/dict_ops.py | 6 +- kpops/utils/gen_schema.py | 19 +- kpops/utils/yaml_loading.py | 7 +- tests/cli/resources/module.py | 4 +- .../snapshots/snap_test_schema_generation.py | 7 +- tests/cli/test_pipeline_steps.py | 8 +- tests/cli/test_schema_generation.py | 10 +- tests/compiler/test_pipeline_name.py | 5 +- .../helm_wrapper/test_dry_run_handler.py | 6 +- .../helm_wrapper/test_helm_wrapper.py | 49 +- .../kafka_connect/test_connect_handler.py | 10 +- .../kafka_connect/test_connect_wrapper.py | 40 +- .../schema_handler/resources/module.py | 4 +- .../schema_handler/test_schema_handler.py | 65 +- .../topic/test_proxy_wrapper.py | 28 +- .../topic/test_topic_handler.py | 40 +- .../test_base_defaults_component.py | 20 +- tests/components/test_kafka_app.py | 3 +- tests/components/test_kafka_connector.py | 6 +- tests/components/test_kafka_sink_connector.py | 33 +- .../components/test_kafka_source_connector.py | 33 +- tests/components/test_kubernetes_app.py | 13 +- tests/components/test_producer_app.py | 33 +- tests/components/test_streams_app.py | 73 +- tests/pipeline/snapshots/snap_test_example.py | 579 ++- .../pipeline/snapshots/snap_test_pipeline.py | 4307 ++++++++--------- tests/pipeline/test_components/components.py | 7 +- tests/pipeline/test_pipeline.py | 3 +- tests/utils/test_environment.py | 3 +- 55 files changed, 3051 insertions(+), 2726 deletions(-) diff --git a/docs/docs/schema/config.json b/docs/docs/schema/config.json index b77b4e850..a2f18eb6b 100644 --- a/docs/docs/schema/config.json +++ b/docs/docs/schema/config.json @@ -2,7 +2,7 @@ "$ref": "#/definitions/PipelineConfig", "definitions": { "HelmConfig": { - "description": "Global Helm configuration", + "description": "Global Helm configuration.", "properties": { "api_version": { "description": "Kubernetes API version used for Capabilities.APIVersions", diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index 9695ec9a2..7e77b0ddd 100644 --- a/docs/docs/schema/pipeline.json 
+++ b/docs/docs/schema/pipeline.json @@ -2,7 +2,7 @@ "definitions": { "FromSection": { "additionalProperties": false, - "description": "Holds multiple input topics", + "description": "Holds multiple input topics.", "properties": { "components": { "additionalProperties": { @@ -28,7 +28,7 @@ }, "FromTopic": { "additionalProperties": false, - "description": "Input topic", + "description": "Input topic.", "properties": { "role": { "description": "Custom identifier belonging to a topic; define only if `type` is `pattern` or `None`", @@ -48,7 +48,7 @@ "type": "object" }, "HelmRepoConfig": { - "description": "Helm repository configuration", + "description": "Helm repository configuration.", "properties": { "repo_auth_flags": { "allOf": [ @@ -85,7 +85,7 @@ "type": "object" }, "InputTopicTypes": { - "description": "Input topic types\n\nINPUT (input topic), PATTERN (extra-topic-pattern or input-topic-pattern)", + "description": "Input topic types.\n\nINPUT (input topic), PATTERN (extra-topic-pattern or input-topic-pattern)", "enum": [ "input", "pattern" @@ -97,7 +97,7 @@ "additionalProperties": { "type": "string" }, - "description": "Settings specific to Kafka Connectors", + "description": "Settings specific to Kafka Connectors.", "properties": { "connector.class": { "title": "Connector.Class", @@ -111,7 +111,7 @@ "type": "object" }, "KafkaSinkConnector": { - "description": "Kafka sink connector model", + "description": "Kafka sink connector model.", "properties": { "app": { "allOf": [ @@ -183,7 +183,7 @@ }, "type": { "default": "kafka-sink-connector", - "description": "Kafka sink connector model", + "description": "Kafka sink connector model.", "enum": [ "kafka-sink-connector" ], @@ -206,7 +206,7 @@ "type": "object" }, "KafkaSourceConnector": { - "description": "Kafka source connector model", + "description": "Kafka source connector model.", "properties": { "app": { "allOf": [ @@ -283,7 +283,7 @@ }, "type": { "default": "kafka-source-connector", - "description": "Kafka source connector model", + "description": "Kafka source connector model.", "enum": [ "kafka-source-connector" ], @@ -384,13 +384,13 @@ "type": "object" }, "KubernetesAppConfig": { - "description": "Settings specific to Kubernetes Apps", + "description": "Settings specific to Kubernetes Apps.", "properties": {}, "title": "KubernetesAppConfig", "type": "object" }, "OutputTopicTypes": { - "description": "Types of output topic\n\nOUTPUT (output topic), ERROR (error topic)", + "description": "Types of output topic.\n\nOUTPUT (output topic), ERROR (error topic)", "enum": [ "output", "error" @@ -399,7 +399,7 @@ "type": "string" }, "ProducerApp": { - "description": "Producer component\nThis producer holds configuration to use as values for the streams bootstrap producer helm chart. Note that the producer does not support error topics.", + "description": "Producer component.\nThis producer holds configuration to use as values for the streams bootstrap producer helm chart. Note that the producer does not support error topics.", "properties": { "app": { "allOf": [ @@ -462,7 +462,7 @@ }, "type": { "default": "producer-app", - "description": "Producer component\nThis producer holds configuration to use as values for the streams bootstrap producer helm chart. Note that the producer does not support error topics.", + "description": "Producer component.\nThis producer holds configuration to use as values for the streams bootstrap producer helm chart. 
Note that the producer does not support error topics.", "enum": [ "producer-app" ], @@ -485,7 +485,7 @@ "type": "object" }, "ProducerStreamsConfig": { - "description": "Kafka Streams settings specific to Producer", + "description": "Kafka Streams settings specific to Producer.", "properties": { "brokers": { "description": "Brokers", @@ -519,7 +519,7 @@ "type": "object" }, "ProducerValues": { - "description": "Settings specific to producers", + "description": "Settings specific to producers.", "properties": { "nameOverride": { "description": "Override name with this value", @@ -543,7 +543,7 @@ "type": "object" }, "RepoAuthFlags": { - "description": "Authorisation-related flags for `helm repo`", + "description": "Authorisation-related flags for `helm repo`.", "properties": { "ca_file": { "description": "Path to CA bundle file to verify certificates of HTTPS-enabled servers", @@ -578,7 +578,7 @@ "type": "object" }, "StreamsApp": { - "description": "StreamsApp component that configures a streams bootstrap app", + "description": "StreamsApp component that configures a streams bootstrap app.", "properties": { "app": { "allOf": [ @@ -645,7 +645,7 @@ }, "type": { "default": "streams-app", - "description": "StreamsApp component that configures a streams bootstrap app", + "description": "StreamsApp component that configures a streams bootstrap app.", "enum": [ "streams-app" ], @@ -668,7 +668,7 @@ "type": "object" }, "StreamsAppAutoScaling": { - "description": "Kubernetes Event-driven Autoscaling config", + "description": "Kubernetes Event-driven Autoscaling config.", "properties": { "consumerGroup": { "description": "Name of the consumer group used for checking the offset on the topic and processing the related lag.", @@ -771,7 +771,7 @@ "type": "object" }, "StreamsConfig": { - "description": "Streams Bootstrap streams section", + "description": "Streams Bootstrap streams section.", "properties": { "brokers": { "description": "Brokers", @@ -854,7 +854,7 @@ "type": "object" }, "ToSection": { - "description": "Holds multiple output topics", + "description": "Holds multiple output topics.", "properties": { "models": { "additionalProperties": { @@ -880,7 +880,7 @@ }, "TopicConfig": { "additionalProperties": false, - "description": "Configure an output topic", + "description": "Configure an output topic.", "properties": { "configs": { "additionalProperties": { diff --git a/kpops/cli/custom_formatter.py b/kpops/cli/custom_formatter.py index ef977d24f..fb5e44057 100644 --- a/kpops/cli/custom_formatter.py +++ b/kpops/cli/custom_formatter.py @@ -16,7 +16,9 @@ def format(self, record): logging.WARNING: typer.style(message_format, fg=typer.colors.YELLOW), logging.ERROR: typer.style(message_format, fg=typer.colors.RED), logging.CRITICAL: typer.style( - message_format, fg=typer.colors.RED, bold=True, + message_format, + fg=typer.colors.RED, + bold=True, ), } diff --git a/kpops/cli/main.py b/kpops/cli/main.py index 5fbbe3888..cc5ebf65d 100644 --- a/kpops/cli/main.py +++ b/kpops/cli/main.py @@ -1,10 +1,10 @@ from __future__ import annotations import logging +from collections.abc import Iterator from enum import Enum from pathlib import Path from typing import TYPE_CHECKING, Optional -from collections.abc import Iterator import dtyper import typer @@ -121,12 +121,17 @@ def setup_pipeline( handlers = setup_handlers(components_module, pipeline_config) return Pipeline.load_from_yaml( - pipeline_base_dir, pipeline_path, registry, pipeline_config, handlers, + pipeline_base_dir, + pipeline_path, + registry, + 
pipeline_config, + handlers, ) def setup_handlers( - components_module: str | None, config: PipelineConfig, + components_module: str | None, + config: PipelineConfig, ) -> ComponentHandlers: schema_handler = SchemaHandler.load_schema_handler(components_module, config) connector_handler = KafkaConnectHandler.from_pipeline_config(config) @@ -149,7 +154,9 @@ def get_step_names(steps_to_apply: list[PipelineComponent]) -> list[str]: def filter_steps_to_apply( - pipeline: Pipeline, steps: set[str], filter_type: FilterType, + pipeline: Pipeline, + steps: set[str], + filter_type: FilterType, ) -> list[PipelineComponent]: def is_in_steps(component: PipelineComponent) -> bool: return component.name in steps @@ -171,7 +178,9 @@ def is_in_steps(component: PipelineComponent) -> bool: def get_steps_to_apply( - pipeline: Pipeline, steps: str | None, filter_type: FilterType, + pipeline: Pipeline, + steps: str | None, + filter_type: FilterType, ) -> list[PipelineComponent]: if steps: return filter_steps_to_apply(pipeline, parse_steps(steps), filter_type) @@ -179,7 +188,9 @@ def get_steps_to_apply( def reverse_pipeline_steps( - pipeline: Pipeline, steps: str | None, filter_type: FilterType, + pipeline: Pipeline, + steps: str | None, + filter_type: FilterType, ) -> Iterator[PipelineComponent]: return reversed(get_steps_to_apply(pipeline, steps, filter_type)) @@ -193,7 +204,9 @@ def log_action(action: str, pipeline_component: PipelineComponent): def create_pipeline_config( - config: Path, defaults: Optional[Path], verbose: bool, + config: Path, + defaults: Optional[Path], + verbose: bool, ) -> PipelineConfig: setup_logging_level(verbose) PipelineConfig.Config.config_path = config @@ -225,7 +238,8 @@ def schema( ), components_module: Optional[str] = COMPONENTS_MODULES, include_stock_components: bool = typer.Option( - default=True, help="Include the built-in KPOps components.", + default=True, + help="Include the built-in KPOps components.", ), ) -> None: match scope: @@ -251,7 +265,10 @@ def generate( ) -> Pipeline: pipeline_config = create_pipeline_config(config, defaults, verbose) pipeline = setup_pipeline( - pipeline_base_dir, pipeline_path, components_module, pipeline_config, + pipeline_base_dir, + pipeline_path, + components_module, + pipeline_config, ) if not template: @@ -286,7 +303,10 @@ def deploy( ): pipeline_config = create_pipeline_config(config, defaults, verbose) pipeline = setup_pipeline( - pipeline_base_dir, pipeline_path, components_module, pipeline_config, + pipeline_base_dir, + pipeline_path, + components_module, + pipeline_config, ) steps_to_apply = get_steps_to_apply(pipeline, steps, filter_type) @@ -311,7 +331,10 @@ def destroy( ): pipeline_config = create_pipeline_config(config, defaults, verbose) pipeline = setup_pipeline( - pipeline_base_dir, pipeline_path, components_module, pipeline_config, + pipeline_base_dir, + pipeline_path, + components_module, + pipeline_config, ) pipeline_steps = reverse_pipeline_steps(pipeline, steps, filter_type) for component in pipeline_steps: @@ -335,7 +358,10 @@ def reset( ): pipeline_config = create_pipeline_config(config, defaults, verbose) pipeline = setup_pipeline( - pipeline_base_dir, pipeline_path, components_module, pipeline_config, + pipeline_base_dir, + pipeline_path, + components_module, + pipeline_config, ) pipeline_steps = reverse_pipeline_steps(pipeline, steps, filter_type) for component in pipeline_steps: @@ -360,7 +386,10 @@ def clean( ): pipeline_config = create_pipeline_config(config, defaults, verbose) pipeline = setup_pipeline( - 
pipeline_base_dir, pipeline_path, components_module, pipeline_config, + pipeline_base_dir, + pipeline_path, + components_module, + pipeline_config, ) pipeline_steps = reverse_pipeline_steps(pipeline, steps, filter_type) for component in pipeline_steps: diff --git a/kpops/cli/pipeline_config.py b/kpops/cli/pipeline_config.py index eb30b7c99..58e731db2 100644 --- a/kpops/cli/pipeline_config.py +++ b/kpops/cli/pipeline_config.py @@ -112,7 +112,8 @@ def customise_sources( env_settings: SettingsSourceCallable, file_secret_settings: SettingsSourceCallable, ) -> tuple[ - SettingsSourceCallable | Callable[[PipelineConfig], dict[str, Any]], ..., + SettingsSourceCallable | Callable[[PipelineConfig], dict[str, Any]], + ..., ]: return ( env_settings, diff --git a/kpops/cli/registry.py b/kpops/cli/registry.py index 8b6c99522..838c736d7 100644 --- a/kpops/cli/registry.py +++ b/kpops/cli/registry.py @@ -2,11 +2,10 @@ import importlib import inspect -import os -from pathlib import Path import sys from collections.abc import Iterator from dataclasses import dataclass, field +from pathlib import Path from typing import TypeVar from kpops import __name__ diff --git a/kpops/component_handlers/helm_wrapper/helm.py b/kpops/component_handlers/helm_wrapper/helm.py index 0086f3d40..0dd6b26bc 100644 --- a/kpops/component_handlers/helm_wrapper/helm.py +++ b/kpops/component_handlers/helm_wrapper/helm.py @@ -4,8 +4,7 @@ import re import subprocess import tempfile -from collections.abc import Iterator -from collections.abc import Iterable +from collections.abc import Iterable, Iterator import yaml diff --git a/kpops/component_handlers/helm_wrapper/helm_diff.py b/kpops/component_handlers/helm_wrapper/helm_diff.py index 74e2bd36b..26de5613a 100644 --- a/kpops/component_handlers/helm_wrapper/helm_diff.py +++ b/kpops/component_handlers/helm_wrapper/helm_diff.py @@ -1,6 +1,5 @@ import logging -from collections.abc import Iterator -from collections.abc import Iterable +from collections.abc import Iterable, Iterator from kpops.component_handlers.helm_wrapper.model import HelmDiffConfig, HelmTemplate from kpops.utils.dict_differ import Change, render_diff diff --git a/kpops/component_handlers/helm_wrapper/model.py b/kpops/component_handlers/helm_wrapper/model.py index 8c6c09c32..dce229fa0 100644 --- a/kpops/component_handlers/helm_wrapper/model.py +++ b/kpops/component_handlers/helm_wrapper/model.py @@ -1,6 +1,6 @@ +from collections.abc import Iterator from dataclasses import dataclass from pathlib import Path -from collections.abc import Iterator import yaml from pydantic import BaseConfig, BaseModel, Extra, Field @@ -31,19 +31,24 @@ class RepoAuthFlags(BaseModel): """ username: str | None = Field( - default=None, description=describe_attr("username", __doc__), + default=None, + description=describe_attr("username", __doc__), ) password: str | None = Field( - default=None, description=describe_attr("password", __doc__), + default=None, + description=describe_attr("password", __doc__), ) ca_file: Path | None = Field( - default=None, description=describe_attr("ca_file", __doc__), + default=None, + description=describe_attr("ca_file", __doc__), ) cert_file: Path | None = Field( - default=None, description=describe_attr("cert_file", __doc__), + default=None, + description=describe_attr("cert_file", __doc__), ) insecure_skip_tls_verify: bool = Field( - default=False, description=describe_attr("insecure_skip_tls_verify", __doc__), + default=False, + description=describe_attr("insecure_skip_tls_verify", __doc__), ) class 
Config(DescConfig): @@ -73,11 +78,13 @@ class HelmRepoConfig(BaseModel): """ repository_name: str = Field( - default=..., description=describe_attr("repository_name", __doc__), + default=..., + description=describe_attr("repository_name", __doc__), ) url: str = Field(default=..., description=describe_attr("url", __doc__)) repo_auth_flags: RepoAuthFlags = Field( - default=RepoAuthFlags(), description=describe_attr("repo_auth_flags", __doc__), + default=RepoAuthFlags(), + description=describe_attr("repo_auth_flags", __doc__), ) class Config(DescConfig): @@ -207,7 +214,7 @@ def __iter__(self) -> Iterator[str]: @property def manifest(self) -> str: """Reads the manifest section of Helm stdout. - + `helm upgrade --install` output message contains three sections in the following order: - HOOKS diff --git a/kpops/component_handlers/kafka_connect/connect_wrapper.py b/kpops/component_handlers/kafka_connect/connect_wrapper.py index df1a36bd7..35c043dd1 100644 --- a/kpops/component_handlers/kafka_connect/connect_wrapper.py +++ b/kpops/component_handlers/kafka_connect/connect_wrapper.py @@ -36,7 +36,8 @@ def host(self) -> str: return self._host def create_connector( - self, connector_config: KafkaConnectorConfig, + self, + connector_config: KafkaConnectorConfig, ) -> KafkaConnectResponse: """Create a new connector. @@ -47,7 +48,9 @@ def create_connector( config_json = connector_config.dict() connect_data = {"name": connector_config.name, "config": config_json} response = httpx.post( - url=f"{self._host}/connectors", headers=HEADERS, json=connect_data, + url=f"{self._host}/connectors", + headers=HEADERS, + json=connect_data, ) if response.status_code == httpx.codes.CREATED: log.info(f"Connector {connector_config.name} created.") @@ -71,7 +74,8 @@ def get_connector(self, connector_name: str) -> KafkaConnectResponse: :return: Information about the connector. """ response = httpx.get( - url=f"{self._host}/connectors/{connector_name}", headers=HEADERS, + url=f"{self._host}/connectors/{connector_name}", + headers=HEADERS, ) if response.status_code == httpx.codes.OK: log.info(f"Connector {connector_name} exists.") @@ -89,10 +93,11 @@ def get_connector(self, connector_name: str) -> KafkaConnectResponse: raise KafkaConnectError(response) def update_connector_config( - self, connector_config: KafkaConnectorConfig, + self, + connector_config: KafkaConnectorConfig, ) -> KafkaConnectResponse: """Create or update a connector. - + Create a new connector using the given configuration,or update the configuration for an existing connector. @@ -124,7 +129,8 @@ def update_connector_config( raise KafkaConnectError(response) def validate_connector_config( - self, connector_config: KafkaConnectorConfig, + self, + connector_config: KafkaConnectorConfig, ) -> list[str]: """Validate connector config using the given configuration. 
@@ -163,7 +169,8 @@ def delete_connector(self, connector_name: str) -> None: :raises ConnectorNotFoundException: Connector not found """ response = httpx.delete( - url=f"{self._host}/connectors/{connector_name}", headers=HEADERS, + url=f"{self._host}/connectors/{connector_name}", + headers=HEADERS, ) if response.status_code == httpx.codes.NO_CONTENT: log.info(f"Connector {connector_name} deleted.") diff --git a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py index 27aad212f..4c21f7127 100644 --- a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py +++ b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py @@ -34,7 +34,10 @@ def __init__( self._timeout = timeout def create_connector( - self, connector_config: KafkaConnectorConfig, *, dry_run: bool, + self, + connector_config: KafkaConnectorConfig, + *, + dry_run: bool, ) -> None: """Create a connector. @@ -90,7 +93,8 @@ def destroy_connector(self, connector_name: str, *, dry_run: bool) -> None: ) def __dry_run_connector_creation( - self, connector_config: KafkaConnectorConfig, + self, + connector_config: KafkaConnectorConfig, ) -> None: connector_name = connector_config.name try: diff --git a/kpops/component_handlers/kafka_connect/timeout.py b/kpops/component_handlers/kafka_connect/timeout.py index 8dc7c865a..398ace4e4 100644 --- a/kpops/component_handlers/kafka_connect/timeout.py +++ b/kpops/component_handlers/kafka_connect/timeout.py @@ -1,8 +1,8 @@ import asyncio import logging from asyncio import TimeoutError -from typing import TypeVar from collections.abc import Callable +from typing import TypeVar log = logging.getLogger("Timeout") diff --git a/kpops/component_handlers/schema_handler/schema_handler.py b/kpops/component_handlers/schema_handler/schema_handler.py index 9bf068438..b402c6024 100644 --- a/kpops/component_handlers/schema_handler/schema_handler.py +++ b/kpops/component_handlers/schema_handler/schema_handler.py @@ -43,7 +43,9 @@ def schema_provider(self) -> SchemaProvider: @classmethod def load_schema_handler( - cls, components_module: str | None, config: PipelineConfig, + cls, + components_module: str | None, + config: PipelineConfig, ) -> SchemaHandler | None: if not config.schema_registry_url: return None @@ -59,14 +61,19 @@ def submit_schemas(self, to_section: ToSection, dry_run: bool = True) -> None: key_schema_class = config.key_schema if value_schema_class is not None: schema = self.schema_provider.provide_schema( - value_schema_class, to_section.models, + value_schema_class, + to_section.models, ) self.__submit_value_schema( - schema, value_schema_class, dry_run, topic_name, + schema, + value_schema_class, + dry_run, + topic_name, ) if key_schema_class is not None: schema = self.schema_provider.provide_schema( - key_schema_class, to_section.models, + key_schema_class, + to_section.models, ) self.__submit_key_schema(schema, key_schema_class, dry_run, topic_name) @@ -133,12 +140,16 @@ def __subject_exists(self, subject: str) -> bool: return len(self.schema_registry_client.get_versions(subject)) > 0 def __check_compatibility( - self, schema: Schema, schema_class: str, subject: str, + self, + schema: Schema, + schema_class: str, + subject: str, ) -> None: registered_version = self.schema_registry_client.check_version(subject, schema) if registered_version is None: if not self.schema_registry_client.test_compatibility( - subject=subject, schema=schema, + subject=subject, + schema=schema, ): schema_str = ( 
schema.flat_schema diff --git a/kpops/component_handlers/schema_handler/schema_provider.py b/kpops/component_handlers/schema_handler/schema_provider.py index ba7990ce1..78f653270 100644 --- a/kpops/component_handlers/schema_handler/schema_provider.py +++ b/kpops/component_handlers/schema_handler/schema_provider.py @@ -13,6 +13,8 @@ class SchemaProvider(ABC): @abstractmethod def provide_schema( - self, schema_class: str, models: dict[ModelName, ModelVersion], + self, + schema_class: str, + models: dict[ModelName, ModelVersion], ) -> Schema: ... diff --git a/kpops/component_handlers/topic/handler.py b/kpops/component_handlers/topic/handler.py index 1307717c9..75888de16 100644 --- a/kpops/component_handlers/topic/handler.py +++ b/kpops/component_handlers/topic/handler.py @@ -38,7 +38,8 @@ def create_topics(self, to_section: ToSection, dry_run: bool) -> None: topic_name=topic_name, ) differences = self.__get_topic_config_diff( - topic_config_in_cluster, topic_config.configs, + topic_config_in_cluster, + topic_config.configs, ) if differences: @@ -79,7 +80,8 @@ def delete_topics(self, to_section: ToSection, dry_run: bool) -> None: @staticmethod def __get_topic_config_diff( - cluster_config: TopicConfigResponse, current_config: dict, + cluster_config: TopicConfigResponse, + current_config: dict, ) -> list[Diff]: comparable_in_cluster_config_dict, _ = parse_rest_proxy_topic_config( cluster_config, @@ -100,7 +102,8 @@ def __dry_run_topic_creation( topic_name=topic_name, ) in_cluster_config, new_config = parse_and_compare_topic_configs( - topic_config_in_cluster, topic_config.configs, + topic_config_in_cluster, + topic_config.configs, ) if diff := render_diff(in_cluster_config, new_config): log.info(f"Config changes for topic {topic_name}:") @@ -120,7 +123,9 @@ def __dry_run_topic_creation( self.__check_partition_count(topic_in_cluster, topic_spec, effective_config) self.__check_replication_factor( - topic_in_cluster, topic_spec, effective_config, + topic_in_cluster, + topic_spec, + effective_config, ) except TopicNotFoundException: log.info( diff --git a/kpops/component_handlers/topic/utils.py b/kpops/component_handlers/topic/utils.py index 0b5576d1f..904833a28 100644 --- a/kpops/component_handlers/topic/utils.py +++ b/kpops/component_handlers/topic/utils.py @@ -6,7 +6,8 @@ def parse_and_compare_topic_configs( - topic_config_in_cluster: TopicConfigResponse, topic_config: dict, + topic_config_in_cluster: TopicConfigResponse, + topic_config: dict, ) -> tuple[dict, dict]: comparable_in_cluster_config_dict, default_configs = parse_rest_proxy_topic_config( topic_config_in_cluster, diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index 61353c895..c7fd0d68f 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -93,17 +93,24 @@ def extend_with_defaults(self, **kwargs) -> dict: config: PipelineConfig = kwargs["config"] log.debug( typer.style( - "Enriching component of type ", fg=typer.colors.GREEN, bold=False, + "Enriching component of type ", + fg=typer.colors.GREEN, + bold=False, ) + typer.style( - kwargs.get("type"), fg=typer.colors.GREEN, bold=True, underline=True, + kwargs.get("type"), + fg=typer.colors.GREEN, + bold=True, + underline=True, ), ) main_default_file_path, environment_default_file_path = get_defaults_file_paths( config, ) defaults = load_defaults( - self.__class__, main_default_file_path, 
environment_default_file_path, + self.__class__, + main_default_file_path, + environment_default_file_path, ) return update_nested(kwargs, defaults) @@ -163,7 +170,9 @@ def defaults_from_yaml(path: Path, key: str) -> dict: """ content = load_yaml_file(path, substitution=ENV) if not isinstance(content, dict): - msg = "Default files should be structured as map ([app type] -> [default config]" + msg = ( + "Default files should be structured as map ([app type] -> [default config]" + ) raise TypeError( msg, ) diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index e078c63cf..1650a9bdf 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -30,7 +30,8 @@ class KafkaStreamsConfig(BaseModel): brokers: str = Field(default=..., description=describe_attr("brokers", __doc__)) schema_registry_url: str | None = Field( - default=None, description=describe_attr("schema_registry_url", __doc__), + default=None, + description=describe_attr("schema_registry_url", __doc__), ) class Config(CamelCaseConfig, DescConfig): @@ -45,10 +46,12 @@ class KafkaAppConfig(KubernetesAppConfig): """ streams: KafkaStreamsConfig = Field( - default=..., description=describe_attr("streams", __doc__), + default=..., + description=describe_attr("streams", __doc__), ) name_override: str | None = Field( - default=None, description=describe_attr("name_override", __doc__), + default=None, + description=describe_attr("name_override", __doc__), ) @@ -89,12 +92,14 @@ def clean_up_helm_chart(self) -> str: def deploy(self, dry_run: bool) -> None: if self.to: self.handlers.topic_handler.create_topics( - to_section=self.to, dry_run=dry_run, + to_section=self.to, + dry_run=dry_run, ) if self.handlers.schema_handler: self.handlers.schema_handler.submit_schemas( - to_section=self.to, dry_run=dry_run, + to_section=self.to, + dry_run=dry_run, ) super().deploy(dry_run) @@ -113,7 +118,8 @@ def _run_clean_up_job( """ suffix = "-clean" clean_up_release_name = trim_release_name( - self.helm_release_name + suffix, suffix, + self.helm_release_name + suffix, + suffix, ) log.info(f"Uninstall old cleanup job for {clean_up_release_name}") @@ -122,7 +128,10 @@ def _run_clean_up_job( log.info(f"Init cleanup job for {clean_up_release_name}") stdout = self.__install_clean_up_job( - clean_up_release_name, suffix, values, dry_run, + clean_up_release_name, + suffix, + values, + dry_run, ) if dry_run: diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py index f092979c6..a69bf3ab1 100644 --- a/kpops/components/base_components/kafka_connector.py +++ b/kpops/components/base_components/kafka_connector.py @@ -65,7 +65,8 @@ class KafkaConnector(PipelineComponent, ABC): description=describe_attr("repo_config", __doc__), ) version: str | None = Field( - default="1.0.4", description=describe_attr("version", __doc__), + default="1.0.4", + description=describe_attr("version", __doc__), ) resetter_values: dict = Field( default_factory=dict, @@ -138,12 +139,14 @@ def template_flags(self) -> HelmTemplateFlags: def deploy(self, dry_run: bool) -> None: if self.to: self.handlers.topic_handler.create_topics( - to_section=self.to, dry_run=dry_run, + to_section=self.to, + dry_run=dry_run, ) if self.handlers.schema_handler: self.handlers.schema_handler.submit_schemas( - to_section=self.to, dry_run=dry_run, + to_section=self.to, + dry_run=dry_run, ) self.handlers.connector_handler.create_connector(self.app, 
dry_run=dry_run) @@ -151,7 +154,8 @@ def deploy(self, dry_run: bool) -> None: @override def destroy(self, dry_run: bool) -> None: self.handlers.connector_handler.destroy_connector( - self.full_name, dry_run=dry_run, + self.full_name, + dry_run=dry_run, ) @override @@ -159,7 +163,8 @@ def clean(self, dry_run: bool) -> None: if self.to: if self.handlers.schema_handler: self.handlers.schema_handler.delete_schemas( - to_section=self.to, dry_run=dry_run, + to_section=self.to, + dry_run=dry_run, ) self.handlers.topic_handler.delete_topics(self.to, dry_run=dry_run) @@ -196,7 +201,9 @@ def _run_connect_resetter( if dry_run: self.dry_run_handler.print_helm_diff( - stdout, self._resetter_release_name, log, + stdout, + self._resetter_release_name, + log, ) if not retain_clean_jobs: @@ -360,7 +367,9 @@ def clean(self, dry_run: bool) -> None: self.__run_kafka_connect_resetter(dry_run, delete_consumer_group=True) def __run_kafka_connect_resetter( - self, dry_run: bool, delete_consumer_group: bool, + self, + dry_run: bool, + delete_consumer_group: bool, ) -> None: """Run the connector resetter. diff --git a/kpops/components/base_components/kubernetes_app.py b/kpops/components/base_components/kubernetes_app.py index 021f6d379..2e0b44511 100644 --- a/kpops/components/base_components/kubernetes_app.py +++ b/kpops/components/base_components/kubernetes_app.py @@ -96,7 +96,9 @@ def helm_release_name(self) -> str: @property def helm_chart(self) -> str: """Return component's Helm chart.""" - msg = f"Please implement the helm_chart property of the {self.__module__} module." + msg = ( + f"Please implement the helm_chart property of the {self.__module__} module." + ) raise NotImplementedError( msg, ) diff --git a/kpops/components/base_components/models/from_section.py b/kpops/components/base_components/models/from_section.py index a53708c94..c416026c9 100644 --- a/kpops/components/base_components/models/from_section.py +++ b/kpops/components/base_components/models/from_section.py @@ -27,7 +27,8 @@ class FromTopic(BaseModel): """ type: InputTopicTypes | None = Field( - default=None, description=describe_attr("type", __doc__), + default=None, + description=describe_attr("type", __doc__), ) role: str | None = Field(default=None, description=describe_attr("role", __doc__)) diff --git a/kpops/components/base_components/models/to_section.py b/kpops/components/base_components/models/to_section.py index 843321916..d56476659 100644 --- a/kpops/components/base_components/models/to_section.py +++ b/kpops/components/base_components/models/to_section.py @@ -31,7 +31,9 @@ class TopicConfig(BaseModel): """ type: OutputTopicTypes | None = Field( - default=None, title="Topic type", description=describe_attr("type", __doc__), + default=None, + title="Topic type", + description=describe_attr("type", __doc__), ) key_schema: str | None = Field( default=None, @@ -54,7 +56,8 @@ class TopicConfig(BaseModel): description=describe_attr("replication_factor", __doc__), ) configs: dict[str, str | int] = Field( - default={}, description=describe_attr("configs", __doc__), + default={}, + description=describe_attr("configs", __doc__), ) role: str | None = Field(default=None, description=describe_attr("role", __doc__)) @@ -80,10 +83,12 @@ class ToSection(BaseModel): """ topics: dict[TopicName, TopicConfig] = Field( - default={}, description=describe_attr("topics", __doc__), + default={}, + description=describe_attr("topics", __doc__), ) models: dict[ModelName, ModelVersion] = Field( - default={}, description=describe_attr("models", __doc__), + 
default={}, + description=describe_attr("models", __doc__), ) class Config(DescConfig): diff --git a/kpops/components/streams_bootstrap/producer/model.py b/kpops/components/streams_bootstrap/producer/model.py index ad948bfcc..1e5348948 100644 --- a/kpops/components/streams_bootstrap/producer/model.py +++ b/kpops/components/streams_bootstrap/producer/model.py @@ -15,10 +15,12 @@ class ProducerStreamsConfig(KafkaStreamsConfig): """ extra_output_topics: dict[str, str] = Field( - default={}, description=describe_attr("extra_output_topics", __doc__), + default={}, + description=describe_attr("extra_output_topics", __doc__), ) output_topic: str | None = Field( - default=None, description=describe_attr("output_topic", __doc__), + default=None, + description=describe_attr("output_topic", __doc__), ) @@ -29,7 +31,8 @@ class ProducerValues(KafkaAppConfig): """ streams: ProducerStreamsConfig = Field( - default=..., description=describe_attr("streams", __doc__), + default=..., + description=describe_attr("streams", __doc__), ) class Config(BaseConfig): diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index 28ec5059e..0433fb5dc 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -1,5 +1,5 @@ -from typing import Any from collections.abc import Mapping, Set +from typing import Any from pydantic import BaseConfig, BaseModel, Extra, Field from typing_extensions import override @@ -27,28 +27,36 @@ class StreamsConfig(KafkaStreamsConfig): """ input_topics: list[str] = Field( - default=[], description=describe_attr("input_topics", __doc__), + default=[], + description=describe_attr("input_topics", __doc__), ) input_pattern: str | None = Field( - default=None, description=describe_attr("input_pattern", __doc__), + default=None, + description=describe_attr("input_pattern", __doc__), ) extra_input_topics: dict[str, list[str]] = Field( - default={}, description=describe_attr("extra_input_topics", __doc__), + default={}, + description=describe_attr("extra_input_topics", __doc__), ) extra_input_patterns: dict[str, str] = Field( - default={}, description=describe_attr("extra_input_patterns", __doc__), + default={}, + description=describe_attr("extra_input_patterns", __doc__), ) extra_output_topics: dict[str, str] = Field( - default={}, description=describe_attr("extra_output_topics", __doc__), + default={}, + description=describe_attr("extra_output_topics", __doc__), ) output_topic: str | None = Field( - default=None, description=describe_attr("output_topic", __doc__), + default=None, + description=describe_attr("output_topic", __doc__), ) error_topic: str | None = Field( - default=None, description=describe_attr("error_topic", __doc__), + default=None, + description=describe_attr("error_topic", __doc__), ) config: dict[str, str] = Field( - default={}, description=describe_attr("config", __doc__), + default={}, + description=describe_attr("config", __doc__), ) def add_input_topics(self, topics: list[str]) -> None: @@ -76,8 +84,8 @@ def add_extra_input_topics(self, role: str, topics: list[str]) -> None: def dict( self, *, - include: None | Set [int | str] | Mapping[int | str, Any] = None, - exclude: None | Set [int | str] | Mapping[int | str, Any] = None, + include: None | Set[int | str] | Mapping[int | str, Any] = None, + exclude: None | Set[int | str] | Mapping[int | str, Any] = None, by_alias: bool = False, skip_defaults: bool | None = None, exclude_unset: bool = False, 
diff --git a/kpops/pipeline_generator/pipeline.py b/kpops/pipeline_generator/pipeline.py index 4475acf16..c95a7e904 100644 --- a/kpops/pipeline_generator/pipeline.py +++ b/kpops/pipeline_generator/pipeline.py @@ -184,7 +184,9 @@ def parse_components(self, component_list: list[dict]) -> None: raise ParsingException from ex def apply_component( - self, component_class: type[PipelineComponent], component_data: dict, + self, + component_class: type[PipelineComponent], + component_data: dict, ) -> None: """Instantiate, enrich and inflate pipeline component. @@ -218,7 +220,8 @@ def apply_component( inflated_from_component.name, ) enriched_component.weave_from_topics( - resolved_from_component.to, from_topic, + resolved_from_component.to, + from_topic, ) elif self.components: # read from previous component @@ -302,7 +305,8 @@ def substitute_in_component(self, component_as_dict: dict) -> dict: substitution_hardcoded, ) substitution = generate_substitution( - json.loads(config.json()), existing_substitution=component_substitution, + json.loads(config.json()), + existing_substitution=component_substitution, ) return json.loads( diff --git a/kpops/utils/dict_differ.py b/kpops/utils/dict_differ.py index b5760ed33..50cb5a7f6 100644 --- a/kpops/utils/dict_differ.py +++ b/kpops/utils/dict_differ.py @@ -1,10 +1,10 @@ from __future__ import annotations +from collections.abc import Iterable, Iterator, Sequence from dataclasses import dataclass from difflib import Differ from enum import Enum from typing import Generic, TypeVar -from collections.abc import Iterable, Iterator, Sequence import typer import yaml @@ -52,10 +52,12 @@ class Diff(Generic[T]): @staticmethod def from_dicts( - d1: dict, d2: dict, ignore: set[str] | None = None, + d1: dict, + d2: dict, + ignore: set[str] | None = None, ) -> Iterator[Diff]: for diff_type, keys, changes in diff(d1, d2, ignore=ignore): - if not isinstance(changes_tmp:=changes, list): + if not isinstance(changes_tmp := changes, list): changes_tmp = [("", changes)] for key, change in changes_tmp: yield Diff( diff --git a/kpops/utils/dict_ops.py b/kpops/utils/dict_ops.py index e4adf81cd..d3c173edc 100644 --- a/kpops/utils/dict_ops.py +++ b/kpops/utils/dict_ops.py @@ -1,5 +1,5 @@ -from typing import Any from collections.abc import Mapping +from typing import Any def update_nested_pair(original_dict: dict, other_dict: Mapping) -> dict: @@ -46,7 +46,9 @@ def update_nested(*argv: dict) -> dict: def flatten_mapping( - nested_mapping: Mapping[str, Any], prefix: str | None = None, separator: str = "_", + nested_mapping: Mapping[str, Any], + prefix: str | None = None, + separator: str = "_", ) -> dict[str, Any]: """Flattens a Mapping. diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index 0b104064c..c1d96ce5c 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -1,9 +1,9 @@ import inspect import logging from abc import ABC +from collections.abc import Sequence from enum import Enum from typing import Annotated, Any, Literal, Union -from collections.abc import Sequence from pydantic import BaseConfig, Field, schema, schema_json_of from pydantic.fields import FieldInfo, ModelField @@ -38,7 +38,8 @@ def field_schema(field: ModelField, **kwargs: Any) -> Any: def _is_valid_component( - defined_component_types: set[str], component: type[PipelineComponent], + defined_component_types: set[str], + component: type[PipelineComponent], ) -> bool: """Check whether a PipelineComponent subclass has a valid definition for the schema generation. 
@@ -57,7 +58,8 @@ def _is_valid_component( def _add_components( - components_module: str, components: tuple[type[PipelineComponent]] | None = None, + components_module: str, + components: tuple[type[PipelineComponent]] | None = None, ) -> tuple[type[PipelineComponent]]: """Add components to a components tuple. @@ -83,7 +85,8 @@ def _add_components( def gen_pipeline_schema( - components_module: str | None = None, include_stock_components: bool = True, + components_module: str | None = None, + include_stock_components: bool = True, ) -> None: """Generate a json schema from the models of pipeline components. @@ -125,7 +128,8 @@ def gen_pipeline_schema( ) AnnotatedPipelineComponents = Annotated[ - PipelineComponents, Field(discriminator="type"), + PipelineComponents, + Field(discriminator="type"), ] schema = schema_json_of( @@ -141,6 +145,9 @@ def gen_pipeline_schema( def gen_config_schema() -> None: """Generate a json schema from the model of pipeline config.""" schema = schema_json_of( - PipelineConfig, title="KPOps config schema", indent=4, sort_keys=True, + PipelineConfig, + title="KPOps config schema", + indent=4, + sort_keys=True, ) print(schema) diff --git a/kpops/utils/yaml_loading.py b/kpops/utils/yaml_loading.py index 36848d4d2..b587ae1e4 100644 --- a/kpops/utils/yaml_loading.py +++ b/kpops/utils/yaml_loading.py @@ -9,7 +9,8 @@ def generate_hashkey( - file_path: Path, substitution: Mapping[str, Any] | None = None, + file_path: Path, + substitution: Mapping[str, Any] | None = None, ) -> tuple: if substitution is None: substitution = {} @@ -18,7 +19,9 @@ def generate_hashkey( @cached(cache={}, key=generate_hashkey) def load_yaml_file( - file_path: Path, *, substitution: Mapping[str, Any] | None = None, + file_path: Path, + *, + substitution: Mapping[str, Any] | None = None, ) -> dict | list[dict]: with file_path.open() as yaml_file: return yaml.load(substitute(yaml_file.read(), substitution), Loader=yaml.Loader) diff --git a/tests/cli/resources/module.py b/tests/cli/resources/module.py index 4014d6ec4..3691e53e1 100644 --- a/tests/cli/resources/module.py +++ b/tests/cli/resources/module.py @@ -9,6 +9,8 @@ class CustomSchemaProvider(SchemaProvider): def provide_schema( - self, schema_class: str, models: dict[ModelName, ModelVersion], + self, + schema_class: str, + models: dict[ModelName, ModelVersion], ) -> Schema: return AvroSchema() diff --git a/tests/cli/snapshots/snap_test_schema_generation.py b/tests/cli/snapshots/snap_test_schema_generation.py index 2dd92b512..fe596c1df 100644 --- a/tests/cli/snapshots/snap_test_schema_generation.py +++ b/tests/cli/snapshots/snap_test_schema_generation.py @@ -1,13 +1,10 @@ -# -*- coding: utf-8 -*- # snapshottest: v1 - https://goo.gl/zC4yUc -from __future__ import unicode_literals from snapshottest import Snapshot - snapshots = Snapshot() -snapshots['TestGenSchema.test_gen_pipeline_schema_only_custom_module test-schema-generation'] = '''{ +snapshots["TestGenSchema.test_gen_pipeline_schema_only_custom_module test-schema-generation"] = """{ "definitions": { "EmptyPipelineComponent": { "description": "", @@ -432,4 +429,4 @@ "title": "KPOps pipeline schema", "type": "array" } -''' +""" diff --git a/tests/cli/test_pipeline_steps.py b/tests/cli/test_pipeline_steps.py index 1d1cafbf1..8b4c6c6e3 100644 --- a/tests/cli/test_pipeline_steps.py +++ b/tests/cli/test_pipeline_steps.py @@ -45,7 +45,9 @@ def log_info(mocker: MockerFixture) -> MagicMock: def tests_filter_steps_to_apply(log_info: MagicMock, pipeline: Pipeline): filtered_steps = get_steps_to_apply( - 
pipeline, "example2,example3", FilterType.INCLUDE, + pipeline, + "example2,example3", + FilterType.INCLUDE, ) assert len(filtered_steps) == 2 @@ -66,7 +68,9 @@ def tests_filter_steps_to_apply(log_info: MagicMock, pipeline: Pipeline): def tests_filter_steps_to_exclude(log_info: MagicMock, pipeline: Pipeline): filtered_steps = get_steps_to_apply( - pipeline, "example2,example3", FilterType.EXCLUDE, + pipeline, + "example2,example3", + FilterType.EXCLUDE, ) assert len(filtered_steps) == 1 diff --git a/tests/cli/test_schema_generation.py b/tests/cli/test_schema_generation.py index 4c0da3c37..be5b17ae6 100644 --- a/tests/cli/test_schema_generation.py +++ b/tests/cli/test_schema_generation.py @@ -9,10 +9,10 @@ from snapshottest.module import SnapshotTest from typer.testing import CliRunner -from tests.cli.resources import empty_module from kpops.cli.main import app from kpops.components.base_components import PipelineComponent from kpops.utils.docstring import describe_attr +from tests.cli.resources import empty_module RESOURCE_PATH = Path(__file__).parent / "resources" @@ -75,7 +75,8 @@ class SubPipelineComponentCorrectDocstr(SubPipelineComponent): """ example_attr: str = Field( - default=..., description=describe_attr("example_attr", __doc__), + default=..., + description=describe_attr("example_attr", __doc__), ) @@ -83,7 +84,10 @@ class SubPipelineComponentCorrectDocstr(SubPipelineComponent): @pytest.mark.filterwarnings( - "ignore:handlers", "ignore:config", "ignore:enrich", "ignore:validate", + "ignore:handlers", + "ignore:config", + "ignore:enrich", + "ignore:validate", ) class TestGenSchema: def test_gen_pipeline_schema_no_modules(self, caplog: pytest.LogCaptureFixture): diff --git a/tests/compiler/test_pipeline_name.py b/tests/compiler/test_pipeline_name.py index 197c1310c..6561197a1 100644 --- a/tests/compiler/test_pipeline_name.py +++ b/tests/compiler/test_pipeline_name.py @@ -50,7 +50,10 @@ def test_should_set_pipeline_name_with_absolute_base_dir(): def test_should_not_set_pipeline_name_with_the_same_base_dir(): - with pytest.raises(ValueError, match="The pipeline-base-dir should not equal the pipeline-path"): + with pytest.raises( + ValueError, + match="The pipeline-base-dir should not equal the pipeline-path", + ): Pipeline.set_pipeline_name_env_vars(PIPELINE_PATH, PIPELINE_PATH) diff --git a/tests/component_handlers/helm_wrapper/test_dry_run_handler.py b/tests/component_handlers/helm_wrapper/test_dry_run_handler.py index c75883743..0f21a970c 100644 --- a/tests/component_handlers/helm_wrapper/test_dry_run_handler.py +++ b/tests/component_handlers/helm_wrapper/test_dry_run_handler.py @@ -42,7 +42,8 @@ def test_should_print_helm_diff_when_release_is_new( dry_run_handler.print_helm_diff("A test stdout", "a-release-name", log) helm_mock.get_manifest.assert_called_once_with( - "a-release-name", "test-namespace", + "a-release-name", + "test-namespace", ) assert "Helm release a-release-name does not exist" in caplog.text mock_load_manifest.assert_called_once_with("A test stdout") @@ -67,7 +68,8 @@ def test_should_print_helm_diff_when_release_exists( dry_run_handler.print_helm_diff("A test stdout", "a-release-name", log) helm_mock.get_manifest.assert_called_once_with( - "a-release-name", "test-namespace", + "a-release-name", + "test-namespace", ) assert "Helm release a-release-name already exists" in caplog.text mock_load_manifest.assert_called_once_with("A test stdout") diff --git a/tests/component_handlers/helm_wrapper/test_helm_wrapper.py 
b/tests/component_handlers/helm_wrapper/test_helm_wrapper.py index d838f530c..e8870de85 100644 --- a/tests/component_handlers/helm_wrapper/test_helm_wrapper.py +++ b/tests/component_handlers/helm_wrapper/test_helm_wrapper.py @@ -44,7 +44,9 @@ def mock_get_version(self, mocker: MockerFixture) -> MagicMock: return mock_get_version def test_should_call_run_command_method_when_helm_install_with_defaults( - self, run_command: MagicMock, mock_get_version: MagicMock, + self, + run_command: MagicMock, + mock_get_version: MagicMock, ): helm_wrapper = Helm(helm_config=HelmConfig()) @@ -74,7 +76,9 @@ def test_should_call_run_command_method_when_helm_install_with_defaults( ) def test_should_include_configured_tls_parameters_on_add_when_version_is_old( - self, run_command: MagicMock, mocker: MockerFixture, + self, + run_command: MagicMock, + mocker: MockerFixture, ): mock_get_version = mocker.patch.object(Helm, "get_version") mock_get_version.return_value = Version(major=3, minor=6, patch=0) @@ -104,7 +108,9 @@ def test_should_include_configured_tls_parameters_on_add_when_version_is_old( ] def test_should_include_configured_tls_parameters_on_add_when_version_is_new( - self, run_command: MagicMock, mock_get_version: MagicMock, + self, + run_command: MagicMock, + mock_get_version: MagicMock, ): helm = Helm(HelmConfig()) @@ -132,7 +138,9 @@ def test_should_include_configured_tls_parameters_on_add_when_version_is_new( ] def test_should_include_configured_tls_parameters_on_update( - self, run_command: MagicMock, mock_get_version: MagicMock, + self, + run_command: MagicMock, + mock_get_version: MagicMock, ): helm_wrapper = Helm(helm_config=HelmConfig()) helm_wrapper.upgrade_install( @@ -168,7 +176,9 @@ def test_should_include_configured_tls_parameters_on_update( ) def test_should_call_run_command_method_when_helm_install_with_non_defaults( - self, run_command: MagicMock, mock_get_version: MagicMock, + self, + run_command: MagicMock, + mock_get_version: MagicMock, ): helm_wrapper = Helm(helm_config=HelmConfig()) helm_wrapper.upgrade_install( @@ -213,7 +223,9 @@ def test_should_call_run_command_method_when_helm_install_with_non_defaults( ) def test_should_call_run_command_method_when_uninstalling_streams_app( - self, run_command: MagicMock, mock_get_version: MagicMock, + self, + run_command: MagicMock, + mock_get_version: MagicMock, ): helm_wrapper = Helm(helm_config=HelmConfig()) helm_wrapper.uninstall( @@ -244,7 +256,9 @@ def test_should_log_warning_when_release_not_found( ) def test_should_call_run_command_method_when_installing_streams_app__with_dry_run( - self, run_command: MagicMock, mock_get_version: MagicMock, + self, + run_command: MagicMock, + mock_get_version: MagicMock, ): helm_wrapper = Helm(helm_config=HelmConfig()) @@ -398,7 +412,9 @@ def test_load_manifest(self): assert helm_templates[1].template == {"foo": "bar"} def test_helm_get_manifest( - self, run_command: MagicMock, mock_get_version: MagicMock, + self, + run_command: MagicMock, + mock_get_version: MagicMock, ): helm_wrapper = Helm(helm_config=HelmConfig()) run_command.return_value = dedent( @@ -431,7 +447,9 @@ def test_helm_get_manifest( assert helm_wrapper.get_manifest("test-release", "test-namespace") == () def test_should_call_run_command_method_when_helm_template_with_optional_args( - self, run_command: MagicMock, mock_get_version: MagicMock, + self, + run_command: MagicMock, + mock_get_version: MagicMock, ): helm_wrapper = Helm(helm_config=HelmConfig()) @@ -469,7 +487,9 @@ def 
test_should_call_run_command_method_when_helm_template_with_optional_args( ) def test_should_call_run_command_method_when_helm_template_without_optional_args( - self, run_command: MagicMock, mock_get_version: MagicMock, + self, + run_command: MagicMock, + mock_get_version: MagicMock, ): helm_wrapper = Helm(helm_config=HelmConfig()) @@ -525,7 +545,8 @@ def test_should_call_helm_version( assert helm._version == expected_version def test_should_raise_exception_if_helm_version_is_old( - self, run_command: MagicMock, + self, + run_command: MagicMock, ): run_command.return_value = "v2.9.0+gc9f554d" with pytest.raises( @@ -535,10 +556,12 @@ def test_should_raise_exception_if_helm_version_is_old( Helm(helm_config=HelmConfig()) def test_should_raise_exception_if_helm_version_cannot_be_parsed( - self, run_command: MagicMock, + self, + run_command: MagicMock, ): run_command.return_value = "123" with pytest.raises( - RuntimeError, match="Could not parse the Helm version.\n\nHelm output:\n123", + RuntimeError, + match="Could not parse the Helm version.\n\nHelm output:\n123", ): Helm(helm_config=HelmConfig()) diff --git a/tests/component_handlers/kafka_connect/test_connect_handler.py b/tests/component_handlers/kafka_connect/test_connect_handler.py index 172f1dee0..fe6bc473e 100644 --- a/tests/component_handlers/kafka_connect/test_connect_handler.py +++ b/tests/component_handlers/kafka_connect/test_connect_handler.py @@ -163,7 +163,9 @@ def test_should_log_correct_message_when_create_connector_and_connector_exists_i ] def test_should_log_invalid_config_when_create_connector_in_dry_run( - self, connector_config: KafkaConnectorConfig, renderer_diff_mock: MagicMock, + self, + connector_config: KafkaConnectorConfig, + renderer_diff_mock: MagicMock, ): connector_wrapper = MagicMock() @@ -188,7 +190,8 @@ def test_should_log_invalid_config_when_create_connector_in_dry_run( ) def test_should_call_update_connector_config_when_connector_exists_not_dry_run( - self, connector_config: KafkaConnectorConfig, + self, + connector_config: KafkaConnectorConfig, ): connector_wrapper = MagicMock() handler = self.connector_handler(connector_wrapper) @@ -201,7 +204,8 @@ def test_should_call_update_connector_config_when_connector_exists_not_dry_run( ] def test_should_call_create_connector_when_connector_does_not_exists_not_dry_run( - self, connector_config: KafkaConnectorConfig, + self, + connector_config: KafkaConnectorConfig, ): connector_wrapper = MagicMock() diff --git a/tests/component_handlers/kafka_connect/test_connect_wrapper.py b/tests/component_handlers/kafka_connect/test_connect_wrapper.py index afac5538d..1b1793109 100644 --- a/tests/component_handlers/kafka_connect/test_connect_wrapper.py +++ b/tests/component_handlers/kafka_connect/test_connect_wrapper.py @@ -58,7 +58,8 @@ def test_should_through_exception_when_host_is_not_set(self): @patch("httpx.post") def test_should_create_post_requests_for_given_connector_configuration( - self, mock_post: MagicMock, + self, + mock_post: MagicMock, ): configs = { "connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector", @@ -84,7 +85,9 @@ def test_should_create_post_requests_for_given_connector_configuration( ) def test_should_return_correct_response_when_connector_created( - self, httpx_mock: HTTPXMock, connector_config: KafkaConnectorConfig, + self, + httpx_mock: HTTPXMock, + connector_config: KafkaConnectorConfig, ): actual_response = { "name": "hdfs-sink-connector", @@ -152,7 +155,9 @@ def test_should_create_correct_get_connector_request(self, 
mock_get: MagicMock): @pytest.mark.flaky(reruns=5, condition=sys.platform.startswith("win32")) @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.info") def test_should_return_correct_response_when_getting_connector( - self, log_info: MagicMock, httpx_mock: HTTPXMock, + self, + log_info: MagicMock, + httpx_mock: HTTPXMock, ): connector_name = "test-connector" @@ -187,7 +192,9 @@ def test_should_return_correct_response_when_getting_connector( @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.info") def test_should_raise_connector_not_found_when_getting_connector( - self, log_info: MagicMock, httpx_mock: HTTPXMock, + self, + log_info: MagicMock, + httpx_mock: HTTPXMock, ): connector_name = "test-connector" @@ -207,7 +214,9 @@ def test_should_raise_connector_not_found_when_getting_connector( @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.warning") def test_should_raise_rebalance_in_progress_when_getting_connector( - self, log_warning: MagicMock, httpx_mock: HTTPXMock, + self, + log_warning: MagicMock, + httpx_mock: HTTPXMock, ): connector_name = "test-connector" @@ -362,7 +371,8 @@ def test_should_raise_connector_exists_exception_when_update_connector( @patch("httpx.delete") def test_should_create_correct_delete_connector_request( - self, mock_delete: MagicMock, + self, + mock_delete: MagicMock, ): connector_name = "test-connector" with pytest.raises(KafkaConnectError): @@ -375,7 +385,9 @@ def test_should_create_correct_delete_connector_request( @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.info") def test_should_return_correct_response_when_deleting_connector( - self, log_info: MagicMock, httpx_mock: HTTPXMock, + self, + log_info: MagicMock, + httpx_mock: HTTPXMock, ): connector_name = "test-connector" @@ -410,7 +422,9 @@ def test_should_return_correct_response_when_deleting_connector( @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.info") def test_should_raise_connector_not_found_when_deleting_connector( - self, log_info: MagicMock, httpx_mock: HTTPXMock, + self, + log_info: MagicMock, + httpx_mock: HTTPXMock, ): connector_name = "test-connector" @@ -430,7 +444,9 @@ def test_should_raise_connector_not_found_when_deleting_connector( @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.warning") def test_should_raise_rebalance_in_progress_when_deleting_connector( - self, log_warning: MagicMock, httpx_mock: HTTPXMock, + self, + log_warning: MagicMock, + httpx_mock: HTTPXMock, ): connector_name = "test-connector" @@ -453,7 +469,8 @@ def test_should_raise_rebalance_in_progress_when_deleting_connector( @patch("httpx.put") def test_should_create_correct_validate_connector_config_request( - self, mock_put: MagicMock, + self, + mock_put: MagicMock, ): connector_config = KafkaConnectorConfig( **{ @@ -474,7 +491,8 @@ def test_should_create_correct_validate_connector_config_request( @patch("httpx.put") def test_should_create_correct_validate_connector_config_and_name_gets_added( - self, mock_put: MagicMock, + self, + mock_put: MagicMock, ): connector_name = "FileStreamSinkConnector" configs = { diff --git a/tests/component_handlers/schema_handler/resources/module.py b/tests/component_handlers/schema_handler/resources/module.py index 4223179d3..7be7b4fca 100644 --- a/tests/component_handlers/schema_handler/resources/module.py +++ b/tests/component_handlers/schema_handler/resources/module.py @@ -9,6 +9,8 @@ class CustomSchemaProvider(SchemaProvider): def provide_schema( - self, schema_class: str, 
models: dict[ModelName, ModelVersion], + self, + schema_class: str, + models: dict[ModelName, ModelVersion], ) -> Schema: return AvroSchema({}) diff --git a/tests/component_handlers/schema_handler/test_schema_handler.py b/tests/component_handlers/schema_handler/test_schema_handler.py index 51ff483ca..df516de19 100644 --- a/tests/component_handlers/schema_handler/test_schema_handler.py +++ b/tests/component_handlers/schema_handler/test_schema_handler.py @@ -96,16 +96,19 @@ def test_should_lazy_load_schema_provider(find_class_mock: MagicMock): schema_registry_url="http://localhost:8081", ) schema_handler = SchemaHandler.load_schema_handler( - TEST_SCHEMA_PROVIDER_MODULE, config_enable, + TEST_SCHEMA_PROVIDER_MODULE, + config_enable, ) assert schema_handler is not None schema_handler.schema_provider.provide_schema( - "com.bakdata.kpops.test.SchemaHandlerTest", {}, + "com.bakdata.kpops.test.SchemaHandlerTest", + {}, ) schema_handler.schema_provider.provide_schema( - "com.bakdata.kpops.test.SomeOtherSchemaClass", {}, + "com.bakdata.kpops.test.SomeOtherSchemaClass", + {}, ) find_class_mock.assert_called_once_with(TEST_SCHEMA_PROVIDER_MODULE, SchemaProvider) @@ -113,19 +116,22 @@ def test_should_lazy_load_schema_provider(find_class_mock: MagicMock): def test_should_raise_value_error_if_schema_provider_class_not_found(): schema_handler = SchemaHandler( - url="http://mock:8081", components_module=NON_EXISTING_PROVIDER_MODULE, + url="http://mock:8081", + components_module=NON_EXISTING_PROVIDER_MODULE, ) with pytest.raises( ValueError, match="No schema provider found in components module pydantic.main. " - "Please implement the abstract method in " - f"{SchemaProvider.__module__}.{SchemaProvider.__name__}." + "Please implement the abstract method in " + f"{SchemaProvider.__module__}.{SchemaProvider.__name__}.", ): schema_handler.schema_provider.provide_schema( - "com.bakdata.kpops.test.SchemaHandlerTest", {}, + "com.bakdata.kpops.test.SchemaHandlerTest", + {}, ) + @pytest.mark.parametrize( ("components_module"), [ @@ -137,9 +143,11 @@ def test_should_raise_value_error_if_schema_provider_class_not_found(): "", id="components_module = ''", ), - ] + ], ) -def test_should_raise_value_error_when_schema_provider_is_called_and_components_module_is_empty(components_module): +def test_should_raise_value_error_when_schema_provider_is_called_and_components_module_is_empty( + components_module, +): config_enable = PipelineConfig( defaults_path=Path("fake"), environment="development", @@ -147,17 +155,24 @@ def test_should_raise_value_error_when_schema_provider_is_called_and_components_ ) schema_handler = SchemaHandler.load_schema_handler(components_module, config_enable) assert schema_handler is not None - with pytest.raises(ValueError, match="The Schema Registry URL is set but you haven't specified the component module path. Please provide a valid component module path where your SchemaProvider implementation exists."): + with pytest.raises( + ValueError, + match="The Schema Registry URL is set but you haven't specified the component module path. 
Please provide a valid component module path where your SchemaProvider implementation exists.", + ): schema_handler.schema_provider.provide_schema( - "com.bakdata.kpops.test.SchemaHandlerTest", {}, + "com.bakdata.kpops.test.SchemaHandlerTest", + {}, ) def test_should_log_info_when_submit_schemas_that_not_exists_and_dry_run_true( - to_section: ToSection, log_info_mock: MagicMock, schema_registry_mock: MagicMock, + to_section: ToSection, + log_info_mock: MagicMock, + schema_registry_mock: MagicMock, ): schema_handler = SchemaHandler( - url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE, + url="http://mock:8081", + components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_registry_mock.get_versions.return_value = [] @@ -177,7 +192,8 @@ def test_should_log_info_when_submit_schemas_that_exists_and_dry_run_true( schema_registry_mock: MagicMock, ): schema_handler = SchemaHandler( - url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE, + url="http://mock:8081", + components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_registry_mock.get_versions.return_value = [1, 2, 3] @@ -199,7 +215,8 @@ def test_should_raise_exception_when_submit_schema_that_exists_and_not_compatibl ): schema_provider = TestSchemaProvider() schema_handler = SchemaHandler( - url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE, + url="http://mock:8081", + components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_class = "com.bakdata.kpops.test.SchemaHandlerTest" @@ -238,7 +255,8 @@ def test_should_log_debug_when_submit_schema_that_exists_and_registered_under_ve ): schema_provider = TestSchemaProvider() schema_handler = SchemaHandler( - url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE, + url="http://mock:8081", + components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_class = "com.bakdata.kpops.test.SchemaHandlerTest" schema = schema_provider.provide_schema(schema_class, {}) @@ -274,7 +292,8 @@ def test_should_submit_non_existing_schema_when_not_dry( schema_class = "com.bakdata.kpops.test.SchemaHandlerTest" schema = schema_provider.provide_schema(schema_class, {}) schema_handler = SchemaHandler( - url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE, + url="http://mock:8081", + components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_registry_mock.get_versions.return_value = [] @@ -288,7 +307,8 @@ def test_should_submit_non_existing_schema_when_not_dry( schema_registry_mock.get_versions.assert_not_called() schema_registry_mock.register.assert_called_once_with( - subject=subject, schema=schema, + subject=subject, + schema=schema, ) @@ -298,7 +318,8 @@ def test_should_log_correct_message_when_delete_schemas_and_in_dry_run( schema_registry_mock: MagicMock, ): schema_handler = SchemaHandler( - url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE, + url="http://mock:8081", + components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_registry_mock.get_versions.return_value = [] @@ -313,10 +334,12 @@ def test_should_log_correct_message_when_delete_schemas_and_in_dry_run( def test_should_delete_schemas_when_not_in_dry_run( - to_section: ToSection, schema_registry_mock: MagicMock, + to_section: ToSection, + schema_registry_mock: MagicMock, ): schema_handler = SchemaHandler( - url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE, + url="http://mock:8081", + components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_registry_mock.get_versions.return_value = [] diff --git a/tests/component_handlers/topic/test_proxy_wrapper.py 
b/tests/component_handlers/topic/test_proxy_wrapper.py index 73e8aab4d..f0e121dd7 100644 --- a/tests/component_handlers/topic/test_proxy_wrapper.py +++ b/tests/component_handlers/topic/test_proxy_wrapper.py @@ -32,7 +32,9 @@ def log_debug_mock(self, mocker: MockerFixture) -> MagicMock: @pytest.fixture(autouse=True) def _setup(self, httpx_mock: HTTPXMock): config = PipelineConfig( - defaults_path=DEFAULTS_PATH, environment="development", kafka_rest_host=HOST, + defaults_path=DEFAULTS_PATH, + environment="development", + kafka_rest_host=HOST, ) self.proxy_wrapper = ProxyWrapper(pipeline_config=config) @@ -53,12 +55,16 @@ def _setup(self, httpx_mock: HTTPXMock): def test_should_raise_exception_when_host_is_not_set(self): config = PipelineConfig(defaults_path=DEFAULTS_PATH, environment="development") config.kafka_rest_host = None - with pytest.raises(ValueError, match="The Kafka REST Proxy host is not set. Please set the host in the config.yaml using the kafka_rest_host property or set the environemt variable KPOPS_REST_PROXY_HOST."): + with pytest.raises( + ValueError, + match="The Kafka REST Proxy host is not set. Please set the host in the config.yaml using the kafka_rest_host property or set the environemt variable KPOPS_REST_PROXY_HOST.", + ): ProxyWrapper(pipeline_config=config) @patch("httpx.post") def test_should_create_topic_with_all_topic_configuration( - self, mock_post: MagicMock, + self, + mock_post: MagicMock, ): topic_spec = { "topic_name": "topic-X", @@ -151,7 +157,9 @@ def test_should_call_get_broker_config(self, mock_get: MagicMock): ) def test_should_log_topic_creation( - self, log_info_mock: MagicMock, httpx_mock: HTTPXMock, + self, + log_info_mock: MagicMock, + httpx_mock: HTTPXMock, ): topic_spec = { "topic_name": "topic-X", @@ -174,7 +182,9 @@ def test_should_log_topic_creation( log_info_mock.assert_called_once_with("Topic topic-X created.") def test_should_log_topic_deletion( - self, log_info_mock: MagicMock, httpx_mock: HTTPXMock, + self, + log_info_mock: MagicMock, + httpx_mock: HTTPXMock, ): topic_name = "topic-X" @@ -221,7 +231,9 @@ def test_should_get_topic(self, log_debug_mock: MagicMock, httpx_mock: HTTPXMock assert get_topic_response == topic_response def test_should_rais_topic_not_found_exception_get_topic( - self, log_debug_mock: MagicMock, httpx_mock: HTTPXMock, + self, + log_debug_mock: MagicMock, + httpx_mock: HTTPXMock, ): topic_name = "topic-X" @@ -240,7 +252,9 @@ def test_should_rais_topic_not_found_exception_get_topic( log_debug_mock.assert_any_call("Topic topic-X not found.") def test_should_log_reset_default_topic_config_when_deleted( - self, log_info_mock: MagicMock, httpx_mock: HTTPXMock, + self, + log_info_mock: MagicMock, + httpx_mock: HTTPXMock, ): topic_name = "topic-X" config_name = "cleanup.policy" diff --git a/tests/component_handlers/topic/test_topic_handler.py b/tests/component_handlers/topic/test_topic_handler.py index 6ca8410e2..aeb04f6c0 100644 --- a/tests/component_handlers/topic/test_topic_handler.py +++ b/tests/component_handlers/topic/test_topic_handler.py @@ -77,7 +77,8 @@ def get_topic_response_mock(self) -> MagicMock: @pytest.fixture(autouse=True) def get_default_topic_response_mock(self) -> MagicMock: with Path( - DEFAULTS_PATH / "kafka_rest_proxy_responses/get_default_topic_response.json", + DEFAULTS_PATH + / "kafka_rest_proxy_responses/get_default_topic_response.json", ).open() as f: response = json.load(f) @@ -120,7 +121,8 @@ def test_should_call_create_topic_with_dry_run_false(self): 
wrapper.__dry_run_topic_creation.assert_not_called() def test_should_call_update_topic_config_when_topic_exists_and_with_dry_run_false( - self, get_topic_response_mock: MagicMock, + self, + get_topic_response_mock: MagicMock, ): wrapper = get_topic_response_mock topic_handler = TopicHandler(proxy_wrapper=wrapper) @@ -146,7 +148,9 @@ def test_should_call_update_topic_config_when_topic_exists_and_with_dry_run_fals wrapper.__dry_run_topic_creation.assert_not_called() def test_should_update_topic_config_when_one_config_changed( - self, log_info_mock: MagicMock, get_topic_response_mock: MagicMock, + self, + log_info_mock: MagicMock, + get_topic_response_mock: MagicMock, ): wrapper = get_topic_response_mock @@ -168,7 +172,9 @@ def test_should_update_topic_config_when_one_config_changed( ) def test_should_not_update_topic_config_when_config_not_changed( - self, log_info_mock: MagicMock, get_topic_response_mock: MagicMock, + self, + log_info_mock: MagicMock, + get_topic_response_mock: MagicMock, ): wrapper = get_topic_response_mock @@ -190,7 +196,9 @@ def test_should_not_update_topic_config_when_config_not_changed( ) def test_should_not_update_topic_config_when_config_not_changed_and_not_ordered( - self, log_info_mock: MagicMock, get_topic_response_mock: MagicMock, + self, + log_info_mock: MagicMock, + get_topic_response_mock: MagicMock, ): wrapper = get_topic_response_mock topic_handler = TopicHandler(proxy_wrapper=wrapper) @@ -211,7 +219,8 @@ def test_should_not_update_topic_config_when_config_not_changed_and_not_ordered( ) def test_should_call_reset_topic_config_when_topic_exists_dry_run_false_and_topic_configs_change( - self, get_topic_response_mock: MagicMock, + self, + get_topic_response_mock: MagicMock, ): wrapper = get_topic_response_mock @@ -251,7 +260,8 @@ def test_should_not_call_create_topics_with_dry_run_true_and_topic_not_exists(se wrapper.create_topic.assert_not_called() def test_should_print_message_with_dry_run_true_and_topic_not_exists( - self, log_info_mock: MagicMock, + self, + log_info_mock: MagicMock, ): wrapper = MagicMock() wrapper.get_topic.side_effect = TopicNotFoundException() @@ -350,7 +360,8 @@ def test_should_print_message_if_dry_run_and_topic_exists_with_default_partition ] def test_should_exit_if_dry_run_and_topic_exists_different_partition_count( - self, get_topic_response_mock: MagicMock, + self, + get_topic_response_mock: MagicMock, ): wrapper = get_topic_response_mock @@ -372,7 +383,8 @@ def test_should_exit_if_dry_run_and_topic_exists_different_partition_count( wrapper.get_topic_config.assert_called_once() # dry run requests the config to create the diff def test_should_exit_if_dry_run_and_topic_exists_different_replication_factor( - self, get_topic_response_mock: MagicMock, + self, + get_topic_response_mock: MagicMock, ): wrapper = get_topic_response_mock @@ -394,7 +406,9 @@ def test_should_exit_if_dry_run_and_topic_exists_different_replication_factor( wrapper.get_topic_config.assert_called_once() # dry run requests the config to create the diff def test_should_log_correct_message_when_delete_existing_topic_dry_run( - self, log_info_mock: MagicMock, get_topic_response_mock: MagicMock, + self, + log_info_mock: MagicMock, + get_topic_response_mock: MagicMock, ): wrapper = get_topic_response_mock @@ -418,7 +432,8 @@ def test_should_log_correct_message_when_delete_existing_topic_dry_run( ) def test_should_log_correct_message_when_delete_non_existing_topic_dry_run( - self, log_warning_mock: MagicMock, + self, + log_warning_mock: MagicMock, ): wrapper = MagicMock() 
wrapper.get_topic.side_effect = TopicNotFoundException @@ -460,7 +475,8 @@ def test_should_call_delete_topic_not_dry_run(self): ] def test_should_print_correct_warning_when_deleting_topic_that_does_not_exists_not_dry_run( - self, log_warning_mock: MagicMock, + self, + log_warning_mock: MagicMock, ): wrapper = MagicMock() topic_handler = TopicHandler(proxy_wrapper=wrapper) diff --git a/tests/components/test_base_defaults_component.py b/tests/components/test_base_defaults_component.py index 6c78d5fa1..176303851 100644 --- a/tests/components/test_base_defaults_component.py +++ b/tests/components/test_base_defaults_component.py @@ -77,7 +77,9 @@ class TestBaseDefaultsComponent: ], ) def test_load_defaults( - self, component_class: type[BaseDefaultsComponent], defaults: dict, + self, + component_class: type[BaseDefaultsComponent], + defaults: dict, ): assert ( load_defaults(component_class, DEFAULTS_PATH / "defaults.yaml") == defaults @@ -105,7 +107,9 @@ def test_load_defaults( ], ) def test_load_defaults_with_environment( - self, component_class: type[BaseDefaultsComponent], defaults: dict, + self, + component_class: type[BaseDefaultsComponent], + defaults: dict, ): assert ( load_defaults( @@ -117,7 +121,9 @@ def test_load_defaults_with_environment( ) def test_inherit_defaults( - self, config: PipelineConfig, handlers: ComponentHandlers, + self, + config: PipelineConfig, + handlers: ComponentHandlers, ): component = Child(config=config, handlers=handlers) @@ -161,7 +167,9 @@ def test_inherit(self, config: PipelineConfig, handlers: ComponentHandlers): ), "Defaults in code should be kept for parents" def test_multiple_generations( - self, config: PipelineConfig, handlers: ComponentHandlers, + self, + config: PipelineConfig, + handlers: ComponentHandlers, ): component = GrandChild(config=config, handlers=handlers) @@ -183,7 +191,9 @@ def test_multiple_generations( assert component.grand_child == "grand-child-value" def test_env_var_substitution( - self, config: PipelineConfig, handlers: ComponentHandlers, + self, + config: PipelineConfig, + handlers: ComponentHandlers, ): ENV["pipeline_name"] = str(DEFAULTS_PATH) component = EnvVarTest(config=config, handlers=handlers) diff --git a/tests/components/test_kafka_app.py b/tests/components/test_kafka_app.py index 18de732af..d39d2f6bc 100644 --- a/tests/components/test_kafka_app.py +++ b/tests/components/test_kafka_app.py @@ -80,7 +80,8 @@ def test_should_deploy_kafka_app( ) helm_upgrade_install = mocker.patch.object(kafka_app.helm, "upgrade_install") print_helm_diff = mocker.patch.object( - kafka_app.dry_run_handler, "print_helm_diff", + kafka_app.dry_run_handler, + "print_helm_diff", ) mocker.patch.object( KafkaApp, diff --git a/tests/components/test_kafka_connector.py b/tests/components/test_kafka_connector.py index fea299cf2..ce831d0d4 100644 --- a/tests/components/test_kafka_connector.py +++ b/tests/components/test_kafka_connector.py @@ -85,7 +85,8 @@ def test_connector_config_name_override( assert connector.app.name == CONNECTOR_FULL_NAME with pytest.raises( - ValueError, match="Connector name should be the same as component name", + ValueError, + match="Connector name should be the same as component name", ): KafkaConnector( name=CONNECTOR_NAME, @@ -96,7 +97,8 @@ def test_connector_config_name_override( ) with pytest.raises( - ValueError, match="Connector name should be the same as component name", + ValueError, + match="Connector name should be the same as component name", ): KafkaConnector( name=CONNECTOR_NAME, diff --git 
a/tests/components/test_kafka_sink_connector.py b/tests/components/test_kafka_sink_connector.py index a748e8fd7..30f02f6a4 100644 --- a/tests/components/test_kafka_sink_connector.py +++ b/tests/components/test_kafka_sink_connector.py @@ -55,7 +55,8 @@ def connector( to=ToSection( topics={ TopicName("${output_topic_name}"): TopicConfig( - type=OutputTopicTypes.OUTPUT, partitions_count=10, + type=OutputTopicTypes.OUTPUT, + partitions_count=10, ), }, ), @@ -143,10 +144,12 @@ def test_deploy_order( mocker: MockerFixture, ): mock_create_topics = mocker.patch.object( - connector.handlers.topic_handler, "create_topics", + connector.handlers.topic_handler, + "create_topics", ) mock_create_connector = mocker.patch.object( - connector.handlers.connector_handler, "create_connector", + connector.handlers.connector_handler, + "create_connector", ) mock = mocker.MagicMock() @@ -164,13 +167,15 @@ def test_destroy( mocker: MockerFixture, ): mock_destroy_connector = mocker.patch.object( - connector.handlers.connector_handler, "destroy_connector", + connector.handlers.connector_handler, + "destroy_connector", ) connector.destroy(dry_run=True) mock_destroy_connector.assert_called_once_with( - CONNECTOR_FULL_NAME, dry_run=True, + CONNECTOR_FULL_NAME, + dry_run=True, ) def test_reset_when_dry_run_is_true( @@ -191,10 +196,12 @@ def test_reset_when_dry_run_is_false( mocker: MockerFixture, ): mock_delete_topics = mocker.patch.object( - connector.handlers.topic_handler, "delete_topics", + connector.handlers.topic_handler, + "delete_topics", ) mock_clean_connector = mocker.patch.object( - connector.handlers.connector_handler, "clean_connector", + connector.handlers.connector_handler, + "clean_connector", ) mock = mocker.MagicMock() mock.attach_mock(mock_clean_connector, "mock_clean_connector") @@ -264,10 +271,12 @@ def test_clean_when_dry_run_is_false( mocker: MockerFixture, ): mock_delete_topics = mocker.patch.object( - connector.handlers.topic_handler, "delete_topics", + connector.handlers.topic_handler, + "delete_topics", ) mock_clean_connector = mocker.patch.object( - connector.handlers.connector_handler, "clean_connector", + connector.handlers.connector_handler, + "clean_connector", ) mock = mocker.MagicMock() @@ -369,10 +378,12 @@ def test_clean_without_to_when_dry_run_is_false( ) mock_delete_topics = mocker.patch.object( - connector.handlers.topic_handler, "delete_topics", + connector.handlers.topic_handler, + "delete_topics", ) mock_clean_connector = mocker.patch.object( - connector.handlers.connector_handler, "clean_connector", + connector.handlers.connector_handler, + "clean_connector", ) mock = mocker.MagicMock() mock.attach_mock(mock_delete_topics, "mock_delete_topics") diff --git a/tests/components/test_kafka_source_connector.py b/tests/components/test_kafka_source_connector.py index a366e736a..4ed187884 100644 --- a/tests/components/test_kafka_source_connector.py +++ b/tests/components/test_kafka_source_connector.py @@ -48,7 +48,8 @@ def connector( to=ToSection( topics={ TopicName("${output_topic_name}"): TopicConfig( - type=OutputTopicTypes.OUTPUT, partitions_count=10, + type=OutputTopicTypes.OUTPUT, + partitions_count=10, ), }, ), @@ -83,11 +84,13 @@ def test_deploy_order( mocker: MockerFixture, ): mock_create_topics = mocker.patch.object( - connector.handlers.topic_handler, "create_topics", + connector.handlers.topic_handler, + "create_topics", ) mock_create_connector = mocker.patch.object( - connector.handlers.connector_handler, "create_connector", + connector.handlers.connector_handler, + 
"create_connector", ) mock = mocker.MagicMock() @@ -108,13 +111,15 @@ def test_destroy( assert connector.handlers.connector_handler mock_destroy_connector = mocker.patch.object( - connector.handlers.connector_handler, "destroy_connector", + connector.handlers.connector_handler, + "destroy_connector", ) connector.destroy(dry_run=True) mock_destroy_connector.assert_called_once_with( - CONNECTOR_FULL_NAME, dry_run=True, + CONNECTOR_FULL_NAME, + dry_run=True, ) def test_reset_when_dry_run_is_true( @@ -137,10 +142,12 @@ def test_reset_when_dry_run_is_false( ): assert connector.handlers.connector_handler mock_delete_topics = mocker.patch.object( - connector.handlers.topic_handler, "delete_topics", + connector.handlers.topic_handler, + "delete_topics", ) mock_clean_connector = mocker.spy( - connector.handlers.connector_handler, "clean_connector", + connector.handlers.connector_handler, + "clean_connector", ) mock = mocker.MagicMock() @@ -210,10 +217,12 @@ def test_clean_when_dry_run_is_false( assert connector.handlers.connector_handler mock_delete_topics = mocker.patch.object( - connector.handlers.topic_handler, "delete_topics", + connector.handlers.topic_handler, + "delete_topics", ) mock_clean_connector = mocker.spy( - connector.handlers.connector_handler, "clean_connector", + connector.handlers.connector_handler, + "clean_connector", ) mock = mocker.MagicMock() @@ -286,10 +295,12 @@ def test_clean_without_to_when_dry_run_is_false( assert connector.handlers.connector_handler mock_delete_topics = mocker.patch.object( - connector.handlers.topic_handler, "delete_topics", + connector.handlers.topic_handler, + "delete_topics", ) mock_clean_connector = mocker.spy( - connector.handlers.connector_handler, "clean_connector", + connector.handlers.connector_handler, + "clean_connector", ) mock = mocker.MagicMock() diff --git a/tests/components/test_kubernetes_app.py b/tests/components/test_kubernetes_app.py index de8a4723d..a3fc7281b 100644 --- a/tests/components/test_kubernetes_app.py +++ b/tests/components/test_kubernetes_app.py @@ -113,7 +113,8 @@ def test_should_lazy_load_helm_wrapper_and_call_repo_add_when_implemented( app_value: KubernetesTestValue, ): repo_config = HelmRepoConfig( - repository_name="test-repo", url="https://test.com/charts/", + repository_name="test-repo", + url="https://test.com/charts/", ) kubernetes_app = KubernetesApp( name="test-kubernetes-app", @@ -211,7 +212,9 @@ def test_should_call_helm_uninstall_when_destroying_kubernetes_app( kubernetes_app.destroy(True) helm_mock.uninstall.assert_called_once_with( - "test-namespace", "${pipeline_name}-test-kubernetes-app", True, + "test-namespace", + "${pipeline_name}-test-kubernetes-app", + True, ) log_info_mock.assert_called_once_with(magentaify(stdout)) @@ -224,7 +227,8 @@ def test_should_raise_value_error_when_name_is_not_valid( repo_config: HelmRepoConfig, ): with pytest.raises( - ValueError, match=r"The component name .* is invalid for Kubernetes.", + ValueError, + match=r"The component name .* is invalid for Kubernetes.", ): KubernetesApp( name="Not-Compatible*", @@ -236,7 +240,8 @@ def test_should_raise_value_error_when_name_is_not_valid( ) with pytest.raises( - ValueError, match=r"The component name .* is invalid for Kubernetes.", + ValueError, + match=r"The component name .* is invalid for Kubernetes.", ): KubernetesApp( name="snake_case*", diff --git a/tests/components/test_producer_app.py b/tests/components/test_producer_app.py index a0da3e909..f6f4bb659 100644 --- a/tests/components/test_producer_app.py +++ 
b/tests/components/test_producer_app.py @@ -42,7 +42,9 @@ def config(self) -> PipelineConfig: @pytest.fixture() def producer_app( - self, config: PipelineConfig, handlers: ComponentHandlers, + self, + config: PipelineConfig, + handlers: ComponentHandlers, ) -> ProducerApp: return ProducerApp( name=self.PRODUCER_APP_NAME, @@ -58,7 +60,8 @@ def producer_app( "to": { "topics": { "${output_topic_name}": TopicConfig( - type=OutputTopicTypes.OUTPUT, partitions_count=10, + type=OutputTopicTypes.OUTPUT, + partitions_count=10, ), }, }, @@ -79,7 +82,8 @@ def test_output_topics(self, config: PipelineConfig, handlers: ComponentHandlers "to": { "topics": { "${output_topic_name}": TopicConfig( - type=OutputTopicTypes.OUTPUT, partitions_count=10, + type=OutputTopicTypes.OUTPUT, + partitions_count=10, ), "extra-topic-1": TopicConfig( role="first-extra-topic", @@ -101,11 +105,13 @@ def test_deploy_order_when_dry_run_is_false( mocker: MockerFixture, ): mock_create_topics = mocker.patch.object( - producer_app.handlers.topic_handler, "create_topics", + producer_app.handlers.topic_handler, + "create_topics", ) mock_helm_upgrade_install = mocker.patch.object( - producer_app.helm, "upgrade_install", + producer_app.helm, + "upgrade_install", ) mock = mocker.MagicMock() @@ -150,7 +156,9 @@ def test_destroy( producer_app.destroy(dry_run=True) mock_helm_uninstall.assert_called_once_with( - "test-namespace", "${pipeline_name}-" + self.PRODUCER_APP_NAME, True, + "test-namespace", + "${pipeline_name}-" + self.PRODUCER_APP_NAME, + True, ) def test_should_not_reset_producer_app( @@ -159,11 +167,13 @@ def test_should_not_reset_producer_app( mocker: MockerFixture, ): mock_helm_upgrade_install = mocker.patch.object( - producer_app.helm, "upgrade_install", + producer_app.helm, + "upgrade_install", ) mock_helm_uninstall = mocker.patch.object(producer_app.helm, "uninstall") mock_helm_print_helm_diff = mocker.patch.object( - producer_app.dry_run_handler, "print_helm_diff", + producer_app.dry_run_handler, + "print_helm_diff", ) mock = mocker.MagicMock() @@ -205,10 +215,13 @@ def test_should_not_reset_producer_app( ] def test_should_clean_producer_app_and_deploy_clean_up_job_and_delete_clean_up_with_dry_run_false( - self, mocker: MockerFixture, producer_app: ProducerApp, + self, + mocker: MockerFixture, + producer_app: ProducerApp, ): mock_helm_upgrade_install = mocker.patch.object( - producer_app.helm, "upgrade_install", + producer_app.helm, + "upgrade_install", ) mock_helm_uninstall = mocker.patch.object(producer_app.helm, "uninstall") diff --git a/tests/components/test_streams_app.py b/tests/components/test_streams_app.py index 1fa29ca9a..071be0095 100644 --- a/tests/components/test_streams_app.py +++ b/tests/components/test_streams_app.py @@ -47,7 +47,9 @@ def config(self) -> PipelineConfig: @pytest.fixture() def streams_app( - self, config: PipelineConfig, handlers: ComponentHandlers, + self, + config: PipelineConfig, + handlers: ComponentHandlers, ) -> StreamsApp: return StreamsApp( name=self.STREAMS_APP_NAME, @@ -61,7 +63,8 @@ def streams_app( "to": { "topics": { "${output_topic_name}": TopicConfig( - type=OutputTopicTypes.OUTPUT, partitions_count=10, + type=OutputTopicTypes.OUTPUT, + partitions_count=10, ), }, }, @@ -113,7 +116,9 @@ def test_set_topics(self, config: PipelineConfig, handlers: ComponentHandlers): assert "extraInputPatterns" in streams_config def test_no_empty_input_topic( - self, config: PipelineConfig, handlers: ComponentHandlers, + self, + config: PipelineConfig, + handlers: ComponentHandlers, ): 
streams_app = StreamsApp( name=self.STREAMS_APP_NAME, @@ -145,7 +150,10 @@ def test_no_empty_input_topic( def test_should_validate(self, config: PipelineConfig, handlers: ComponentHandlers): # An exception should be raised when both role and type are defined and type is input - with pytest.raises(ValueError, match="Define role only if `type` is `pattern` or `None`"): + with pytest.raises( + ValueError, + match="Define role only if `type` is `pattern` or `None`", + ): StreamsApp( name=self.STREAMS_APP_NAME, config=config, @@ -167,7 +175,10 @@ def test_should_validate(self, config: PipelineConfig, handlers: ComponentHandle ) # An exception should be raised when both role and type are defined and type is error - with pytest.raises(ValueError, match="Define `role` only if `type` is undefined"): + with pytest.raises( + ValueError, + match="Define `role` only if `type` is undefined", + ): StreamsApp( name=self.STREAMS_APP_NAME, config=config, @@ -189,7 +200,9 @@ def test_should_validate(self, config: PipelineConfig, handlers: ComponentHandle ) def test_set_streams_output_from_to( - self, config: PipelineConfig, handlers: ComponentHandlers, + self, + config: PipelineConfig, + handlers: ComponentHandlers, ): streams_app = StreamsApp( name=self.STREAMS_APP_NAME, @@ -203,10 +216,12 @@ def test_set_streams_output_from_to( "to": { "topics": { "${output_topic_name}": TopicConfig( - type=OutputTopicTypes.OUTPUT, partitions_count=10, + type=OutputTopicTypes.OUTPUT, + partitions_count=10, ), "${error_topic_name}": TopicConfig( - type=OutputTopicTypes.ERROR, partitions_count=10, + type=OutputTopicTypes.ERROR, + partitions_count=10, ), "extra-topic-1": TopicConfig( role="first-extra-topic", @@ -228,7 +243,9 @@ def test_set_streams_output_from_to( assert streams_app.app.streams.error_topic == "${error_topic_name}" def test_weave_inputs_from_prev_component( - self, config: PipelineConfig, handlers: ComponentHandlers, + self, + config: PipelineConfig, + handlers: ComponentHandlers, ): streams_app = StreamsApp( name=self.STREAMS_APP_NAME, @@ -246,16 +263,20 @@ def test_weave_inputs_from_prev_component( ToSection( topics={ TopicName("prev-output-topic"): TopicConfig( - type=OutputTopicTypes.OUTPUT, partitions_count=10, + type=OutputTopicTypes.OUTPUT, + partitions_count=10, ), TopicName("b"): TopicConfig( - type=OutputTopicTypes.OUTPUT, partitions_count=10, + type=OutputTopicTypes.OUTPUT, + partitions_count=10, ), TopicName("a"): TopicConfig( - type=OutputTopicTypes.OUTPUT, partitions_count=10, + type=OutputTopicTypes.OUTPUT, + partitions_count=10, ), TopicName("prev-error-topic"): TopicConfig( - type=OutputTopicTypes.ERROR, partitions_count=10, + type=OutputTopicTypes.ERROR, + partitions_count=10, ), }, ), @@ -281,10 +302,12 @@ def test_deploy_order_when_dry_run_is_false( "to": { "topics": { "${output_topic_name}": TopicConfig( - type=OutputTopicTypes.OUTPUT, partitions_count=10, + type=OutputTopicTypes.OUTPUT, + partitions_count=10, ), "${error_topic_name}": TopicConfig( - type=OutputTopicTypes.ERROR, partitions_count=10, + type=OutputTopicTypes.ERROR, + partitions_count=10, ), "extra-topic-1": TopicConfig( role="first-extra-topic", @@ -299,10 +322,12 @@ def test_deploy_order_when_dry_run_is_false( }, ) mock_create_topics = mocker.patch.object( - streams_app.handlers.topic_handler, "create_topics", + streams_app.handlers.topic_handler, + "create_topics", ) mock_helm_upgrade_install = mocker.patch.object( - streams_app.helm, "upgrade_install", + streams_app.helm, + "upgrade_install", ) mock = mocker.MagicMock() 
@@ -351,14 +376,19 @@ def test_destroy(self, streams_app: StreamsApp, mocker: MockerFixture): streams_app.destroy(dry_run=True) mock_helm_uninstall.assert_called_once_with( - "test-namespace", "${pipeline_name}-" + self.STREAMS_APP_NAME, True, + "test-namespace", + "${pipeline_name}-" + self.STREAMS_APP_NAME, + True, ) def test_reset_when_dry_run_is_false( - self, streams_app: StreamsApp, mocker: MockerFixture, + self, + streams_app: StreamsApp, + mocker: MockerFixture, ): mock_helm_upgrade_install = mocker.patch.object( - streams_app.helm, "upgrade_install", + streams_app.helm, + "upgrade_install", ) mock_helm_uninstall = mocker.patch.object(streams_app.helm, "uninstall") @@ -402,7 +432,8 @@ def test_should_clean_streams_app_and_deploy_clean_up_job_and_delete_clean_up( mocker: MockerFixture, ): mock_helm_upgrade_install = mocker.patch.object( - streams_app.helm, "upgrade_install", + streams_app.helm, + "upgrade_install", ) mock_helm_uninstall = mocker.patch.object(streams_app.helm, "uninstall") diff --git a/tests/pipeline/snapshots/snap_test_example.py b/tests/pipeline/snapshots/snap_test_example.py index cff924b5f..c8ef073e7 100644 --- a/tests/pipeline/snapshots/snap_test_example.py +++ b/tests/pipeline/snapshots/snap_test_example.py @@ -1,354 +1,351 @@ -# -*- coding: utf-8 -*- # snapshottest: v1 - https://goo.gl/zC4yUc -from __future__ import unicode_literals from snapshottest import Snapshot - snapshots = Snapshot() -snapshots['TestExample.test_atm_fraud atm-fraud-pipeline'] = { - 'components': [ +snapshots["TestExample.test_atm_fraud atm-fraud-pipeline"] = { + "components": [ { - 'app': { - 'debug': True, - 'image': '${DOCKER_REGISTRY}/atm-demo-accountproducer', - 'imageTag': '1.0.0', - 'nameOverride': 'account-producer', - 'prometheus': { - 'jmx': { - 'enabled': False - } - }, - 'replicaCount': 1, - 'schedule': '0 12 * * *', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'extraOutputTopics': { + "app": { + "debug": True, + "image": "${DOCKER_REGISTRY}/atm-demo-accountproducer", + "imageTag": "1.0.0", + "nameOverride": "account-producer", + "prometheus": { + "jmx": { + "enabled": False, + }, + }, + "replicaCount": 1, + "schedule": "0 12 * * *", + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "extraOutputTopics": { }, - 'optimizeLeaveGroupBehavior': False, - 'outputTopic': 'bakdata-atm-fraud-detection-account-producer-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' + "optimizeLeaveGroupBehavior": False, + "outputTopic": "bakdata-atm-fraud-detection-account-producer-topic", + "schemaRegistryUrl": "http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081", }, - 'suspend': True + "suspend": True, }, - 'name': 'account-producer', - 'namespace': '${NAMESPACE}', - 'prefix': '', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' + "name": "account-producer", + "namespace": "${NAMESPACE}", + "prefix": "", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", }, - 'to': { - 'models': { + "to": { + "models": { }, - 'topics': { - 'bakdata-atm-fraud-detection-account-producer-topic': { - 'configs': { + "topics": { + "bakdata-atm-fraud-detection-account-producer-topic": { + 
"configs": { }, - 'partitions_count': 3 - } - } + "partitions_count": 3, + }, + }, }, - 'type': 'producer-app', - 'version': '2.9.0' + "type": "producer-app", + "version": "2.9.0", }, { - 'app': { - 'commandLine': { - 'ITERATION': 20, - 'REAL_TX': 19 - }, - 'debug': True, - 'image': '${DOCKER_REGISTRY}/atm-demo-transactionavroproducer', - 'imageTag': '1.0.0', - 'nameOverride': 'transaction-avro-producer', - 'prometheus': { - 'jmx': { - 'enabled': False - } - }, - 'replicaCount': 1, - 'schedule': '0 12 * * *', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'extraOutputTopics': { + "app": { + "commandLine": { + "ITERATION": 20, + "REAL_TX": 19, + }, + "debug": True, + "image": "${DOCKER_REGISTRY}/atm-demo-transactionavroproducer", + "imageTag": "1.0.0", + "nameOverride": "transaction-avro-producer", + "prometheus": { + "jmx": { + "enabled": False, + }, + }, + "replicaCount": 1, + "schedule": "0 12 * * *", + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "extraOutputTopics": { }, - 'optimizeLeaveGroupBehavior': False, - 'outputTopic': 'bakdata-atm-fraud-detection-transaction-avro-producer-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' + "optimizeLeaveGroupBehavior": False, + "outputTopic": "bakdata-atm-fraud-detection-transaction-avro-producer-topic", + "schemaRegistryUrl": "http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081", }, - 'suspend': True + "suspend": True, }, - 'name': 'transaction-avro-producer', - 'namespace': '${NAMESPACE}', - 'prefix': '', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' + "name": "transaction-avro-producer", + "namespace": "${NAMESPACE}", + "prefix": "", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", }, - 'to': { - 'models': { + "to": { + "models": { }, - 'topics': { - 'bakdata-atm-fraud-detection-transaction-avro-producer-topic': { - 'configs': { + "topics": { + "bakdata-atm-fraud-detection-transaction-avro-producer-topic": { + "configs": { }, - 'partitions_count': 3 - } - } + "partitions_count": 3, + }, + }, }, - 'type': 'producer-app', - 'version': '2.9.0' + "type": "producer-app", + "version": "2.9.0", }, { - 'app': { - 'annotations': { - 'consumerGroup': 'atm-transactionjoiner-atm-fraud-joinedtransactions-topic' - }, - 'commandLine': { - 'PRODUCTIVE': False - }, - 'debug': True, - 'image': '${DOCKER_REGISTRY}/atm-demo-transactionjoiner', - 'imageTag': '1.0.0', - 'labels': { - 'pipeline': 'bakdata-atm-fraud-detection' - }, - 'nameOverride': 'transaction-joiner', - 'prometheus': { - 'jmx': { - 'enabled': False - } - }, - 'replicaCount': 1, - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'errorTopic': 'bakdata-atm-fraud-detection-transaction-joiner-dead-letter-topic', - 'inputTopics': [ - 'bakdata-atm-fraud-detection-transaction-avro-producer-topic' + "app": { + "annotations": { + "consumerGroup": "atm-transactionjoiner-atm-fraud-joinedtransactions-topic", + }, + "commandLine": { + "PRODUCTIVE": False, + }, + "debug": True, + "image": "${DOCKER_REGISTRY}/atm-demo-transactionjoiner", + "imageTag": "1.0.0", + "labels": { + "pipeline": "bakdata-atm-fraud-detection", + }, + 
"nameOverride": "transaction-joiner", + "prometheus": { + "jmx": { + "enabled": False, + }, + }, + "replicaCount": 1, + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "errorTopic": "bakdata-atm-fraud-detection-transaction-joiner-dead-letter-topic", + "inputTopics": [ + "bakdata-atm-fraud-detection-transaction-avro-producer-topic", ], - 'optimizeLeaveGroupBehavior': False, - 'outputTopic': 'bakdata-atm-fraud-detection-transaction-joiner-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' - } + "optimizeLeaveGroupBehavior": False, + "outputTopic": "bakdata-atm-fraud-detection-transaction-joiner-topic", + "schemaRegistryUrl": "http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081", + }, }, - 'name': 'transaction-joiner', - 'namespace': '${NAMESPACE}', - 'prefix': '', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' + "name": "transaction-joiner", + "namespace": "${NAMESPACE}", + "prefix": "", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", }, - 'to': { - 'models': { + "to": { + "models": { }, - 'topics': { - 'bakdata-atm-fraud-detection-transaction-joiner-dead-letter-topic': { - 'configs': { + "topics": { + "bakdata-atm-fraud-detection-transaction-joiner-dead-letter-topic": { + "configs": { }, - 'partitions_count': 1, - 'type': 'error' + "partitions_count": 1, + "type": "error", }, - 'bakdata-atm-fraud-detection-transaction-joiner-topic': { - 'configs': { + "bakdata-atm-fraud-detection-transaction-joiner-topic": { + "configs": { }, - 'partitions_count': 3 - } - } + "partitions_count": 3, + }, + }, }, - 'type': 'streams-app', - 'version': '2.9.0' + "type": "streams-app", + "version": "2.9.0", }, { - 'app': { - 'annotations': { - 'consumerGroup': 'atm-frauddetector-atm-fraud-possiblefraudtransactions-topic' - }, - 'commandLine': { - 'PRODUCTIVE': False - }, - 'debug': True, - 'image': '${DOCKER_REGISTRY}/atm-demo-frauddetector', - 'imageTag': '1.0.0', - 'labels': { - 'pipeline': 'bakdata-atm-fraud-detection' - }, - 'nameOverride': 'fraud-detector', - 'prometheus': { - 'jmx': { - 'enabled': False - } - }, - 'replicaCount': 1, - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'errorTopic': 'bakdata-atm-fraud-detection-fraud-detector-dead-letter-topic', - 'inputTopics': [ - 'bakdata-atm-fraud-detection-transaction-joiner-topic' + "app": { + "annotations": { + "consumerGroup": "atm-frauddetector-atm-fraud-possiblefraudtransactions-topic", + }, + "commandLine": { + "PRODUCTIVE": False, + }, + "debug": True, + "image": "${DOCKER_REGISTRY}/atm-demo-frauddetector", + "imageTag": "1.0.0", + "labels": { + "pipeline": "bakdata-atm-fraud-detection", + }, + "nameOverride": "fraud-detector", + "prometheus": { + "jmx": { + "enabled": False, + }, + }, + "replicaCount": 1, + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "errorTopic": "bakdata-atm-fraud-detection-fraud-detector-dead-letter-topic", + "inputTopics": [ + "bakdata-atm-fraud-detection-transaction-joiner-topic", ], - 'optimizeLeaveGroupBehavior': False, - 'outputTopic': 'bakdata-atm-fraud-detection-fraud-detector-topic', - 'schemaRegistryUrl': 
'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' - } + "optimizeLeaveGroupBehavior": False, + "outputTopic": "bakdata-atm-fraud-detection-fraud-detector-topic", + "schemaRegistryUrl": "http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081", + }, }, - 'name': 'fraud-detector', - 'namespace': '${NAMESPACE}', - 'prefix': '', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' + "name": "fraud-detector", + "namespace": "${NAMESPACE}", + "prefix": "", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", }, - 'to': { - 'models': { + "to": { + "models": { }, - 'topics': { - 'bakdata-atm-fraud-detection-fraud-detector-dead-letter-topic': { - 'configs': { + "topics": { + "bakdata-atm-fraud-detection-fraud-detector-dead-letter-topic": { + "configs": { }, - 'partitions_count': 1, - 'type': 'error' + "partitions_count": 1, + "type": "error", }, - 'bakdata-atm-fraud-detection-fraud-detector-topic': { - 'configs': { + "bakdata-atm-fraud-detection-fraud-detector-topic": { + "configs": { }, - 'partitions_count': 3 - } - } + "partitions_count": 3, + }, + }, }, - 'type': 'streams-app', - 'version': '2.9.0' + "type": "streams-app", + "version": "2.9.0", }, { - 'app': { - 'annotations': { - 'consumerGroup': 'atm-accountlinker-atm-fraud-output-topic' - }, - 'commandLine': { - 'PRODUCTIVE': False - }, - 'debug': True, - 'image': '${DOCKER_REGISTRY}/atm-demo-accountlinker', - 'imageTag': '1.0.0', - 'labels': { - 'pipeline': 'bakdata-atm-fraud-detection' - }, - 'nameOverride': 'account-linker', - 'prometheus': { - 'jmx': { - 'enabled': False - } - }, - 'replicaCount': 1, - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'errorTopic': 'bakdata-atm-fraud-detection-account-linker-dead-letter-topic', - 'extraInputTopics': { - 'accounts': [ - 'bakdata-atm-fraud-detection-account-producer-topic' - ] + "app": { + "annotations": { + "consumerGroup": "atm-accountlinker-atm-fraud-output-topic", + }, + "commandLine": { + "PRODUCTIVE": False, + }, + "debug": True, + "image": "${DOCKER_REGISTRY}/atm-demo-accountlinker", + "imageTag": "1.0.0", + "labels": { + "pipeline": "bakdata-atm-fraud-detection", + }, + "nameOverride": "account-linker", + "prometheus": { + "jmx": { + "enabled": False, }, - 'inputTopics': [ - 'bakdata-atm-fraud-detection-fraud-detector-topic' + }, + "replicaCount": 1, + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "errorTopic": "bakdata-atm-fraud-detection-account-linker-dead-letter-topic", + "extraInputTopics": { + "accounts": [ + "bakdata-atm-fraud-detection-account-producer-topic", + ], + }, + "inputTopics": [ + "bakdata-atm-fraud-detection-fraud-detector-topic", ], - 'optimizeLeaveGroupBehavior': False, - 'outputTopic': 'bakdata-atm-fraud-detection-account-linker-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' - } + "optimizeLeaveGroupBehavior": False, + "outputTopic": "bakdata-atm-fraud-detection-account-linker-topic", + "schemaRegistryUrl": "http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081", + }, }, - 'from': { - 'components': { - 'account-producer': { - 'role': 'accounts' + "from": { + "components": { + "account-producer": { + "role": "accounts", }, - 
'fraud-detector': { - 'type': 'input' - } + "fraud-detector": { + "type": "input", + }, + }, + "topics": { }, - 'topics': { - } }, - 'name': 'account-linker', - 'namespace': '${NAMESPACE}', - 'prefix': '', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' + "name": "account-linker", + "namespace": "${NAMESPACE}", + "prefix": "", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", }, - 'to': { - 'models': { + "to": { + "models": { }, - 'topics': { - 'bakdata-atm-fraud-detection-account-linker-dead-letter-topic': { - 'configs': { + "topics": { + "bakdata-atm-fraud-detection-account-linker-dead-letter-topic": { + "configs": { }, - 'partitions_count': 1, - 'type': 'error' + "partitions_count": 1, + "type": "error", }, - 'bakdata-atm-fraud-detection-account-linker-topic': { - 'configs': { + "bakdata-atm-fraud-detection-account-linker-topic": { + "configs": { }, - 'partitions_count': 3 - } - } + "partitions_count": 3, + }, + }, }, - 'type': 'streams-app', - 'version': '2.9.0' + "type": "streams-app", + "version": "2.9.0", }, { - 'app': { - 'auto.create': True, - 'connection.ds.pool.size': 5, - 'connection.password': 'AppPassword', - 'connection.url': 'jdbc:postgresql://postgresql-dev.kpops.svc.cluster.local:5432/app_db', - 'connection.user': 'app1', - 'connector.class': 'io.confluent.connect.jdbc.JdbcSinkConnector', - 'errors.deadletterqueue.context.headers.enable': True, - 'errors.deadletterqueue.topic.name': 'postgres-request-sink-dead-letters', - 'errors.deadletterqueue.topic.replication.factor': 1, - 'errors.tolerance': 'all', - 'insert.mode': 'insert', - 'insert.mode.databaselevel': True, - 'key.converter': 'org.apache.kafka.connect.storage.StringConverter', - 'name': 'postgresql-connector', - 'pk.mode': 'record_value', - 'table.name.format': 'fraud_transactions', - 'tasks.max': 1, - 'topics': 'bakdata-atm-fraud-detection-account-linker-topic', - 'transforms': 'flatten', - 'transforms.flatten.type': 'org.apache.kafka.connect.transforms.Flatten$Value', - 'value.converter': 'io.confluent.connect.avro.AvroConverter', - 'value.converter.schema.registry.url': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' + "app": { + "auto.create": True, + "connection.ds.pool.size": 5, + "connection.password": "AppPassword", + "connection.url": "jdbc:postgresql://postgresql-dev.kpops.svc.cluster.local:5432/app_db", + "connection.user": "app1", + "connector.class": "io.confluent.connect.jdbc.JdbcSinkConnector", + "errors.deadletterqueue.context.headers.enable": True, + "errors.deadletterqueue.topic.name": "postgres-request-sink-dead-letters", + "errors.deadletterqueue.topic.replication.factor": 1, + "errors.tolerance": "all", + "insert.mode": "insert", + "insert.mode.databaselevel": True, + "key.converter": "org.apache.kafka.connect.storage.StringConverter", + "name": "postgresql-connector", + "pk.mode": "record_value", + "table.name.format": "fraud_transactions", + "tasks.max": 1, + "topics": "bakdata-atm-fraud-detection-account-linker-topic", + "transforms": "flatten", + "transforms.flatten.type": "org.apache.kafka.connect.transforms.Flatten$Value", + "value.converter": "io.confluent.connect.avro.AvroConverter", + "value.converter.schema.registry.url": "http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081", }, 
- 'name': 'postgresql-connector', - 'namespace': '${NAMESPACE}', - 'prefix': '', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-kafka-connect-resetter', - 'url': 'https://bakdata.github.io/kafka-connect-resetter/' + "name": "postgresql-connector", + "namespace": "${NAMESPACE}", + "prefix": "", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-kafka-connect-resetter", + "url": "https://bakdata.github.io/kafka-connect-resetter/", }, - 'resetter_values': { + "resetter_values": { }, - 'type': 'kafka-sink-connector', - 'version': '1.0.4' - } - ] + "type": "kafka-sink-connector", + "version": "1.0.4", + }, + ], } diff --git a/tests/pipeline/snapshots/snap_test_pipeline.py b/tests/pipeline/snapshots/snap_test_pipeline.py index c2e339fbc..2a63afd83 100644 --- a/tests/pipeline/snapshots/snap_test_pipeline.py +++ b/tests/pipeline/snapshots/snap_test_pipeline.py @@ -1,2309 +1,2306 @@ -# -*- coding: utf-8 -*- # snapshottest: v1 - https://goo.gl/zC4yUc -from __future__ import unicode_literals from snapshottest import Snapshot - snapshots = Snapshot() -snapshots['TestPipeline.test_default_config test-pipeline'] = { - 'components': [ +snapshots["TestPipeline.test_default_config test-pipeline"] = { + "components": [ { - 'app': { - 'nameOverride': 'resources-custom-config-app1', - 'resources': { - 'limits': { - 'memory': '2G' - }, - 'requests': { - 'memory': '2G' - } - }, - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'extraOutputTopics': { - }, - 'outputTopic': 'resources-custom-config-app1', - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'name': 'app1', - 'namespace': 'development-namespace', - 'prefix': 'resources-custom-config-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-custom-config-app1': { - 'configs': { - }, - 'partitions_count': 3, - 'type': 'output' - } - } - }, - 'type': 'producer-app', - 'version': '2.9.0' + "app": { + "nameOverride": "resources-custom-config-app1", + "resources": { + "limits": { + "memory": "2G", + }, + "requests": { + "memory": "2G", + }, + }, + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "extraOutputTopics": { + }, + "outputTopic": "resources-custom-config-app1", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "name": "app1", + "namespace": "development-namespace", + "prefix": "resources-custom-config-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + }, + "topics": { + "resources-custom-config-app1": { + "configs": { + }, + "partitions_count": 3, + "type": "output", + }, + }, + }, + "type": "producer-app", + "version": "2.9.0", }, { - 'app': { - 'image': 'some-image', - 'labels': { - 'pipeline': 'resources-custom-config' - }, - 'nameOverride': 'resources-custom-config-app2', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'errorTopic': 'resources-custom-config-app2-error', - 'inputTopics': [ - 'resources-custom-config-app1' + "app": { + "image": "some-image", + "labels": { + "pipeline": 
"resources-custom-config", + }, + "nameOverride": "resources-custom-config-app2", + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "errorTopic": "resources-custom-config-app2-error", + "inputTopics": [ + "resources-custom-config-app1", ], - 'outputTopic': 'resources-custom-config-app2', - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'name': 'app2', - 'namespace': 'development-namespace', - 'prefix': 'resources-custom-config-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-custom-config-app2': { - 'configs': { - }, - 'partitions_count': 3, - 'type': 'output' - }, - 'resources-custom-config-app2-error': { - 'configs': { - }, - 'partitions_count': 1, - 'type': 'error' - } - } - }, - 'type': 'streams-app', - 'version': '2.9.0' - } - ] + "outputTopic": "resources-custom-config-app2", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "name": "app2", + "namespace": "development-namespace", + "prefix": "resources-custom-config-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + }, + "topics": { + "resources-custom-config-app2": { + "configs": { + }, + "partitions_count": 3, + "type": "output", + }, + "resources-custom-config-app2-error": { + "configs": { + }, + "partitions_count": 1, + "type": "error", + }, + }, + }, + "type": "streams-app", + "version": "2.9.0", + }, + ], } -snapshots['TestPipeline.test_inflate_pipeline test-pipeline'] = { - 'components': [ +snapshots["TestPipeline.test_inflate_pipeline test-pipeline"] = { + "components": [ { - 'app': { - 'commandLine': { - 'FAKE_ARG': 'fake-arg-value' - }, - 'image': 'example-registry/fake-image', - 'imageTag': '0.0.1', - 'nameOverride': 'resources-pipeline-with-inflate-scheduled-producer', - 'schedule': '30 3/8 * * *', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'extraOutputTopics': { - }, - 'outputTopic': 'resources-pipeline-with-inflate-scheduled-producer', - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'name': 'scheduled-producer', - 'namespace': 'example-namespace', - 'prefix': 'resources-pipeline-with-inflate-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - 'com/bakdata/kafka/fake': '1.0.0' - }, - 'topics': { - 'resources-pipeline-with-inflate-scheduled-producer': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 12, - 'type': 'output', - 'value_schema': 'com.bakdata.fake.Produced' - } - } - }, - 'type': 'scheduled-producer', - 'version': '2.4.2' + "app": { + "commandLine": { + "FAKE_ARG": "fake-arg-value", + }, + "image": "example-registry/fake-image", + "imageTag": "0.0.1", + "nameOverride": "resources-pipeline-with-inflate-scheduled-producer", + "schedule": "30 3/8 * * *", + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "extraOutputTopics": { + }, + "outputTopic": "resources-pipeline-with-inflate-scheduled-producer", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "name": 
"scheduled-producer", + "namespace": "example-namespace", + "prefix": "resources-pipeline-with-inflate-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + "com/bakdata/kafka/fake": "1.0.0", + }, + "topics": { + "resources-pipeline-with-inflate-scheduled-producer": { + "configs": { + "cleanup.policy": "compact,delete", + }, + "partitions_count": 12, + "type": "output", + "value_schema": "com.bakdata.fake.Produced", + }, + }, + }, + "type": "scheduled-producer", + "version": "2.4.2", }, { - 'app': { - 'autoscaling': { - 'consumerGroup': 'converter-resources-pipeline-with-inflate-converter', - 'cooldownPeriod': 300, - 'enabled': True, - 'lagThreshold': 10000, - 'maxReplicas': 1, - 'minReplicas': 0, - 'offsetResetPolicy': 'earliest', - 'pollingInterval': 30, - 'topics': [ - ] - }, - 'commandLine': { - 'CONVERT_XML': True - }, - 'nameOverride': 'resources-pipeline-with-inflate-converter', - 'resources': { - 'limits': { - 'memory': '2G' - }, - 'requests': { - 'memory': '2G' - } - }, - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-pipeline-with-inflate-converter-error', - 'inputTopics': [ - 'resources-pipeline-with-inflate-scheduled-producer' + "app": { + "autoscaling": { + "consumerGroup": "converter-resources-pipeline-with-inflate-converter", + "cooldownPeriod": 300, + "enabled": True, + "lagThreshold": 10000, + "maxReplicas": 1, + "minReplicas": 0, + "offsetResetPolicy": "earliest", + "pollingInterval": 30, + "topics": [ + ], + }, + "commandLine": { + "CONVERT_XML": True, + }, + "nameOverride": "resources-pipeline-with-inflate-converter", + "resources": { + "limits": { + "memory": "2G", + }, + "requests": { + "memory": "2G", + }, + }, + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "config": { + "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", + }, + "errorTopic": "resources-pipeline-with-inflate-converter-error", + "inputTopics": [ + "resources-pipeline-with-inflate-scheduled-producer", ], - 'outputTopic': 'resources-pipeline-with-inflate-converter', - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'name': 'converter', - 'namespace': 'example-namespace', - 'prefix': 'resources-pipeline-with-inflate-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-pipeline-with-inflate-converter': { - 'configs': { - 'cleanup.policy': 'compact,delete', - 'retention.ms': '-1' - }, - 'partitions_count': 50, - 'type': 'output' - }, - 'resources-pipeline-with-inflate-converter-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 10, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } - } - }, - 'type': 'converter', - 'version': '2.4.2' + "outputTopic": "resources-pipeline-with-inflate-converter", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "name": "converter", + "namespace": "example-namespace", + "prefix": "resources-pipeline-with-inflate-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + 
"repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + }, + "topics": { + "resources-pipeline-with-inflate-converter": { + "configs": { + "cleanup.policy": "compact,delete", + "retention.ms": "-1", + }, + "partitions_count": 50, + "type": "output", + }, + "resources-pipeline-with-inflate-converter-error": { + "configs": { + "cleanup.policy": "compact,delete", + }, + "partitions_count": 10, + "type": "error", + "value_schema": "com.bakdata.kafka.DeadLetter", + }, + }, + }, + "type": "converter", + "version": "2.4.2", }, { - 'app': { - 'autoscaling': { - 'consumerGroup': 'filter-resources-pipeline-with-inflate-should-inflate', - 'cooldownPeriod': 300, - 'enabled': True, - 'lagThreshold': 10000, - 'maxReplicas': 4, - 'minReplicas': 4, - 'offsetResetPolicy': 'earliest', - 'pollingInterval': 30, - 'topics': [ - 'resources-pipeline-with-inflate-should-inflate' - ] - }, - 'commandLine': { - 'TYPE': 'nothing' - }, - 'image': 'fake-registry/filter', - 'imageTag': '2.4.1', - 'nameOverride': 'resources-pipeline-with-inflate-should-inflate', - 'replicaCount': 4, - 'resources': { - 'requests': { - 'memory': '3G' - } - }, - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-pipeline-with-inflate-should-inflate-error', - 'inputTopics': [ - 'resources-pipeline-with-inflate-converter' + "app": { + "autoscaling": { + "consumerGroup": "filter-resources-pipeline-with-inflate-should-inflate", + "cooldownPeriod": 300, + "enabled": True, + "lagThreshold": 10000, + "maxReplicas": 4, + "minReplicas": 4, + "offsetResetPolicy": "earliest", + "pollingInterval": 30, + "topics": [ + "resources-pipeline-with-inflate-should-inflate", ], - 'outputTopic': 'resources-pipeline-with-inflate-should-inflate', - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'name': 'should-inflate', - 'namespace': 'example-namespace', - 'prefix': 'resources-pipeline-with-inflate-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-pipeline-with-inflate-should-inflate': { - 'configs': { - 'retention.ms': '-1' - }, - 'partitions_count': 50, - 'type': 'output' - }, - 'resources-pipeline-with-inflate-should-inflate-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } - } - }, - 'type': 'should-inflate', - 'version': '2.4.2' + }, + "commandLine": { + "TYPE": "nothing", + }, + "image": "fake-registry/filter", + "imageTag": "2.4.1", + "nameOverride": "resources-pipeline-with-inflate-should-inflate", + "replicaCount": 4, + "resources": { + "requests": { + "memory": "3G", + }, + }, + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "config": { + "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", + }, + "errorTopic": "resources-pipeline-with-inflate-should-inflate-error", + "inputTopics": [ + "resources-pipeline-with-inflate-converter", + ], + "outputTopic": "resources-pipeline-with-inflate-should-inflate", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "name": "should-inflate", + "namespace": "example-namespace", + "prefix": 
"resources-pipeline-with-inflate-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + }, + "topics": { + "resources-pipeline-with-inflate-should-inflate": { + "configs": { + "retention.ms": "-1", + }, + "partitions_count": 50, + "type": "output", + }, + "resources-pipeline-with-inflate-should-inflate-error": { + "configs": { + "cleanup.policy": "compact,delete", + }, + "partitions_count": 1, + "type": "error", + "value_schema": "com.bakdata.kafka.DeadLetter", + }, + }, + }, + "type": "should-inflate", + "version": "2.4.2", }, { - 'app': { - 'batch.size': '2000', - 'behavior.on.malformed.documents': 'warn', - 'behavior.on.null.values': 'delete', - 'connection.compression': 'true', - 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector', - 'key.ignore': 'false', - 'linger.ms': '5000', - 'max.buffered.records': '20000', - 'name': 'resources-pipeline-with-inflate-should-inflate-inflated-sink-connector', - 'read.timeout.ms': '120000', - 'tasks.max': '1', - 'topics': 'resources-pipeline-with-inflate-should-inflate', - 'transforms.changeTopic.replacement': 'resources-pipeline-with-inflate-should-inflate-index-v1' - }, - 'name': 'should-inflate-inflated-sink-connector', - 'namespace': 'example-namespace', - 'prefix': 'resources-pipeline-with-inflate-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-kafka-connect-resetter', - 'url': 'https://bakdata.github.io/kafka-connect-resetter/' - }, - 'resetter_values': { - }, - 'to': { - 'models': { - }, - 'topics': { - 'kafka-sink-connector': { - 'configs': { - }, - 'type': 'output' - }, - 'should-inflate-inflated-sink-connector': { - 'configs': { - }, - 'role': 'test' - } - } - }, - 'type': 'kafka-sink-connector', - 'version': '1.0.4' + "app": { + "batch.size": "2000", + "behavior.on.malformed.documents": "warn", + "behavior.on.null.values": "delete", + "connection.compression": "true", + "connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector", + "key.ignore": "false", + "linger.ms": "5000", + "max.buffered.records": "20000", + "name": "resources-pipeline-with-inflate-should-inflate-inflated-sink-connector", + "read.timeout.ms": "120000", + "tasks.max": "1", + "topics": "resources-pipeline-with-inflate-should-inflate", + "transforms.changeTopic.replacement": "resources-pipeline-with-inflate-should-inflate-index-v1", + }, + "name": "should-inflate-inflated-sink-connector", + "namespace": "example-namespace", + "prefix": "resources-pipeline-with-inflate-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-kafka-connect-resetter", + "url": "https://bakdata.github.io/kafka-connect-resetter/", + }, + "resetter_values": { + }, + "to": { + "models": { + }, + "topics": { + "kafka-sink-connector": { + "configs": { + }, + "type": "output", + }, + "should-inflate-inflated-sink-connector": { + "configs": { + }, + "role": "test", + }, + }, + }, + "type": "kafka-sink-connector", + "version": "1.0.4", }, { - 'app': { - 'nameOverride': 'resources-pipeline-with-inflate-should-inflate-inflated-streams-app', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 
'resources-pipeline-with-inflate-should-inflate-inflated-streams-app-error', - 'inputTopics': [ - 'kafka-sink-connector' + "app": { + "nameOverride": "resources-pipeline-with-inflate-should-inflate-inflated-streams-app", + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "config": { + "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", + }, + "errorTopic": "resources-pipeline-with-inflate-should-inflate-inflated-streams-app-error", + "inputTopics": [ + "kafka-sink-connector", ], - 'outputTopic': 'resources-pipeline-with-inflate-should-inflate-should-inflate-inflated-streams-app', - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'name': 'should-inflate-inflated-streams-app', - 'namespace': 'example-namespace', - 'prefix': 'resources-pipeline-with-inflate-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-pipeline-with-inflate-should-inflate-inflated-streams-app-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - }, - 'resources-pipeline-with-inflate-should-inflate-should-inflate-inflated-streams-app': { - 'configs': { - }, - 'type': 'output' - } - } - }, - 'type': 'streams-app', - 'version': '2.4.2' - } - ] + "outputTopic": "resources-pipeline-with-inflate-should-inflate-should-inflate-inflated-streams-app", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "name": "should-inflate-inflated-streams-app", + "namespace": "example-namespace", + "prefix": "resources-pipeline-with-inflate-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + }, + "topics": { + "resources-pipeline-with-inflate-should-inflate-inflated-streams-app-error": { + "configs": { + "cleanup.policy": "compact,delete", + }, + "partitions_count": 1, + "type": "error", + "value_schema": "com.bakdata.kafka.DeadLetter", + }, + "resources-pipeline-with-inflate-should-inflate-should-inflate-inflated-streams-app": { + "configs": { + }, + "type": "output", + }, + }, + }, + "type": "streams-app", + "version": "2.4.2", + }, + ], } -snapshots['TestPipeline.test_kafka_connect_sink_weave_from_topics test-pipeline'] = { - 'components': [ +snapshots["TestPipeline.test_kafka_connect_sink_weave_from_topics test-pipeline"] = { + "components": [ { - 'app': { - 'image': 'fake-image', - 'nameOverride': 'resources-kafka-connect-sink-streams-app', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-kafka-connect-sink-streams-app-error', - 'inputTopics': [ - 'example-topic' + "app": { + "image": "fake-image", + "nameOverride": "resources-kafka-connect-sink-streams-app", + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "config": { + "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", + }, + "errorTopic": "resources-kafka-connect-sink-streams-app-error", + "inputTopics": [ + "example-topic", ], - 'outputTopic': 'example-output', - 'schemaRegistryUrl': 
'http://localhost:8081' - } - }, - 'from': { - 'components': { - }, - 'topics': { - 'example-topic': { - 'type': 'input' - } - } - }, - 'name': 'streams-app', - 'namespace': 'example-namespace', - 'prefix': 'resources-kafka-connect-sink-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'example-output': { - 'configs': { - }, - 'type': 'output' - }, - 'resources-kafka-connect-sink-streams-app-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } - } - }, - 'type': 'streams-app', - 'version': '2.4.2' + "outputTopic": "example-output", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "from": { + "components": { + }, + "topics": { + "example-topic": { + "type": "input", + }, + }, + }, + "name": "streams-app", + "namespace": "example-namespace", + "prefix": "resources-kafka-connect-sink-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + }, + "topics": { + "example-output": { + "configs": { + }, + "type": "output", + }, + "resources-kafka-connect-sink-streams-app-error": { + "configs": { + "cleanup.policy": "compact,delete", + }, + "partitions_count": 1, + "type": "error", + "value_schema": "com.bakdata.kafka.DeadLetter", + }, + }, + }, + "type": "streams-app", + "version": "2.4.2", }, { - 'app': { - 'batch.size': '2000', - 'behavior.on.malformed.documents': 'warn', - 'behavior.on.null.values': 'delete', - 'connection.compression': 'true', - 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector', - 'key.ignore': 'false', - 'linger.ms': '5000', - 'max.buffered.records': '20000', - 'name': 'resources-kafka-connect-sink-es-sink-connector', - 'read.timeout.ms': '120000', - 'tasks.max': '1', - 'topics': 'example-output' - }, - 'name': 'es-sink-connector', - 'namespace': 'example-namespace', - 'prefix': 'resources-kafka-connect-sink-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-kafka-connect-resetter', - 'url': 'https://bakdata.github.io/kafka-connect-resetter/' - }, - 'resetter_values': { - }, - 'type': 'kafka-sink-connector', - 'version': '1.0.4' - } - ] + "app": { + "batch.size": "2000", + "behavior.on.malformed.documents": "warn", + "behavior.on.null.values": "delete", + "connection.compression": "true", + "connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector", + "key.ignore": "false", + "linger.ms": "5000", + "max.buffered.records": "20000", + "name": "resources-kafka-connect-sink-es-sink-connector", + "read.timeout.ms": "120000", + "tasks.max": "1", + "topics": "example-output", + }, + "name": "es-sink-connector", + "namespace": "example-namespace", + "prefix": "resources-kafka-connect-sink-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-kafka-connect-resetter", + "url": "https://bakdata.github.io/kafka-connect-resetter/", + }, + "resetter_values": { + }, + "type": "kafka-sink-connector", + "version": "1.0.4", + }, + ], } -snapshots['TestPipeline.test_load_pipeline test-pipeline'] = { - 'components': [ 
+snapshots["TestPipeline.test_load_pipeline test-pipeline"] = { + "components": [ { - 'app': { - 'commandLine': { - 'FAKE_ARG': 'fake-arg-value' - }, - 'image': 'example-registry/fake-image', - 'imageTag': '0.0.1', - 'nameOverride': 'resources-first-pipeline-scheduled-producer', - 'schedule': '30 3/8 * * *', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'extraOutputTopics': { - }, - 'outputTopic': 'resources-first-pipeline-scheduled-producer', - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'name': 'scheduled-producer', - 'namespace': 'example-namespace', - 'prefix': 'resources-first-pipeline-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - 'com/bakdata/kafka/fake': '1.0.0' - }, - 'topics': { - 'resources-first-pipeline-scheduled-producer': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 12, - 'type': 'output', - 'value_schema': 'com.bakdata.fake.Produced' - } - } - }, - 'type': 'scheduled-producer', - 'version': '2.4.2' + "app": { + "commandLine": { + "FAKE_ARG": "fake-arg-value", + }, + "image": "example-registry/fake-image", + "imageTag": "0.0.1", + "nameOverride": "resources-first-pipeline-scheduled-producer", + "schedule": "30 3/8 * * *", + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "extraOutputTopics": { + }, + "outputTopic": "resources-first-pipeline-scheduled-producer", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "name": "scheduled-producer", + "namespace": "example-namespace", + "prefix": "resources-first-pipeline-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + "com/bakdata/kafka/fake": "1.0.0", + }, + "topics": { + "resources-first-pipeline-scheduled-producer": { + "configs": { + "cleanup.policy": "compact,delete", + }, + "partitions_count": 12, + "type": "output", + "value_schema": "com.bakdata.fake.Produced", + }, + }, + }, + "type": "scheduled-producer", + "version": "2.4.2", }, { - 'app': { - 'autoscaling': { - 'consumerGroup': 'converter-resources-first-pipeline-converter', - 'cooldownPeriod': 300, - 'enabled': True, - 'lagThreshold': 10000, - 'maxReplicas': 1, - 'minReplicas': 0, - 'offsetResetPolicy': 'earliest', - 'pollingInterval': 30, - 'topics': [ - ] - }, - 'commandLine': { - 'CONVERT_XML': True - }, - 'nameOverride': 'resources-first-pipeline-converter', - 'resources': { - 'limits': { - 'memory': '2G' - }, - 'requests': { - 'memory': '2G' - } - }, - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-first-pipeline-converter-error', - 'inputTopics': [ - 'resources-first-pipeline-scheduled-producer' + "app": { + "autoscaling": { + "consumerGroup": "converter-resources-first-pipeline-converter", + "cooldownPeriod": 300, + "enabled": True, + "lagThreshold": 10000, + "maxReplicas": 1, + "minReplicas": 0, + "offsetResetPolicy": "earliest", + "pollingInterval": 30, + "topics": [ ], - 'outputTopic': 'resources-first-pipeline-converter', - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'name': 'converter', - 
'namespace': 'example-namespace', - 'prefix': 'resources-first-pipeline-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-first-pipeline-converter': { - 'configs': { - 'cleanup.policy': 'compact,delete', - 'retention.ms': '-1' - }, - 'partitions_count': 50, - 'type': 'output' - }, - 'resources-first-pipeline-converter-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 10, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } - } - }, - 'type': 'converter', - 'version': '2.4.2' + }, + "commandLine": { + "CONVERT_XML": True, + }, + "nameOverride": "resources-first-pipeline-converter", + "resources": { + "limits": { + "memory": "2G", + }, + "requests": { + "memory": "2G", + }, + }, + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "config": { + "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", + }, + "errorTopic": "resources-first-pipeline-converter-error", + "inputTopics": [ + "resources-first-pipeline-scheduled-producer", + ], + "outputTopic": "resources-first-pipeline-converter", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "name": "converter", + "namespace": "example-namespace", + "prefix": "resources-first-pipeline-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + }, + "topics": { + "resources-first-pipeline-converter": { + "configs": { + "cleanup.policy": "compact,delete", + "retention.ms": "-1", + }, + "partitions_count": 50, + "type": "output", + }, + "resources-first-pipeline-converter-error": { + "configs": { + "cleanup.policy": "compact,delete", + }, + "partitions_count": 10, + "type": "error", + "value_schema": "com.bakdata.kafka.DeadLetter", + }, + }, + }, + "type": "converter", + "version": "2.4.2", }, { - 'app': { - 'autoscaling': { - 'consumerGroup': 'filter-resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name', - 'cooldownPeriod': 300, - 'enabled': True, - 'lagThreshold': 10000, - 'maxReplicas': 4, - 'minReplicas': 4, - 'offsetResetPolicy': 'earliest', - 'pollingInterval': 30, - 'topics': [ - 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name' - ] - }, - 'commandLine': { - 'TYPE': 'nothing' - }, - 'image': 'fake-registry/filter', - 'imageTag': '2.4.1', - 'nameOverride': 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name', - 'replicaCount': 4, - 'resources': { - 'requests': { - 'memory': '3G' - } - }, - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-error', - 'inputTopics': [ - 
'resources-first-pipeline-converter' + "app": { + "autoscaling": { + "consumerGroup": "filter-resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name", + "cooldownPeriod": 300, + "enabled": True, + "lagThreshold": 10000, + "maxReplicas": 4, + "minReplicas": 4, + "offsetResetPolicy": "earliest", + "pollingInterval": 30, + "topics": [ + "resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name", + ], + }, + "commandLine": { + "TYPE": "nothing", + }, + "image": "fake-registry/filter", + "imageTag": "2.4.1", + "nameOverride": "resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name", + "replicaCount": 4, + "resources": { + "requests": { + "memory": "3G", + }, + }, + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "config": { + "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", + }, + "errorTopic": "resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-error", + "inputTopics": [ + "resources-first-pipeline-converter", ], - 'outputTopic': 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name', - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'name': 'a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name', - 'namespace': 'example-namespace', - 'prefix': 'resources-first-pipeline-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name': { - 'configs': { - 'retention.ms': '-1' - }, - 'partitions_count': 50, - 'type': 'output' - }, - 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } - } - }, - 'type': 'filter', - 'version': '2.4.2' - } - ] + "outputTopic": "resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "name": "a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name", + "namespace": "example-namespace", + "prefix": "resources-first-pipeline-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + }, + "topics": { + 
"resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name": { + "configs": { + "retention.ms": "-1", + }, + "partitions_count": 50, + "type": "output", + }, + "resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-error": { + "configs": { + "cleanup.policy": "compact,delete", + }, + "partitions_count": 1, + "type": "error", + "value_schema": "com.bakdata.kafka.DeadLetter", + }, + }, + }, + "type": "filter", + "version": "2.4.2", + }, + ], } -snapshots['TestPipeline.test_model_serialization test-pipeline'] = { - 'components': [ +snapshots["TestPipeline.test_model_serialization test-pipeline"] = { + "components": [ { - 'app': { - 'nameOverride': 'resources-pipeline-with-paths-account-producer', - 'streams': { - 'brokers': 'test', - 'extraOutputTopics': { - }, - 'outputTopic': 'out', - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'name': 'account-producer', - 'namespace': 'test', - 'prefix': 'resources-pipeline-with-paths-', - 'repo_config': { - 'repo_auth_flags': { - 'ca_file': 'my-cert.cert', - 'insecure_skip_tls_verify': False, - 'password': '$CI_JOB_TOKEN', - 'username': 'masked' - }, - 'repository_name': 'masked', - 'url': 'masked' - }, - 'type': 'producer-app', - 'version': '2.4.2' - } - ] + "app": { + "nameOverride": "resources-pipeline-with-paths-account-producer", + "streams": { + "brokers": "test", + "extraOutputTopics": { + }, + "outputTopic": "out", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "name": "account-producer", + "namespace": "test", + "prefix": "resources-pipeline-with-paths-", + "repo_config": { + "repo_auth_flags": { + "ca_file": "my-cert.cert", + "insecure_skip_tls_verify": False, + "password": "$CI_JOB_TOKEN", + "username": "masked", + }, + "repository_name": "masked", + "url": "masked", + }, + "type": "producer-app", + "version": "2.4.2", + }, + ], } -snapshots['TestPipeline.test_no_input_topic test-pipeline'] = { - 'components': [ +snapshots["TestPipeline.test_no_input_topic test-pipeline"] = { + "components": [ { - 'app': { - 'commandLine': { - 'CONVERT_XML': True - }, - 'nameOverride': 'resources-no-input-topic-pipeline-app1', - 'resources': { - 'limits': { - 'memory': '2G' - }, - 'requests': { - 'memory': '2G' - } - }, - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-no-input-topic-pipeline-app1-error', - 'inputPattern': '.*', - 'outputTopic': 'example-output', - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'from': { - 'components': { - }, - 'topics': { - '.*': { - 'type': 'pattern' - } - } - }, - 'name': 'app1', - 'namespace': 'example-namespace', - 'prefix': 'resources-no-input-topic-pipeline-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'example-output': { - 'configs': { - }, - 'type': 'output' - }, - 'resources-no-input-topic-pipeline-app1-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } - } - }, - 'type': 'streams-app', - 
'version': '2.4.2' + "app": { + "commandLine": { + "CONVERT_XML": True, + }, + "nameOverride": "resources-no-input-topic-pipeline-app1", + "resources": { + "limits": { + "memory": "2G", + }, + "requests": { + "memory": "2G", + }, + }, + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "config": { + "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", + }, + "errorTopic": "resources-no-input-topic-pipeline-app1-error", + "inputPattern": ".*", + "outputTopic": "example-output", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "from": { + "components": { + }, + "topics": { + ".*": { + "type": "pattern", + }, + }, + }, + "name": "app1", + "namespace": "example-namespace", + "prefix": "resources-no-input-topic-pipeline-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + }, + "topics": { + "example-output": { + "configs": { + }, + "type": "output", + }, + "resources-no-input-topic-pipeline-app1-error": { + "configs": { + "cleanup.policy": "compact,delete", + }, + "partitions_count": 1, + "type": "error", + "value_schema": "com.bakdata.kafka.DeadLetter", + }, + }, + }, + "type": "streams-app", + "version": "2.4.2", }, { - 'app': { - 'nameOverride': 'resources-no-input-topic-pipeline-app2', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-no-input-topic-pipeline-app2-error', - 'extraOutputTopics': { - 'extra': 'example-output-extra', - 'test-output': 'test-output-extra' - }, - 'inputTopics': [ - 'example-output' + "app": { + "nameOverride": "resources-no-input-topic-pipeline-app2", + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "config": { + "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", + }, + "errorTopic": "resources-no-input-topic-pipeline-app2-error", + "extraOutputTopics": { + "extra": "example-output-extra", + "test-output": "test-output-extra", + }, + "inputTopics": [ + "example-output", ], - 'schemaRegistryUrl': 'http://localhost:8081' - } + "schemaRegistryUrl": "http://localhost:8081", + }, }, - 'name': 'app2', - 'namespace': 'example-namespace', - 'prefix': 'resources-no-input-topic-pipeline-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False + "name": "app2", + "namespace": "example-namespace", + "prefix": "resources-no-input-topic-pipeline-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", }, - 'to': { - 'models': { + "to": { + "models": { }, - 'topics': { - 'example-output-extra': { - 'configs': { + "topics": { + "example-output-extra": { + "configs": { }, - 'role': 'extra' + "role": "extra", }, - 'resources-no-input-topic-pipeline-app2-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' + "resources-no-input-topic-pipeline-app2-error": { + "configs": { + "cleanup.policy": "compact,delete", }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' + "partitions_count": 1, + "type": "error", 
+ "value_schema": "com.bakdata.kafka.DeadLetter", }, - 'test-output-extra': { - 'configs': { + "test-output-extra": { + "configs": { }, - 'role': 'test-output' - } - } + "role": "test-output", + }, + }, }, - 'type': 'streams-app', - 'version': '2.4.2' - } - ] + "type": "streams-app", + "version": "2.4.2", + }, + ], } -snapshots['TestPipeline.test_no_user_defined_components test-pipeline'] = { - 'components': [ +snapshots["TestPipeline.test_no_user_defined_components test-pipeline"] = { + "components": [ { - 'app': { - 'image': 'fake-image', - 'nameOverride': 'resources-no-user-defined-components-streams-app', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-no-user-defined-components-streams-app-error', - 'inputTopics': [ - 'example-topic' + "app": { + "image": "fake-image", + "nameOverride": "resources-no-user-defined-components-streams-app", + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "config": { + "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", + }, + "errorTopic": "resources-no-user-defined-components-streams-app-error", + "inputTopics": [ + "example-topic", ], - 'outputTopic': 'example-output', - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'from': { - 'components': { - }, - 'topics': { - 'example-topic': { - 'type': 'input' - } - } - }, - 'name': 'streams-app', - 'namespace': 'example-namespace', - 'prefix': 'resources-no-user-defined-components-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'example-output': { - 'configs': { - }, - 'type': 'output' - }, - 'resources-no-user-defined-components-streams-app-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } - } - }, - 'type': 'streams-app', - 'version': '2.4.2' - } - ] + "outputTopic": "example-output", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "from": { + "components": { + }, + "topics": { + "example-topic": { + "type": "input", + }, + }, + }, + "name": "streams-app", + "namespace": "example-namespace", + "prefix": "resources-no-user-defined-components-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + }, + "topics": { + "example-output": { + "configs": { + }, + "type": "output", + }, + "resources-no-user-defined-components-streams-app-error": { + "configs": { + "cleanup.policy": "compact,delete", + }, + "partitions_count": 1, + "type": "error", + "value_schema": "com.bakdata.kafka.DeadLetter", + }, + }, + }, + "type": "streams-app", + "version": "2.4.2", + }, + ], } -snapshots['TestPipeline.test_pipelines_with_env_values test-pipeline'] = { - 'components': [ +snapshots["TestPipeline.test_pipelines_with_env_values test-pipeline"] = { + "components": [ { - 'app': { - 'commandLine': { - 'FAKE_ARG': 'override-arg' - }, - 'image': 'example-registry/fake-image', - 'imageTag': '0.0.1', - 'nameOverride': 'resources-pipeline-with-envs-input-producer', - 'schedule': '20 3/8 * * *', - 'streams': { - 
'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'extraOutputTopics': { - }, - 'outputTopic': 'resources-pipeline-with-envs-input-producer', - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'name': 'input-producer', - 'namespace': 'example-namespace', - 'prefix': 'resources-pipeline-with-envs-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - 'com/bakdata/kafka/fake': '1.0.0' - }, - 'topics': { - 'resources-pipeline-with-envs-input-producer': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 12, - 'type': 'output', - 'value_schema': 'com.bakdata.fake.Produced' - } - } - }, - 'type': 'scheduled-producer', - 'version': '2.4.2' + "app": { + "commandLine": { + "FAKE_ARG": "override-arg", + }, + "image": "example-registry/fake-image", + "imageTag": "0.0.1", + "nameOverride": "resources-pipeline-with-envs-input-producer", + "schedule": "20 3/8 * * *", + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "extraOutputTopics": { + }, + "outputTopic": "resources-pipeline-with-envs-input-producer", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "name": "input-producer", + "namespace": "example-namespace", + "prefix": "resources-pipeline-with-envs-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + "com/bakdata/kafka/fake": "1.0.0", + }, + "topics": { + "resources-pipeline-with-envs-input-producer": { + "configs": { + "cleanup.policy": "compact,delete", + }, + "partitions_count": 12, + "type": "output", + "value_schema": "com.bakdata.fake.Produced", + }, + }, + }, + "type": "scheduled-producer", + "version": "2.4.2", }, { - 'app': { - 'autoscaling': { - 'consumerGroup': 'converter-resources-pipeline-with-envs-converter', - 'cooldownPeriod': 300, - 'enabled': True, - 'lagThreshold': 10000, - 'maxReplicas': 1, - 'minReplicas': 0, - 'offsetResetPolicy': 'earliest', - 'pollingInterval': 30, - 'topics': [ - ] - }, - 'commandLine': { - 'CONVERT_XML': True - }, - 'nameOverride': 'resources-pipeline-with-envs-converter', - 'resources': { - 'limits': { - 'memory': '2G' - }, - 'requests': { - 'memory': '2G' - } - }, - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-pipeline-with-envs-converter-error', - 'inputTopics': [ - 'resources-pipeline-with-envs-input-producer' + "app": { + "autoscaling": { + "consumerGroup": "converter-resources-pipeline-with-envs-converter", + "cooldownPeriod": 300, + "enabled": True, + "lagThreshold": 10000, + "maxReplicas": 1, + "minReplicas": 0, + "offsetResetPolicy": "earliest", + "pollingInterval": 30, + "topics": [ ], - 'outputTopic': 'resources-pipeline-with-envs-converter', - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'name': 'converter', - 'namespace': 'example-namespace', - 'prefix': 'resources-pipeline-with-envs-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 
'topics': { - 'resources-pipeline-with-envs-converter': { - 'configs': { - 'cleanup.policy': 'compact,delete', - 'retention.ms': '-1' - }, - 'partitions_count': 50, - 'type': 'output' - }, - 'resources-pipeline-with-envs-converter-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 10, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } - } - }, - 'type': 'converter', - 'version': '2.4.2' + }, + "commandLine": { + "CONVERT_XML": True, + }, + "nameOverride": "resources-pipeline-with-envs-converter", + "resources": { + "limits": { + "memory": "2G", + }, + "requests": { + "memory": "2G", + }, + }, + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "config": { + "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", + }, + "errorTopic": "resources-pipeline-with-envs-converter-error", + "inputTopics": [ + "resources-pipeline-with-envs-input-producer", + ], + "outputTopic": "resources-pipeline-with-envs-converter", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "name": "converter", + "namespace": "example-namespace", + "prefix": "resources-pipeline-with-envs-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + }, + "topics": { + "resources-pipeline-with-envs-converter": { + "configs": { + "cleanup.policy": "compact,delete", + "retention.ms": "-1", + }, + "partitions_count": 50, + "type": "output", + }, + "resources-pipeline-with-envs-converter-error": { + "configs": { + "cleanup.policy": "compact,delete", + }, + "partitions_count": 10, + "type": "error", + "value_schema": "com.bakdata.kafka.DeadLetter", + }, + }, + }, + "type": "converter", + "version": "2.4.2", }, { - 'app': { - 'autoscaling': { - 'consumerGroup': 'filter-resources-pipeline-with-envs-filter', - 'cooldownPeriod': 300, - 'enabled': True, - 'lagThreshold': 10000, - 'maxReplicas': 4, - 'minReplicas': 4, - 'offsetResetPolicy': 'earliest', - 'pollingInterval': 30, - 'topics': [ - 'resources-pipeline-with-envs-filter' - ] - }, - 'commandLine': { - 'TYPE': 'nothing' - }, - 'image': 'fake-registry/filter', - 'imageTag': '2.4.1', - 'nameOverride': 'resources-pipeline-with-envs-filter', - 'replicaCount': 4, - 'resources': { - 'requests': { - 'memory': '3G' - } - }, - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-pipeline-with-envs-filter-error', - 'inputTopics': [ - 'resources-pipeline-with-envs-converter' + "app": { + "autoscaling": { + "consumerGroup": "filter-resources-pipeline-with-envs-filter", + "cooldownPeriod": 300, + "enabled": True, + "lagThreshold": 10000, + "maxReplicas": 4, + "minReplicas": 4, + "offsetResetPolicy": "earliest", + "pollingInterval": 30, + "topics": [ + "resources-pipeline-with-envs-filter", ], - 'outputTopic': 'resources-pipeline-with-envs-filter', - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'name': 'filter', - 'namespace': 'example-namespace', - 'prefix': 'resources-pipeline-with-envs-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 
'resources-pipeline-with-envs-filter': { - 'configs': { - 'retention.ms': '-1' - }, - 'partitions_count': 50, - 'type': 'output' - }, - 'resources-pipeline-with-envs-filter-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } - } - }, - 'type': 'filter', - 'version': '2.4.2' - } - ] + }, + "commandLine": { + "TYPE": "nothing", + }, + "image": "fake-registry/filter", + "imageTag": "2.4.1", + "nameOverride": "resources-pipeline-with-envs-filter", + "replicaCount": 4, + "resources": { + "requests": { + "memory": "3G", + }, + }, + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "config": { + "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", + }, + "errorTopic": "resources-pipeline-with-envs-filter-error", + "inputTopics": [ + "resources-pipeline-with-envs-converter", + ], + "outputTopic": "resources-pipeline-with-envs-filter", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "name": "filter", + "namespace": "example-namespace", + "prefix": "resources-pipeline-with-envs-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + }, + "topics": { + "resources-pipeline-with-envs-filter": { + "configs": { + "retention.ms": "-1", + }, + "partitions_count": 50, + "type": "output", + }, + "resources-pipeline-with-envs-filter-error": { + "configs": { + "cleanup.policy": "compact,delete", + }, + "partitions_count": 1, + "type": "error", + "value_schema": "com.bakdata.kafka.DeadLetter", + }, + }, + }, + "type": "filter", + "version": "2.4.2", + }, + ], } -snapshots['TestPipeline.test_prefix_pipeline_component test-pipeline'] = { - 'components': [ +snapshots["TestPipeline.test_prefix_pipeline_component test-pipeline"] = { + "components": [ { - 'app': { - 'debug': True, - 'image': '${DOCKER_REGISTRY}/atm-demo-accountproducer', - 'imageTag': '1.0.0', - 'nameOverride': 'from-pipeline-component-account-producer', - 'prometheus': { - 'jmx': { - 'enabled': False - } - }, - 'replicaCount': 1, - 'schedule': '0 12 * * *', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'extraOutputTopics': { - }, - 'schemaRegistryUrl': 'http://localhost:8081' - }, - 'suspend': True - }, - 'name': 'account-producer', - 'namespace': '${NAMESPACE}', - 'prefix': 'from-pipeline-component-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'type': 'producer-app', - 'version': '2.9.0' - } - ] + "app": { + "debug": True, + "image": "${DOCKER_REGISTRY}/atm-demo-accountproducer", + "imageTag": "1.0.0", + "nameOverride": "from-pipeline-component-account-producer", + "prometheus": { + "jmx": { + "enabled": False, + }, + }, + "replicaCount": 1, + "schedule": "0 12 * * *", + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "extraOutputTopics": { + }, + "schemaRegistryUrl": "http://localhost:8081", + }, + "suspend": True, + }, + "name": "account-producer", + "namespace": "${NAMESPACE}", + "prefix": "from-pipeline-component-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + 
"url": "https://bakdata.github.io/streams-bootstrap/", + }, + "type": "producer-app", + "version": "2.9.0", + }, + ], } -snapshots['TestPipeline.test_read_from_component test-pipeline'] = { - 'components': [ +snapshots["TestPipeline.test_read_from_component test-pipeline"] = { + "components": [ { - 'app': { - 'nameOverride': 'resources-read-from-component-producer1', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'extraOutputTopics': { - }, - 'outputTopic': 'resources-read-from-component-producer1', - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'name': 'producer1', - 'namespace': 'example-namespace', - 'prefix': 'resources-read-from-component-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-read-from-component-producer1': { - 'configs': { - }, - 'type': 'output' - } - } - }, - 'type': 'producer-app', - 'version': '2.4.2' + "app": { + "nameOverride": "resources-read-from-component-producer1", + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "extraOutputTopics": { + }, + "outputTopic": "resources-read-from-component-producer1", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "name": "producer1", + "namespace": "example-namespace", + "prefix": "resources-read-from-component-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + }, + "topics": { + "resources-read-from-component-producer1": { + "configs": { + }, + "type": "output", + }, + }, + }, + "type": "producer-app", + "version": "2.4.2", }, { - 'app': { - 'nameOverride': 'producer2', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'extraOutputTopics': { - }, - 'outputTopic': 'resources-read-from-component-producer2', - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'name': 'producer2', - 'namespace': 'example-namespace', - 'prefix': '', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-read-from-component-producer2': { - 'configs': { - }, - 'type': 'output' - } - } - }, - 'type': 'producer-app', - 'version': '2.4.2' + "app": { + "nameOverride": "producer2", + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "extraOutputTopics": { + }, + "outputTopic": "resources-read-from-component-producer2", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "name": "producer2", + "namespace": "example-namespace", + "prefix": "", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + }, + "topics": { + "resources-read-from-component-producer2": { + "configs": { + }, + "type": "output", + }, + }, + }, + "type": "producer-app", + "version": "2.4.2", }, { - 'app': { - 'autoscaling': { - 'consumerGroup': 'filter-resources-read-from-component-inflate-step', - 'cooldownPeriod': 300, - 'enabled': True, - 
'lagThreshold': 10000, - 'maxReplicas': 1, - 'minReplicas': 0, - 'offsetResetPolicy': 'earliest', - 'pollingInterval': 30, - 'topics': [ - 'resources-read-from-component-inflate-step' - ] - }, - 'image': 'fake-registry/filter', - 'imageTag': '2.4.1', - 'nameOverride': 'resources-read-from-component-inflate-step', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-read-from-component-inflate-step-error', - 'inputTopics': [ - 'resources-read-from-component-producer2' + "app": { + "autoscaling": { + "consumerGroup": "filter-resources-read-from-component-inflate-step", + "cooldownPeriod": 300, + "enabled": True, + "lagThreshold": 10000, + "maxReplicas": 1, + "minReplicas": 0, + "offsetResetPolicy": "earliest", + "pollingInterval": 30, + "topics": [ + "resources-read-from-component-inflate-step", ], - 'outputTopic': 'resources-read-from-component-inflate-step', - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'name': 'inflate-step', - 'namespace': 'example-namespace', - 'prefix': 'resources-read-from-component-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-read-from-component-inflate-step': { - 'configs': { - 'retention.ms': '-1' - }, - 'partitions_count': 50, - 'type': 'output' - }, - 'resources-read-from-component-inflate-step-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } - } - }, - 'type': 'should-inflate', - 'version': '2.4.2' + }, + "image": "fake-registry/filter", + "imageTag": "2.4.1", + "nameOverride": "resources-read-from-component-inflate-step", + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "config": { + "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", + }, + "errorTopic": "resources-read-from-component-inflate-step-error", + "inputTopics": [ + "resources-read-from-component-producer2", + ], + "outputTopic": "resources-read-from-component-inflate-step", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "name": "inflate-step", + "namespace": "example-namespace", + "prefix": "resources-read-from-component-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + }, + "topics": { + "resources-read-from-component-inflate-step": { + "configs": { + "retention.ms": "-1", + }, + "partitions_count": 50, + "type": "output", + }, + "resources-read-from-component-inflate-step-error": { + "configs": { + "cleanup.policy": "compact,delete", + }, + "partitions_count": 1, + "type": "error", + "value_schema": "com.bakdata.kafka.DeadLetter", + }, + }, + }, + "type": "should-inflate", + "version": "2.4.2", }, { - 'app': { - 'batch.size': '2000', - 'behavior.on.malformed.documents': 'warn', - 'behavior.on.null.values': 'delete', - 'connection.compression': 'true', - 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector', - 'key.ignore': 'false', - 'linger.ms': '5000', - 'max.buffered.records': '20000', - 'name': 
'resources-read-from-component-inflate-step-inflated-sink-connector', - 'read.timeout.ms': '120000', - 'tasks.max': '1', - 'topics': 'resources-read-from-component-inflate-step', - 'transforms.changeTopic.replacement': 'resources-read-from-component-inflate-step-index-v1' - }, - 'name': 'inflate-step-inflated-sink-connector', - 'namespace': 'example-namespace', - 'prefix': 'resources-read-from-component-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-kafka-connect-resetter', - 'url': 'https://bakdata.github.io/kafka-connect-resetter/' - }, - 'resetter_values': { - }, - 'to': { - 'models': { - }, - 'topics': { - 'inflate-step-inflated-sink-connector': { - 'configs': { - }, - 'role': 'test' - }, - 'kafka-sink-connector': { - 'configs': { - }, - 'type': 'output' - } - } - }, - 'type': 'kafka-sink-connector', - 'version': '1.0.4' + "app": { + "batch.size": "2000", + "behavior.on.malformed.documents": "warn", + "behavior.on.null.values": "delete", + "connection.compression": "true", + "connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector", + "key.ignore": "false", + "linger.ms": "5000", + "max.buffered.records": "20000", + "name": "resources-read-from-component-inflate-step-inflated-sink-connector", + "read.timeout.ms": "120000", + "tasks.max": "1", + "topics": "resources-read-from-component-inflate-step", + "transforms.changeTopic.replacement": "resources-read-from-component-inflate-step-index-v1", + }, + "name": "inflate-step-inflated-sink-connector", + "namespace": "example-namespace", + "prefix": "resources-read-from-component-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-kafka-connect-resetter", + "url": "https://bakdata.github.io/kafka-connect-resetter/", + }, + "resetter_values": { + }, + "to": { + "models": { + }, + "topics": { + "inflate-step-inflated-sink-connector": { + "configs": { + }, + "role": "test", + }, + "kafka-sink-connector": { + "configs": { + }, + "type": "output", + }, + }, + }, + "type": "kafka-sink-connector", + "version": "1.0.4", }, { - 'app': { - 'nameOverride': 'resources-read-from-component-inflate-step-inflated-streams-app', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-read-from-component-inflate-step-inflated-streams-app-error', - 'inputTopics': [ - 'kafka-sink-connector' + "app": { + "nameOverride": "resources-read-from-component-inflate-step-inflated-streams-app", + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "config": { + "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", + }, + "errorTopic": "resources-read-from-component-inflate-step-inflated-streams-app-error", + "inputTopics": [ + "kafka-sink-connector", ], - 'outputTopic': 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app', - 'schemaRegistryUrl': 'http://localhost:8081' - } + "outputTopic": "resources-read-from-component-inflate-step-inflate-step-inflated-streams-app", + "schemaRegistryUrl": "http://localhost:8081", + }, }, - 'name': 'inflate-step-inflated-streams-app', - 'namespace': 'example-namespace', - 'prefix': 'resources-read-from-component-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False + "name": "inflate-step-inflated-streams-app", + 
"namespace": "example-namespace", + "prefix": "resources-read-from-component-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", }, - 'to': { - 'models': { + "to": { + "models": { }, - 'topics': { - 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app': { - 'configs': { + "topics": { + "resources-read-from-component-inflate-step-inflate-step-inflated-streams-app": { + "configs": { }, - 'type': 'output' + "type": "output", }, - 'resources-read-from-component-inflate-step-inflated-streams-app-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' + "resources-read-from-component-inflate-step-inflated-streams-app-error": { + "configs": { + "cleanup.policy": "compact,delete", }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } - } + "partitions_count": 1, + "type": "error", + "value_schema": "com.bakdata.kafka.DeadLetter", + }, + }, }, - 'type': 'streams-app', - 'version': '2.4.2' + "type": "streams-app", + "version": "2.4.2", }, { - 'app': { - 'autoscaling': { - 'consumerGroup': 'filter-resources-read-from-component-inflate-step-without-prefix', - 'cooldownPeriod': 300, - 'enabled': True, - 'lagThreshold': 10000, - 'maxReplicas': 1, - 'minReplicas': 0, - 'offsetResetPolicy': 'earliest', - 'pollingInterval': 30, - 'topics': [ - 'resources-read-from-component-inflate-step-without-prefix' - ] - }, - 'image': 'fake-registry/filter', - 'imageTag': '2.4.1', - 'nameOverride': 'inflate-step-without-prefix', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-read-from-component-inflate-step-without-prefix-error', - 'inputTopics': [ - 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app' + "app": { + "autoscaling": { + "consumerGroup": "filter-resources-read-from-component-inflate-step-without-prefix", + "cooldownPeriod": 300, + "enabled": True, + "lagThreshold": 10000, + "maxReplicas": 1, + "minReplicas": 0, + "offsetResetPolicy": "earliest", + "pollingInterval": 30, + "topics": [ + "resources-read-from-component-inflate-step-without-prefix", + ], + }, + "image": "fake-registry/filter", + "imageTag": "2.4.1", + "nameOverride": "inflate-step-without-prefix", + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "config": { + "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", + }, + "errorTopic": "resources-read-from-component-inflate-step-without-prefix-error", + "inputTopics": [ + "resources-read-from-component-inflate-step-inflate-step-inflated-streams-app", ], - 'outputTopic': 'resources-read-from-component-inflate-step-without-prefix', - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'name': 'inflate-step-without-prefix', - 'namespace': 'example-namespace', - 'prefix': '', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-read-from-component-inflate-step-without-prefix': { - 'configs': { - 'retention.ms': '-1' - }, - 
'partitions_count': 50, - 'type': 'output' - }, - 'resources-read-from-component-inflate-step-without-prefix-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } - } - }, - 'type': 'should-inflate', - 'version': '2.4.2' + "outputTopic": "resources-read-from-component-inflate-step-without-prefix", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "name": "inflate-step-without-prefix", + "namespace": "example-namespace", + "prefix": "", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + }, + "topics": { + "resources-read-from-component-inflate-step-without-prefix": { + "configs": { + "retention.ms": "-1", + }, + "partitions_count": 50, + "type": "output", + }, + "resources-read-from-component-inflate-step-without-prefix-error": { + "configs": { + "cleanup.policy": "compact,delete", + }, + "partitions_count": 1, + "type": "error", + "value_schema": "com.bakdata.kafka.DeadLetter", + }, + }, + }, + "type": "should-inflate", + "version": "2.4.2", }, { - 'app': { - 'batch.size': '2000', - 'behavior.on.malformed.documents': 'warn', - 'behavior.on.null.values': 'delete', - 'connection.compression': 'true', - 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector', - 'key.ignore': 'false', - 'linger.ms': '5000', - 'max.buffered.records': '20000', - 'name': 'resources-read-from-component-inflate-step-without-prefix-inflated-sink-connector', - 'read.timeout.ms': '120000', - 'tasks.max': '1', - 'topics': 'resources-read-from-component-inflate-step-without-prefix', - 'transforms.changeTopic.replacement': 'resources-read-from-component-inflate-step-without-prefix-index-v1' - }, - 'name': 'inflate-step-without-prefix-inflated-sink-connector', - 'namespace': 'example-namespace', - 'prefix': 'resources-read-from-component-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-kafka-connect-resetter', - 'url': 'https://bakdata.github.io/kafka-connect-resetter/' - }, - 'resetter_values': { - }, - 'to': { - 'models': { - }, - 'topics': { - 'inflate-step-without-prefix-inflated-sink-connector': { - 'configs': { - }, - 'role': 'test' - }, - 'kafka-sink-connector': { - 'configs': { - }, - 'type': 'output' - } - } - }, - 'type': 'kafka-sink-connector', - 'version': '1.0.4' + "app": { + "batch.size": "2000", + "behavior.on.malformed.documents": "warn", + "behavior.on.null.values": "delete", + "connection.compression": "true", + "connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector", + "key.ignore": "false", + "linger.ms": "5000", + "max.buffered.records": "20000", + "name": "resources-read-from-component-inflate-step-without-prefix-inflated-sink-connector", + "read.timeout.ms": "120000", + "tasks.max": "1", + "topics": "resources-read-from-component-inflate-step-without-prefix", + "transforms.changeTopic.replacement": "resources-read-from-component-inflate-step-without-prefix-index-v1", + }, + "name": "inflate-step-without-prefix-inflated-sink-connector", + "namespace": "example-namespace", + "prefix": "resources-read-from-component-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-kafka-connect-resetter", + "url": 
"https://bakdata.github.io/kafka-connect-resetter/", + }, + "resetter_values": { + }, + "to": { + "models": { + }, + "topics": { + "inflate-step-without-prefix-inflated-sink-connector": { + "configs": { + }, + "role": "test", + }, + "kafka-sink-connector": { + "configs": { + }, + "type": "output", + }, + }, + }, + "type": "kafka-sink-connector", + "version": "1.0.4", }, { - 'app': { - 'nameOverride': 'resources-read-from-component-inflate-step-without-prefix-inflated-streams-app', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-read-from-component-inflate-step-without-prefix-inflated-streams-app-error', - 'inputTopics': [ - 'kafka-sink-connector' + "app": { + "nameOverride": "resources-read-from-component-inflate-step-without-prefix-inflated-streams-app", + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "config": { + "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", + }, + "errorTopic": "resources-read-from-component-inflate-step-without-prefix-inflated-streams-app-error", + "inputTopics": [ + "kafka-sink-connector", ], - 'outputTopic': 'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app', - 'schemaRegistryUrl': 'http://localhost:8081' - } + "outputTopic": "inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app", + "schemaRegistryUrl": "http://localhost:8081", + }, }, - 'name': 'inflate-step-without-prefix-inflated-streams-app', - 'namespace': 'example-namespace', - 'prefix': 'resources-read-from-component-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False + "name": "inflate-step-without-prefix-inflated-streams-app", + "namespace": "example-namespace", + "prefix": "resources-read-from-component-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", }, - 'to': { - 'models': { + "to": { + "models": { }, - 'topics': { - 'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app': { - 'configs': { + "topics": { + "inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app": { + "configs": { }, - 'type': 'output' + "type": "output", }, - 'resources-read-from-component-inflate-step-without-prefix-inflated-streams-app-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' + "resources-read-from-component-inflate-step-without-prefix-inflated-streams-app-error": { + "configs": { + "cleanup.policy": "compact,delete", }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } - } + "partitions_count": 1, + "type": "error", + "value_schema": "com.bakdata.kafka.DeadLetter", + }, + }, }, - 'type': 'streams-app', - 'version': '2.4.2' + "type": "streams-app", + "version": "2.4.2", }, { - 'app': { - 'nameOverride': 'resources-read-from-component-consumer1', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-read-from-component-consumer1-error', - 'inputTopics': [ - 'resources-read-from-component-producer1' + "app": { + 
"nameOverride": "resources-read-from-component-consumer1", + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "config": { + "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", + }, + "errorTopic": "resources-read-from-component-consumer1-error", + "inputTopics": [ + "resources-read-from-component-producer1", ], - 'outputTopic': 'resources-read-from-component-consumer1', - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'from': { - 'components': { - 'producer1': { - 'type': 'input' - } - }, - 'topics': { - } - }, - 'name': 'consumer1', - 'namespace': 'example-namespace', - 'prefix': 'resources-read-from-component-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-read-from-component-consumer1': { - 'configs': { - }, - 'type': 'output' - }, - 'resources-read-from-component-consumer1-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } - } - }, - 'type': 'streams-app', - 'version': '2.4.2' + "outputTopic": "resources-read-from-component-consumer1", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "from": { + "components": { + "producer1": { + "type": "input", + }, + }, + "topics": { + }, + }, + "name": "consumer1", + "namespace": "example-namespace", + "prefix": "resources-read-from-component-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + }, + "topics": { + "resources-read-from-component-consumer1": { + "configs": { + }, + "type": "output", + }, + "resources-read-from-component-consumer1-error": { + "configs": { + "cleanup.policy": "compact,delete", + }, + "partitions_count": 1, + "type": "error", + "value_schema": "com.bakdata.kafka.DeadLetter", + }, + }, + }, + "type": "streams-app", + "version": "2.4.2", }, { - 'app': { - 'nameOverride': 'resources-read-from-component-consumer2', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-read-from-component-consumer2-error', - 'inputTopics': [ - 'resources-read-from-component-producer1', - 'resources-read-from-component-consumer1' + "app": { + "nameOverride": "resources-read-from-component-consumer2", + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "config": { + "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", + }, + "errorTopic": "resources-read-from-component-consumer2-error", + "inputTopics": [ + "resources-read-from-component-producer1", + "resources-read-from-component-consumer1", ], - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'from': { - 'components': { - 'consumer1': { - 'type': 'input' - }, - 'producer1': { - 'type': 'input' - } - }, - 'topics': { - } - }, - 'name': 'consumer2', - 'namespace': 'example-namespace', - 'prefix': 'resources-read-from-component-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 
'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-read-from-component-consumer2-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } - } - }, - 'type': 'streams-app', - 'version': '2.4.2' + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "from": { + "components": { + "consumer1": { + "type": "input", + }, + "producer1": { + "type": "input", + }, + }, + "topics": { + }, + }, + "name": "consumer2", + "namespace": "example-namespace", + "prefix": "resources-read-from-component-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + }, + "topics": { + "resources-read-from-component-consumer2-error": { + "configs": { + "cleanup.policy": "compact,delete", + }, + "partitions_count": 1, + "type": "error", + "value_schema": "com.bakdata.kafka.DeadLetter", + }, + }, + }, + "type": "streams-app", + "version": "2.4.2", }, { - 'app': { - 'nameOverride': 'resources-read-from-component-consumer3', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-read-from-component-consumer3-error', - 'inputTopics': [ - 'resources-read-from-component-producer1', - 'resources-read-from-component-producer2' + "app": { + "nameOverride": "resources-read-from-component-consumer3", + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "config": { + "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", + }, + "errorTopic": "resources-read-from-component-consumer3-error", + "inputTopics": [ + "resources-read-from-component-producer1", + "resources-read-from-component-producer2", ], - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'from': { - 'components': { - 'producer2': { - 'type': 'input' - } - }, - 'topics': { - 'resources-read-from-component-producer1': { - 'type': 'input' - } - } - }, - 'name': 'consumer3', - 'namespace': 'example-namespace', - 'prefix': 'resources-read-from-component-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-read-from-component-consumer3-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } - } - }, - 'type': 'streams-app', - 'version': '2.4.2' + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "from": { + "components": { + "producer2": { + "type": "input", + }, + }, + "topics": { + "resources-read-from-component-producer1": { + "type": "input", + }, + }, + }, + "name": "consumer3", + "namespace": "example-namespace", + "prefix": "resources-read-from-component-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + }, + "topics": { + "resources-read-from-component-consumer3-error": { + "configs": { + "cleanup.policy": "compact,delete", + }, + 
"partitions_count": 1, + "type": "error", + "value_schema": "com.bakdata.kafka.DeadLetter", + }, + }, + }, + "type": "streams-app", + "version": "2.4.2", }, { - 'app': { - 'nameOverride': 'resources-read-from-component-consumer4', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-read-from-component-consumer4-error', - 'inputTopics': [ - 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app' + "app": { + "nameOverride": "resources-read-from-component-consumer4", + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "config": { + "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", + }, + "errorTopic": "resources-read-from-component-consumer4-error", + "inputTopics": [ + "resources-read-from-component-inflate-step-inflate-step-inflated-streams-app", ], - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'from': { - 'components': { - 'inflate-step': { - 'type': 'input' - } - }, - 'topics': { - } - }, - 'name': 'consumer4', - 'namespace': 'example-namespace', - 'prefix': 'resources-read-from-component-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-read-from-component-consumer4-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } - } - }, - 'type': 'streams-app', - 'version': '2.4.2' + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "from": { + "components": { + "inflate-step": { + "type": "input", + }, + }, + "topics": { + }, + }, + "name": "consumer4", + "namespace": "example-namespace", + "prefix": "resources-read-from-component-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + }, + "topics": { + "resources-read-from-component-consumer4-error": { + "configs": { + "cleanup.policy": "compact,delete", + }, + "partitions_count": 1, + "type": "error", + "value_schema": "com.bakdata.kafka.DeadLetter", + }, + }, + }, + "type": "streams-app", + "version": "2.4.2", }, { - 'app': { - 'nameOverride': 'resources-read-from-component-consumer5', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-read-from-component-consumer5-error', - 'inputTopics': [ - 'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app' + "app": { + "nameOverride": "resources-read-from-component-consumer5", + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "config": { + "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", + }, + "errorTopic": "resources-read-from-component-consumer5-error", + "inputTopics": [ + "inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app", ], - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'from': { - 'components': { - 'inflate-step-without-prefix': { - 'type': 'input' - } - }, - 
'topics': { - } - }, - 'name': 'consumer5', - 'namespace': 'example-namespace', - 'prefix': 'resources-read-from-component-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-read-from-component-consumer5-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } - } - }, - 'type': 'streams-app', - 'version': '2.4.2' - } - ] + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "from": { + "components": { + "inflate-step-without-prefix": { + "type": "input", + }, + }, + "topics": { + }, + }, + "name": "consumer5", + "namespace": "example-namespace", + "prefix": "resources-read-from-component-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + }, + "topics": { + "resources-read-from-component-consumer5-error": { + "configs": { + "cleanup.policy": "compact,delete", + }, + "partitions_count": 1, + "type": "error", + "value_schema": "com.bakdata.kafka.DeadLetter", + }, + }, + }, + "type": "streams-app", + "version": "2.4.2", + }, + ], } -snapshots['TestPipeline.test_substitute_in_component test-pipeline'] = { - 'components': [ +snapshots["TestPipeline.test_substitute_in_component test-pipeline"] = { + "components": [ { - 'app': { - 'commandLine': { - 'FAKE_ARG': 'fake-arg-value' - }, - 'image': 'example-registry/fake-image', - 'imageTag': '0.0.1', - 'labels': { - 'app_name': 'scheduled-producer', - 'app_schedule': '30 3/8 * * *', - 'app_type': 'scheduled-producer' - }, - 'nameOverride': 'resources-component-type-substitution-scheduled-producer', - 'schedule': '30 3/8 * * *', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'extraOutputTopics': { - }, - 'outputTopic': 'resources-component-type-substitution-scheduled-producer', - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'name': 'scheduled-producer', - 'namespace': 'example-namespace', - 'prefix': 'resources-component-type-substitution-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - 'com/bakdata/kafka/fake': '1.0.0' - }, - 'topics': { - 'resources-component-type-substitution-scheduled-producer': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 12, - 'type': 'output', - 'value_schema': 'com.bakdata.fake.Produced' - } - } - }, - 'type': 'scheduled-producer', - 'version': '2.4.2' + "app": { + "commandLine": { + "FAKE_ARG": "fake-arg-value", + }, + "image": "example-registry/fake-image", + "imageTag": "0.0.1", + "labels": { + "app_name": "scheduled-producer", + "app_schedule": "30 3/8 * * *", + "app_type": "scheduled-producer", + }, + "nameOverride": "resources-component-type-substitution-scheduled-producer", + "schedule": "30 3/8 * * *", + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "extraOutputTopics": { + }, + "outputTopic": "resources-component-type-substitution-scheduled-producer", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "name": 
"scheduled-producer", + "namespace": "example-namespace", + "prefix": "resources-component-type-substitution-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + "com/bakdata/kafka/fake": "1.0.0", + }, + "topics": { + "resources-component-type-substitution-scheduled-producer": { + "configs": { + "cleanup.policy": "compact,delete", + }, + "partitions_count": 12, + "type": "output", + "value_schema": "com.bakdata.fake.Produced", + }, + }, + }, + "type": "scheduled-producer", + "version": "2.4.2", }, { - 'app': { - 'autoscaling': { - 'consumerGroup': 'converter-resources-component-type-substitution-converter', - 'cooldownPeriod': 300, - 'enabled': True, - 'lagThreshold': 10000, - 'maxReplicas': 1, - 'minReplicas': 0, - 'offsetResetPolicy': 'earliest', - 'pollingInterval': 30, - 'topics': [ - ] - }, - 'commandLine': { - 'CONVERT_XML': True - }, - 'nameOverride': 'resources-component-type-substitution-converter', - 'resources': { - 'limits': { - 'memory': '2G' - }, - 'requests': { - 'memory': '2G' - } - }, - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-component-type-substitution-converter-error', - 'inputTopics': [ - 'resources-component-type-substitution-scheduled-producer' + "app": { + "autoscaling": { + "consumerGroup": "converter-resources-component-type-substitution-converter", + "cooldownPeriod": 300, + "enabled": True, + "lagThreshold": 10000, + "maxReplicas": 1, + "minReplicas": 0, + "offsetResetPolicy": "earliest", + "pollingInterval": 30, + "topics": [ ], - 'outputTopic': 'resources-component-type-substitution-converter', - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'name': 'converter', - 'namespace': 'example-namespace', - 'prefix': 'resources-component-type-substitution-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-component-type-substitution-converter': { - 'configs': { - 'cleanup.policy': 'compact,delete', - 'retention.ms': '-1' - }, - 'partitions_count': 50, - 'type': 'output' - }, - 'resources-component-type-substitution-converter-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 10, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } - } - }, - 'type': 'converter', - 'version': '2.4.2' + }, + "commandLine": { + "CONVERT_XML": True, + }, + "nameOverride": "resources-component-type-substitution-converter", + "resources": { + "limits": { + "memory": "2G", + }, + "requests": { + "memory": "2G", + }, + }, + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "config": { + "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", + }, + "errorTopic": "resources-component-type-substitution-converter-error", + "inputTopics": [ + "resources-component-type-substitution-scheduled-producer", + ], + "outputTopic": "resources-component-type-substitution-converter", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "name": "converter", + "namespace": "example-namespace", + "prefix": 
"resources-component-type-substitution-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + }, + "topics": { + "resources-component-type-substitution-converter": { + "configs": { + "cleanup.policy": "compact,delete", + "retention.ms": "-1", + }, + "partitions_count": 50, + "type": "output", + }, + "resources-component-type-substitution-converter-error": { + "configs": { + "cleanup.policy": "compact,delete", + }, + "partitions_count": 10, + "type": "error", + "value_schema": "com.bakdata.kafka.DeadLetter", + }, + }, + }, + "type": "converter", + "version": "2.4.2", }, { - 'app': { - 'autoscaling': { - 'consumerGroup': 'filter-resources-component-type-substitution-filter-app', - 'cooldownPeriod': 300, - 'enabled': True, - 'lagThreshold': 10000, - 'maxReplicas': 4, - 'minReplicas': 4, - 'offsetResetPolicy': 'earliest', - 'pollingInterval': 30, - 'topics': [ - 'resources-component-type-substitution-filter-app' - ] - }, - 'commandLine': { - 'TYPE': 'nothing' - }, - 'image': 'fake-registry/filter', - 'imageTag': '2.4.1', - 'labels': { - 'app_name': 'filter-app', - 'app_resources_requests_memory': '3G', - 'app_type': 'filter', - 'filter': 'filter-app-filter', - 'test_placeholder_in_placeholder': 'filter-app-filter' - }, - 'nameOverride': 'resources-component-type-substitution-filter-app', - 'replicaCount': 4, - 'resources': { - 'requests': { - 'memory': '3G' - } - }, - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-component-type-substitution-filter-app-error', - 'inputTopics': [ - 'resources-component-type-substitution-converter' + "app": { + "autoscaling": { + "consumerGroup": "filter-resources-component-type-substitution-filter-app", + "cooldownPeriod": 300, + "enabled": True, + "lagThreshold": 10000, + "maxReplicas": 4, + "minReplicas": 4, + "offsetResetPolicy": "earliest", + "pollingInterval": 30, + "topics": [ + "resources-component-type-substitution-filter-app", + ], + }, + "commandLine": { + "TYPE": "nothing", + }, + "image": "fake-registry/filter", + "imageTag": "2.4.1", + "labels": { + "app_name": "filter-app", + "app_resources_requests_memory": "3G", + "app_type": "filter", + "filter": "filter-app-filter", + "test_placeholder_in_placeholder": "filter-app-filter", + }, + "nameOverride": "resources-component-type-substitution-filter-app", + "replicaCount": 4, + "resources": { + "requests": { + "memory": "3G", + }, + }, + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "config": { + "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", + }, + "errorTopic": "resources-component-type-substitution-filter-app-error", + "inputTopics": [ + "resources-component-type-substitution-converter", ], - 'outputTopic': 'resources-component-type-substitution-filter-app', - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'name': 'filter-app', - 'namespace': 'example-namespace', - 'prefix': 'resources-component-type-substitution-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 
'resources-component-type-substitution-filter-app': { - 'configs': { - 'retention.ms': '-1' - }, - 'partitions_count': 50, - 'type': 'output' - }, - 'resources-component-type-substitution-filter-app-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } - } - }, - 'type': 'filter', - 'version': '2.4.2' - } - ] + "outputTopic": "resources-component-type-substitution-filter-app", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "name": "filter-app", + "namespace": "example-namespace", + "prefix": "resources-component-type-substitution-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + }, + "topics": { + "resources-component-type-substitution-filter-app": { + "configs": { + "retention.ms": "-1", + }, + "partitions_count": 50, + "type": "output", + }, + "resources-component-type-substitution-filter-app-error": { + "configs": { + "cleanup.policy": "compact,delete", + }, + "partitions_count": 1, + "type": "error", + "value_schema": "com.bakdata.kafka.DeadLetter", + }, + }, + }, + "type": "filter", + "version": "2.4.2", + }, + ], } -snapshots['TestPipeline.test_with_custom_config_with_absolute_defaults_path test-pipeline'] = { - 'components': [ +snapshots["TestPipeline.test_with_custom_config_with_absolute_defaults_path test-pipeline"] = { + "components": [ { - 'app': { - 'nameOverride': 'resources-custom-config-app1', - 'resources': { - 'limits': { - 'memory': '2G' - }, - 'requests': { - 'memory': '2G' - } - }, - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'extraOutputTopics': { - }, - 'outputTopic': 'app1-test-topic', - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'name': 'app1', - 'namespace': 'development-namespace', - 'prefix': 'resources-custom-config-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'app1-test-topic': { - 'configs': { - }, - 'partitions_count': 3, - 'type': 'output' - } - } - }, - 'type': 'producer-app', - 'version': '2.9.0' + "app": { + "nameOverride": "resources-custom-config-app1", + "resources": { + "limits": { + "memory": "2G", + }, + "requests": { + "memory": "2G", + }, + }, + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "extraOutputTopics": { + }, + "outputTopic": "app1-test-topic", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "name": "app1", + "namespace": "development-namespace", + "prefix": "resources-custom-config-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + }, + "topics": { + "app1-test-topic": { + "configs": { + }, + "partitions_count": 3, + "type": "output", + }, + }, + }, + "type": "producer-app", + "version": "2.9.0", }, { - 'app': { - 'image': 'some-image', - 'labels': { - 'pipeline': 'resources-custom-config' - }, - 'nameOverride': 'resources-custom-config-app2', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'errorTopic': 
'app2-dead-letter-topic', - 'inputTopics': [ - 'app1-test-topic' + "app": { + "image": "some-image", + "labels": { + "pipeline": "resources-custom-config", + }, + "nameOverride": "resources-custom-config-app2", + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "errorTopic": "app2-dead-letter-topic", + "inputTopics": [ + "app1-test-topic", ], - 'outputTopic': 'app2-test-topic', - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'name': 'app2', - 'namespace': 'development-namespace', - 'prefix': 'resources-custom-config-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'app2-dead-letter-topic': { - 'configs': { - }, - 'partitions_count': 1, - 'type': 'error' - }, - 'app2-test-topic': { - 'configs': { - }, - 'partitions_count': 3, - 'type': 'output' - } - } - }, - 'type': 'streams-app', - 'version': '2.9.0' - } - ] + "outputTopic": "app2-test-topic", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "name": "app2", + "namespace": "development-namespace", + "prefix": "resources-custom-config-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + }, + "topics": { + "app2-dead-letter-topic": { + "configs": { + }, + "partitions_count": 1, + "type": "error", + }, + "app2-test-topic": { + "configs": { + }, + "partitions_count": 3, + "type": "output", + }, + }, + }, + "type": "streams-app", + "version": "2.9.0", + }, + ], } -snapshots['TestPipeline.test_with_custom_config_with_relative_defaults_path test-pipeline'] = { - 'components': [ +snapshots["TestPipeline.test_with_custom_config_with_relative_defaults_path test-pipeline"] = { + "components": [ { - 'app': { - 'nameOverride': 'resources-custom-config-app1', - 'resources': { - 'limits': { - 'memory': '2G' - }, - 'requests': { - 'memory': '2G' - } - }, - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'extraOutputTopics': { - }, - 'outputTopic': 'app1-test-topic', - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'name': 'app1', - 'namespace': 'development-namespace', - 'prefix': 'resources-custom-config-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'app1-test-topic': { - 'configs': { - }, - 'partitions_count': 3, - 'type': 'output' - } - } - }, - 'type': 'producer-app', - 'version': '2.9.0' + "app": { + "nameOverride": "resources-custom-config-app1", + "resources": { + "limits": { + "memory": "2G", + }, + "requests": { + "memory": "2G", + }, + }, + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "extraOutputTopics": { + }, + "outputTopic": "app1-test-topic", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "name": "app1", + "namespace": "development-namespace", + "prefix": "resources-custom-config-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + }, + 
"topics": { + "app1-test-topic": { + "configs": { + }, + "partitions_count": 3, + "type": "output", + }, + }, + }, + "type": "producer-app", + "version": "2.9.0", }, { - 'app': { - 'image': 'some-image', - 'labels': { - 'pipeline': 'resources-custom-config' - }, - 'nameOverride': 'resources-custom-config-app2', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'errorTopic': 'app2-dead-letter-topic', - 'inputTopics': [ - 'app1-test-topic' + "app": { + "image": "some-image", + "labels": { + "pipeline": "resources-custom-config", + }, + "nameOverride": "resources-custom-config-app2", + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "errorTopic": "app2-dead-letter-topic", + "inputTopics": [ + "app1-test-topic", ], - 'outputTopic': 'app2-test-topic', - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'name': 'app2', - 'namespace': 'development-namespace', - 'prefix': 'resources-custom-config-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'app2-dead-letter-topic': { - 'configs': { - }, - 'partitions_count': 1, - 'type': 'error' - }, - 'app2-test-topic': { - 'configs': { - }, - 'partitions_count': 3, - 'type': 'output' - } - } - }, - 'type': 'streams-app', - 'version': '2.9.0' - } - ] + "outputTopic": "app2-test-topic", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "name": "app2", + "namespace": "development-namespace", + "prefix": "resources-custom-config-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + }, + "topics": { + "app2-dead-letter-topic": { + "configs": { + }, + "partitions_count": 1, + "type": "error", + }, + "app2-test-topic": { + "configs": { + }, + "partitions_count": 3, + "type": "output", + }, + }, + }, + "type": "streams-app", + "version": "2.9.0", + }, + ], } -snapshots['TestPipeline.test_with_env_defaults test-pipeline'] = { - 'components': [ +snapshots["TestPipeline.test_with_env_defaults test-pipeline"] = { + "components": [ { - 'app': { - 'image': 'fake-image', - 'nameOverride': 'resources-kafka-connect-sink-streams-app-development', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-kafka-connect-sink-streams-app-development-error', - 'inputTopics': [ - 'example-topic' + "app": { + "image": "fake-image", + "nameOverride": "resources-kafka-connect-sink-streams-app-development", + "streams": { + "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", + "config": { + "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", + }, + "errorTopic": "resources-kafka-connect-sink-streams-app-development-error", + "inputTopics": [ + "example-topic", ], - 'outputTopic': 'example-output', - 'schemaRegistryUrl': 'http://localhost:8081' - } - }, - 'from': { - 'components': { - }, - 'topics': { - 'example-topic': { - 'type': 'input' - } - } - }, - 'name': 'streams-app-development', - 'namespace': 'development-namespace', - 'prefix': 'resources-kafka-connect-sink-', - 'repo_config': { - 'repo_auth_flags': { - 
'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'example-output': { - 'configs': { - }, - 'type': 'output' - }, - 'resources-kafka-connect-sink-streams-app-development-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } - } - }, - 'type': 'streams-app', - 'version': '2.9.0' + "outputTopic": "example-output", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + "from": { + "components": { + }, + "topics": { + "example-topic": { + "type": "input", + }, + }, + }, + "name": "streams-app-development", + "namespace": "development-namespace", + "prefix": "resources-kafka-connect-sink-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/", + }, + "to": { + "models": { + }, + "topics": { + "example-output": { + "configs": { + }, + "type": "output", + }, + "resources-kafka-connect-sink-streams-app-development-error": { + "configs": { + "cleanup.policy": "compact,delete", + }, + "partitions_count": 1, + "type": "error", + "value_schema": "com.bakdata.kafka.DeadLetter", + }, + }, + }, + "type": "streams-app", + "version": "2.9.0", }, { - 'app': { - 'batch.size': '2000', - 'behavior.on.malformed.documents': 'warn', - 'behavior.on.null.values': 'delete', - 'connection.compression': 'true', - 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector', - 'key.ignore': 'false', - 'linger.ms': '5000', - 'max.buffered.records': '20000', - 'name': 'resources-kafka-connect-sink-es-sink-connector', - 'read.timeout.ms': '120000', - 'tasks.max': '1', - 'topics': 'example-output' - }, - 'name': 'es-sink-connector', - 'namespace': 'example-namespace', - 'prefix': 'resources-kafka-connect-sink-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-kafka-connect-resetter', - 'url': 'https://bakdata.github.io/kafka-connect-resetter/' - }, - 'resetter_values': { - }, - 'type': 'kafka-sink-connector', - 'version': '1.0.4' - } - ] + "app": { + "batch.size": "2000", + "behavior.on.malformed.documents": "warn", + "behavior.on.null.values": "delete", + "connection.compression": "true", + "connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector", + "key.ignore": "false", + "linger.ms": "5000", + "max.buffered.records": "20000", + "name": "resources-kafka-connect-sink-es-sink-connector", + "read.timeout.ms": "120000", + "tasks.max": "1", + "topics": "example-output", + }, + "name": "es-sink-connector", + "namespace": "example-namespace", + "prefix": "resources-kafka-connect-sink-", + "repo_config": { + "repo_auth_flags": { + "insecure_skip_tls_verify": False, + }, + "repository_name": "bakdata-kafka-connect-resetter", + "url": "https://bakdata.github.io/kafka-connect-resetter/", + }, + "resetter_values": { + }, + "type": "kafka-sink-connector", + "version": "1.0.4", + }, + ], } diff --git a/tests/pipeline/test_components/components.py b/tests/pipeline/test_components/components.py index ddbefad04..b1739e972 100644 --- a/tests/pipeline/test_components/components.py +++ b/tests/pipeline/test_components/components.py @@ -55,7 +55,8 @@ def inflate(self) -> list[PipelineComponent]: type=OutputTopicTypes.OUTPUT, ), 
TopicName("${component_name}"): TopicConfig( - type=None, role="test", + type=None, + role="test", ), }, ), @@ -80,7 +81,9 @@ def inflate(self) -> list[PipelineComponent]: class TestSchemaProvider(SchemaProvider): def provide_schema( - self, schema_class: str, models: dict[ModelName, ModelVersion], + self, + schema_class: str, + models: dict[ModelName, ModelVersion], ) -> Schema: schema = { "type": "record", diff --git a/tests/pipeline/test_pipeline.py b/tests/pipeline/test_pipeline.py index 1a105bec4..ceda59d80 100644 --- a/tests/pipeline/test_pipeline.py +++ b/tests/pipeline/test_pipeline.py @@ -48,7 +48,8 @@ def test_load_pipeline(self, snapshot: SnapshotTest): snapshot.assert_match(enriched_pipeline, "test-pipeline") def test_generate_with_steps_flag_should_write_log_warning( - self, caplog: pytest.LogCaptureFixture, + self, + caplog: pytest.LogCaptureFixture, ): result = runner.invoke( app, diff --git a/tests/utils/test_environment.py b/tests/utils/test_environment.py index 7cd5a2430..e1da952b3 100644 --- a/tests/utils/test_environment.py +++ b/tests/utils/test_environment.py @@ -91,7 +91,8 @@ def test_windows_behaviour_keys_transformation(system, fake_environment_windows) @patch("platform.system") def test_windows_behaviour_keys_transformation_as_kwargs( - system, fake_environment_windows, + system, + fake_environment_windows, ): system.return_value = "Windows" environment = Environment(**fake_environment_windows) From 14c3c99b29da24d6f92a3fccf5f936819d9d6904 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 01:55:36 +0300 Subject: [PATCH 40/76] chore: lint --- hooks/gen_docs/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hooks/gen_docs/__init__.py b/hooks/gen_docs/__init__.py index 5052e8077..5a0d63a28 100644 --- a/hooks/gen_docs/__init__.py +++ b/hooks/gen_docs/__init__.py @@ -5,7 +5,7 @@ class IterableStrEnum(str, Enum): - """Polyfill that also introduces dict-like behavior + """Polyfill that also introduces dict-like behavior. Introduces constructors that return a ``Iterator`` object either containing all items, only their names or their values. 
From c272a0621ba39cd1b2dd0c681db4813d4019ea1d Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 02:01:33 +0300 Subject: [PATCH 41/76] ci(ruff): enable "TCH" partly --- kpops/cli/main.py | 3 ++- kpops/cli/registry.py | 6 ++++-- kpops/component_handlers/helm_wrapper/helm.py | 5 ++++- kpops/pipeline_generator/pipeline.py | 7 +++++-- kpops/utils/dict_differ.py | 6 ++++-- pyproject.toml | 4 +++- 6 files changed, 22 insertions(+), 9 deletions(-) diff --git a/kpops/cli/main.py b/kpops/cli/main.py index cc5ebf65d..6e45cdb1c 100644 --- a/kpops/cli/main.py +++ b/kpops/cli/main.py @@ -1,7 +1,6 @@ from __future__ import annotations import logging -from collections.abc import Iterator from enum import Enum from pathlib import Path from typing import TYPE_CHECKING, Optional @@ -24,6 +23,8 @@ from kpops.utils.gen_schema import SchemaScope, gen_config_schema, gen_pipeline_schema if TYPE_CHECKING: + from collections.abc import Iterator + from kpops.components.base_components import PipelineComponent LOG_DIVIDER = "#" * 100 diff --git a/kpops/cli/registry.py b/kpops/cli/registry.py index 838c736d7..5f11e7b9b 100644 --- a/kpops/cli/registry.py +++ b/kpops/cli/registry.py @@ -3,15 +3,17 @@ import importlib import inspect import sys -from collections.abc import Iterator from dataclasses import dataclass, field from pathlib import Path -from typing import TypeVar +from typing import TYPE_CHECKING, TypeVar from kpops import __name__ from kpops.cli.exception import ClassNotFoundError from kpops.components.base_components.pipeline_component import PipelineComponent +if TYPE_CHECKING: + from collections.abc import Iterator + KPOPS_MODULE = __name__ + "." T = TypeVar("T") diff --git a/kpops/component_handlers/helm_wrapper/helm.py b/kpops/component_handlers/helm_wrapper/helm.py index 0dd6b26bc..9436fd60b 100644 --- a/kpops/component_handlers/helm_wrapper/helm.py +++ b/kpops/component_handlers/helm_wrapper/helm.py @@ -4,7 +4,7 @@ import re import subprocess import tempfile -from collections.abc import Iterable, Iterator +from typing import TYPE_CHECKING import yaml @@ -19,6 +19,9 @@ Version, ) +if TYPE_CHECKING: + from collections.abc import Iterable, Iterator + log = logging.getLogger("Helm") diff --git a/kpops/pipeline_generator/pipeline.py b/kpops/pipeline_generator/pipeline.py index c95a7e904..fc2839082 100644 --- a/kpops/pipeline_generator/pipeline.py +++ b/kpops/pipeline_generator/pipeline.py @@ -3,9 +3,8 @@ import json import logging from collections import Counter -from collections.abc import Iterator from contextlib import suppress -from pathlib import Path +from typing import TYPE_CHECKING import yaml from pydantic import BaseModel @@ -20,6 +19,10 @@ from kpops.utils.environment import ENV from kpops.utils.yaml_loading import load_yaml_file, substitute, substitute_nested +if TYPE_CHECKING: + from collections.abc import Iterator + from pathlib import Path + log = logging.getLogger("PipelineGenerator") diff --git a/kpops/utils/dict_differ.py b/kpops/utils/dict_differ.py index 50cb5a7f6..5bc8d720a 100644 --- a/kpops/utils/dict_differ.py +++ b/kpops/utils/dict_differ.py @@ -1,15 +1,17 @@ from __future__ import annotations -from collections.abc import Iterable, Iterator, Sequence from dataclasses import dataclass from difflib import Differ from enum import Enum -from typing import Generic, TypeVar +from typing import TYPE_CHECKING, Generic, TypeVar import typer import yaml from dictdiffer import diff, patch +if TYPE_CHECKING: + from collections.abc import Iterable, Iterator, Sequence + differ = 
Differ() diff --git a/pyproject.toml b/pyproject.toml index 2b667a2d7..516c6afe9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -99,6 +99,8 @@ ignore = [ "RET506", # Unnecessary {branch} after raise statement -- Lots of false positives "RET507", # Unnecessary {branch} after continue statement -- Lots of false positives "RET508", # Unnecessary {branch} after break statement -- Lots of false positives + "TCH001", # + "TCH002", "PLR09", # upper bound on number of arguments, functions, etc. -- Inconvenient to enforce "PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable -- Inconvenient to enforce "PLW2901", # `for` loop variable `{var}` overwritten by assignment target -- Inconvenient to enforce @@ -134,7 +136,7 @@ select = [ "RET", # flake8-return "SLOT", # flake8-slots "SIM", # flake8-simplify - # "TCH", # flake8-type-checking, configure correctly and add + "TCH", # flake8-type-checking, configure correctly and add "PTH", # flake8-use-pathlib "PGH", # pygrep-hooks "PL", # Pylint From 750555f621c5bda7fb060b4f7260fdbcfdc18d24 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 02:05:24 +0300 Subject: [PATCH 42/76] ci(ruff): enable "TCH002" --- .../kafka_connect/kafka_connect_handler.py | 7 ++++--- pyproject.toml | 2 +- tests/cli/test_schema_generation.py | 5 ++++- 3 files changed, 9 insertions(+), 5 deletions(-) diff --git a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py index 4c21f7127..bec3aaa82 100644 --- a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py +++ b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py @@ -1,5 +1,6 @@ from __future__ import annotations +import contextlib import logging from typing import TYPE_CHECKING @@ -13,12 +14,12 @@ from kpops.utils.colorify import magentaify from kpops.utils.dict_differ import render_diff -try: +with contextlib.suppress(ImportError): from typing import Self -except ImportError: - from typing_extensions import Self if TYPE_CHECKING: + from typing_extensions import Self + from kpops.cli.pipeline_config import PipelineConfig log = logging.getLogger("KafkaConnectHandler") diff --git a/pyproject.toml b/pyproject.toml index 516c6afe9..30ff10304 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -100,7 +100,7 @@ ignore = [ "RET507", # Unnecessary {branch} after continue statement -- Lots of false positives "RET508", # Unnecessary {branch} after break statement -- Lots of false positives "TCH001", # - "TCH002", + # "TCH002", "PLR09", # upper bound on number of arguments, functions, etc. 
-- Inconvenient to enforce "PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable -- Inconvenient to enforce "PLW2901", # `for` loop variable `{var}` overwritten by assignment target -- Inconvenient to enforce diff --git a/tests/cli/test_schema_generation.py b/tests/cli/test_schema_generation.py index be5b17ae6..1d61368d1 100644 --- a/tests/cli/test_schema_generation.py +++ b/tests/cli/test_schema_generation.py @@ -3,10 +3,10 @@ import logging from abc import ABC, abstractmethod from pathlib import Path +from typing import TYPE_CHECKING import pytest from pydantic import Field -from snapshottest.module import SnapshotTest from typer.testing import CliRunner from kpops.cli.main import app @@ -14,6 +14,9 @@ from kpops.utils.docstring import describe_attr from tests.cli.resources import empty_module +if TYPE_CHECKING: + from snapshottest.module import SnapshotTest + RESOURCE_PATH = Path(__file__).parent / "resources" From 502574b3e988e345d065b4f123dccd635defbb1d Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 02:10:47 +0300 Subject: [PATCH 43/76] ci(ruff): configure --- pyproject.toml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 30ff10304..7563a267a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -99,8 +99,7 @@ ignore = [ "RET506", # Unnecessary {branch} after raise statement -- Lots of false positives "RET507", # Unnecessary {branch} after continue statement -- Lots of false positives "RET508", # Unnecessary {branch} after break statement -- Lots of false positives - "TCH001", # - # "TCH002", + "TCH001", # Move application import {} into a type-checking block -- Breaks KPOps "PLR09", # upper bound on number of arguments, functions, etc. -- Inconvenient to enforce "PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable -- Inconvenient to enforce "PLW2901", # `for` loop variable `{var}` overwritten by assignment target -- Inconvenient to enforce From 31cd2be5fbc74aa01900f99c491198c44f2a079a Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 02:13:51 +0300 Subject: [PATCH 44/76] chore: remove leftover file --- setup.cfg | 19 ------------------- 1 file changed, 19 deletions(-) delete mode 100644 setup.cfg diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 89429d3e0..000000000 --- a/setup.cfg +++ /dev/null @@ -1,19 +0,0 @@ -[flake8] -exclude = - .git, - __pycache__ -max-complexity = 10 -# black docs regarding flake8: https://black.readthedocs.io/en/stable/guides/using_black_with_other_tools.html#flake8 -# black enforces an equal amount of whitespace around slice operators. It is not PEP8 compliant. 
-# black and flake8 also disagree on line length -extend-ignore = - # E203: Whitespace before ':' - E203, - # E501: Line too long - E501, -per-file-ignores = - # F401: unused imports - tests/*/__init__.py: F401 - -[isort] -profile = black From 9d4a76160f60572b6ebabac20f5e70860cf1480f Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 02:21:09 +0300 Subject: [PATCH 45/76] ci: update workflow --- .github/workflows/ci.yaml | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index baa091133..df10833f5 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -35,11 +35,8 @@ jobs: - name: Install dependencies run: poetry install --no-interaction - - name: Lint (flake8) - run: poetry run pre-commit run flake8 --all-files --show-diff-on-failure - - - name: Order of imports (isort) - run: poetry run pre-commit run isort --all-files --show-diff-on-failure + - name: Lint (ruff) + run: poetry run pre-commit run ruff --all-files --show-diff-on-failure - name: Formatting (black) run: poetry run pre-commit run black --all-files --show-diff-on-failure @@ -59,11 +56,6 @@ jobs: - name: Generate pipeline definitions run: poetry run pre-commit run gen-docs-components --all-files --show-diff-on-failure - # TODO: enable when PEP 604 incompatibilty is in typer is resolved https://github.com/tiangolo/typer/issues/348 - # See https://github.com/tiangolo/typer/pull/522 - # - name: Syntax (pyupgrade) - # run: poetry run pre-commit run --hook-stage manual pyupgrade --all-files - - name: Test run: poetry run pytest tests From 25b0c087df9fc44c62188c85bc3d7da00177437a Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 02:25:28 +0300 Subject: [PATCH 46/76] chore: pass tests --- .../kafka_connect/kafka_connect_handler.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py index bec3aaa82..2c24c4d67 100644 --- a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py +++ b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py @@ -1,6 +1,5 @@ from __future__ import annotations -import contextlib import logging from typing import TYPE_CHECKING @@ -14,11 +13,11 @@ from kpops.utils.colorify import magentaify from kpops.utils.dict_differ import render_diff -with contextlib.suppress(ImportError): - from typing import Self - if TYPE_CHECKING: - from typing_extensions import Self + try: + from typing import Self + except ImportError: + from typing_extensions import Self from kpops.cli.pipeline_config import PipelineConfig From 0b9f26ceeece14c6b1c7dfc9ae6501d4ec104209 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 10:36:33 +0300 Subject: [PATCH 47/76] ci(ruff-type-checking): Add "TCH001" with proper config --- kpops/component_handlers/__init__.py | 7 +++---- .../kafka_connect/kafka_connect_handler.py | 2 +- kpops/component_handlers/schema_handler/schema_handler.py | 7 +++++-- kpops/component_handlers/schema_handler/schema_provider.py | 7 ++++--- kpops/components/base_components/kafka_connector.py | 6 ++++-- kpops/pipeline_generator/pipeline.py | 7 ++++--- pyproject.toml | 5 ++++- 7 files changed, 25 insertions(+), 16 deletions(-) diff --git a/kpops/component_handlers/__init__.py b/kpops/component_handlers/__init__.py index 988ca7ee7..fa296a574 100644 --- a/kpops/component_handlers/__init__.py +++ 
b/kpops/component_handlers/__init__.py @@ -2,11 +2,10 @@ from typing import TYPE_CHECKING -from kpops.component_handlers.kafka_connect.kafka_connect_handler import ( - KafkaConnectHandler, -) - if TYPE_CHECKING: + from kpops.component_handlers.kafka_connect.kafka_connect_handler import ( + KafkaConnectHandler, + ) from kpops.component_handlers.schema_handler.schema_handler import SchemaHandler from kpops.component_handlers.topic.handler import TopicHandler diff --git a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py index 2c24c4d67..d4b00d6aa 100644 --- a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py +++ b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py @@ -8,7 +8,6 @@ ConnectorNotFoundException, ConnectorStateException, ) -from kpops.component_handlers.kafka_connect.model import KafkaConnectorConfig from kpops.component_handlers.kafka_connect.timeout import timeout from kpops.utils.colorify import magentaify from kpops.utils.dict_differ import render_diff @@ -20,6 +19,7 @@ from typing_extensions import Self from kpops.cli.pipeline_config import PipelineConfig + from kpops.component_handlers.kafka_connect.model import KafkaConnectorConfig log = logging.getLogger("KafkaConnectHandler") diff --git a/kpops/component_handlers/schema_handler/schema_handler.py b/kpops/component_handlers/schema_handler/schema_handler.py index b402c6024..1a05ec86c 100644 --- a/kpops/component_handlers/schema_handler/schema_handler.py +++ b/kpops/component_handlers/schema_handler/schema_handler.py @@ -3,22 +3,25 @@ import json import logging from functools import cached_property +from typing import TYPE_CHECKING from schema_registry.client import SchemaRegistryClient from schema_registry.client.schema import AvroSchema from kpops.cli.exception import ClassNotFoundError -from kpops.cli.pipeline_config import PipelineConfig from kpops.cli.registry import find_class from kpops.component_handlers.schema_handler.schema_provider import ( Schema, SchemaProvider, ) -from kpops.components.base_components.models.to_section import ToSection from kpops.utils.colorify import greenify, magentaify log = logging.getLogger("SchemaHandler") +if TYPE_CHECKING: + from kpops.cli.pipeline_config import PipelineConfig + from kpops.components.base_components.models.to_section import ToSection + class SchemaHandler: def __init__(self, url: str, components_module: str | None): diff --git a/kpops/component_handlers/schema_handler/schema_provider.py b/kpops/component_handlers/schema_handler/schema_provider.py index 78f653270..253491e9b 100644 --- a/kpops/component_handlers/schema_handler/schema_provider.py +++ b/kpops/component_handlers/schema_handler/schema_provider.py @@ -1,14 +1,15 @@ from __future__ import annotations from abc import ABC, abstractmethod -from typing import TypeAlias +from typing import TYPE_CHECKING, TypeAlias from schema_registry.client.schema import AvroSchema, JsonSchema -from kpops.components.base_components.models import ModelName, ModelVersion - Schema: TypeAlias = AvroSchema | JsonSchema +if TYPE_CHECKING: + from kpops.components.base_components.models import ModelName, ModelVersion + class SchemaProvider(ABC): @abstractmethod diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py index a69bf3ab1..6420662a3 100644 --- a/kpops/components/base_components/kafka_connector.py +++ b/kpops/components/base_components/kafka_connector.py @@ -3,7 
+3,7 @@ import logging from abc import ABC from functools import cached_property -from typing import Any, NoReturn +from typing import TYPE_CHECKING, Any, NoReturn from pydantic import Field, validator from typing_extensions import override @@ -25,11 +25,13 @@ KafkaConnectResetterValues, ) from kpops.components.base_components.base_defaults_component import deduplicate -from kpops.components.base_components.models.from_section import FromTopic from kpops.components.base_components.pipeline_component import PipelineComponent from kpops.utils.colorify import magentaify from kpops.utils.docstring import describe_attr +if TYPE_CHECKING: + from kpops.components.base_components.models.from_section import FromTopic + log = logging.getLogger("KafkaConnector") diff --git a/kpops/pipeline_generator/pipeline.py b/kpops/pipeline_generator/pipeline.py index fc2839082..96588beee 100644 --- a/kpops/pipeline_generator/pipeline.py +++ b/kpops/pipeline_generator/pipeline.py @@ -11,9 +11,6 @@ from rich.console import Console from rich.syntax import Syntax -from kpops.cli.pipeline_config import PipelineConfig -from kpops.cli.registry import Registry -from kpops.component_handlers import ComponentHandlers from kpops.components.base_components.pipeline_component import PipelineComponent from kpops.utils.dict_ops import generate_substitution, update_nested_pair from kpops.utils.environment import ENV @@ -23,6 +20,10 @@ from collections.abc import Iterator from pathlib import Path + from kpops.cli.pipeline_config import PipelineConfig + from kpops.cli.registry import Registry + from kpops.component_handlers import ComponentHandlers + log = logging.getLogger("PipelineGenerator") diff --git a/pyproject.toml b/pyproject.toml index 7563a267a..9855498b9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -99,7 +99,7 @@ ignore = [ "RET506", # Unnecessary {branch} after raise statement -- Lots of false positives "RET507", # Unnecessary {branch} after continue statement -- Lots of false positives "RET508", # Unnecessary {branch} after break statement -- Lots of false positives - "TCH001", # Move application import {} into a type-checking block -- Breaks KPOps + # "TCH001", # Move application import {} into a type-checking block -- Breaks KPOps "PLR09", # upper bound on number of arguments, functions, etc. 
-- Inconvenient to enforce "PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable -- Inconvenient to enforce "PLW2901", # `for` loop variable `{var}` overwritten by assignment target -- Inconvenient to enforce @@ -155,6 +155,9 @@ exclude = ["tests/*snapshots/*"] [tool.ruff.flake8-bugbear] extend-immutable-calls = ["typer.Argument"] +[tool.ruff.flake8-type-checking] +runtime-evaluated-base-classes = ["pydantic.BaseModel", "kpops.components.base_components.kafka_app.KafkaApp"] + [build-system] requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" From bdee94d87af68b1251d57f60f2d418c615008026 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 11:20:56 +0300 Subject: [PATCH 48/76] style: one-liner --- kpops/components/base_components/base_defaults_component.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index c7fd0d68f..8f3f0929b 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -173,9 +173,7 @@ def defaults_from_yaml(path: Path, key: str) -> dict: msg = ( "Default files should be structured as map ([app type] -> [default config]" ) - raise TypeError( - msg, - ) + raise TypeError(msg) value = content.get(key) if value is None: return {} From 847d4e9b4e5639c1229adcf4f5d6a9bfaa2dc8cc Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 12:48:32 +0300 Subject: [PATCH 49/76] ci(ruff): Ignore COM812 --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 9855498b9..ddb5e8c72 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -94,6 +94,7 @@ ignore = [ "B010", # Do not call setattr with a constant attribute value. -- Not always applicable "RUF012", # type class attrs with `ClassVar` -- Too strict/trigger-happy "UP007", # Use X | Y for type annotations -- `typer` doesn't support it + "COM812", # Checks for the absence of trailing commas -- leads to undesirable behavior from formatters "PIE804", # Unnecessary `dict` kwargs -- Inconvenient to enforce "RET505", # Unnecessary {branch} after return statement -- Lots of false positives "RET506", # Unnecessary {branch} after raise statement -- Lots of false positives From 5a6c3d14afcff478490e25d54187e1c6f849e6e5 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 13:24:58 +0300 Subject: [PATCH 50/76] Revert "ci(ruff-commas): autofix" This reverts commit 301284eb02dbb707fefa5ab50476b06ebee757ff. 
--- kpops/cli/custom_formatter.py | 4 +- kpops/cli/main.py | 69 ++++---------- kpops/cli/pipeline_config.py | 3 +- kpops/cli/registry.py | 6 +- .../helm_wrapper/dry_run_handler.py | 2 +- kpops/component_handlers/helm_wrapper/helm.py | 10 +- .../component_handlers/helm_wrapper/model.py | 23 ++--- .../component_handlers/helm_wrapper/utils.py | 2 +- .../kafka_connect/connect_wrapper.py | 31 +++--- .../kafka_connect/kafka_connect_handler.py | 26 ++--- .../kafka_connect/timeout.py | 2 +- .../schema_handler/schema_handler.py | 51 ++++------ .../schema_handler/schema_provider.py | 8 +- kpops/component_handlers/topic/handler.py | 51 ++++------ .../component_handlers/topic/proxy_wrapper.py | 4 +- kpops/component_handlers/topic/utils.py | 9 +- .../base_defaults_component.py | 23 ++--- kpops/components/base_components/kafka_app.py | 23 ++--- .../base_components/kafka_connector.py | 31 +++--- .../base_components/kubernetes_app.py | 8 +- .../base_components/models/from_section.py | 3 +- .../base_components/models/to_section.py | 13 +-- .../streams_bootstrap/producer/model.py | 9 +- .../streams_bootstrap/streams/model.py | 26 ++--- kpops/pipeline_generator/pipeline.py | 48 ++++------ kpops/utils/dict_differ.py | 8 +- kpops/utils/dict_ops.py | 4 +- kpops/utils/environment.py | 2 +- kpops/utils/gen_schema.py | 17 +--- kpops/utils/yaml_loading.py | 11 +-- tests/cli/resources/module.py | 4 +- tests/cli/test_pipeline_steps.py | 10 +- tests/cli/test_schema_generation.py | 10 +- tests/compiler/test_pipeline_name.py | 3 +- .../helm_wrapper/test_dry_run_handler.py | 12 +-- .../helm_wrapper/test_helm_diff.py | 4 +- .../helm_wrapper/test_helm_wrapper.py | 69 +++++--------- .../helm_wrapper/test_utils.py | 2 +- .../kafka_connect/test_connect_handler.py | 50 +++++----- .../kafka_connect/test_connect_wrapper.py | 70 +++++--------- .../schema_handler/resources/module.py | 4 +- .../schema_handler/test_schema_handler.py | 69 +++++--------- .../topic/test_proxy_wrapper.py | 27 ++---- .../topic/test_topic_handler.py | 71 ++++++-------- tests/component_handlers/topic/test_utils.py | 6 +- .../test_base_defaults_component.py | 28 ++---- tests/components/test_kafka_app.py | 3 +- tests/components/test_kafka_connector.py | 12 +-- tests/components/test_kafka_sink_connector.py | 51 ++++------ .../components/test_kafka_source_connector.py | 39 +++----- tests/components/test_kubernetes_app.py | 15 +-- tests/components/test_producer_app.py | 39 +++----- tests/components/test_streams_app.py | 95 +++++++------------ tests/pipeline/test_components/components.py | 17 ++-- .../components.py | 2 +- tests/pipeline/test_pipeline.py | 5 +- tests/utils/test_dict_ops.py | 2 +- tests/utils/test_diff.py | 2 +- tests/utils/test_environment.py | 3 +- 59 files changed, 449 insertions(+), 802 deletions(-) diff --git a/kpops/cli/custom_formatter.py b/kpops/cli/custom_formatter.py index fb5e44057..69fc1c73d 100644 --- a/kpops/cli/custom_formatter.py +++ b/kpops/cli/custom_formatter.py @@ -16,9 +16,7 @@ def format(self, record): logging.WARNING: typer.style(message_format, fg=typer.colors.YELLOW), logging.ERROR: typer.style(message_format, fg=typer.colors.RED), logging.CRITICAL: typer.style( - message_format, - fg=typer.colors.RED, - bold=True, + message_format, fg=typer.colors.RED, bold=True ), } diff --git a/kpops/cli/main.py b/kpops/cli/main.py index 6e45cdb1c..c7f0e26a1 100644 --- a/kpops/cli/main.py +++ b/kpops/cli/main.py @@ -122,17 +122,12 @@ def setup_pipeline( handlers = setup_handlers(components_module, pipeline_config) return 
Pipeline.load_from_yaml( - pipeline_base_dir, - pipeline_path, - registry, - pipeline_config, - handlers, + pipeline_base_dir, pipeline_path, registry, pipeline_config, handlers ) def setup_handlers( - components_module: str | None, - config: PipelineConfig, + components_module: str | None, config: PipelineConfig ) -> ComponentHandlers: schema_handler = SchemaHandler.load_schema_handler(components_module, config) connector_handler = KafkaConnectHandler.from_pipeline_config(config) @@ -155,15 +150,13 @@ def get_step_names(steps_to_apply: list[PipelineComponent]) -> list[str]: def filter_steps_to_apply( - pipeline: Pipeline, - steps: set[str], - filter_type: FilterType, + pipeline: Pipeline, steps: set[str], filter_type: FilterType ) -> list[PipelineComponent]: def is_in_steps(component: PipelineComponent) -> bool: return component.name in steps log.debug( - f"KPOPS_PIPELINE_STEPS is defined with values: {steps} and filter type of {filter_type.value}", + f"KPOPS_PIPELINE_STEPS is defined with values: {steps} and filter type of {filter_type.value}" ) filtered_steps = [ component @@ -179,9 +172,7 @@ def is_in_steps(component: PipelineComponent) -> bool: def get_steps_to_apply( - pipeline: Pipeline, - steps: str | None, - filter_type: FilterType, + pipeline: Pipeline, steps: str | None, filter_type: FilterType ) -> list[PipelineComponent]: if steps: return filter_steps_to_apply(pipeline, parse_steps(steps), filter_type) @@ -189,9 +180,7 @@ def get_steps_to_apply( def reverse_pipeline_steps( - pipeline: Pipeline, - steps: str | None, - filter_type: FilterType, + pipeline: Pipeline, steps: str | None, filter_type: FilterType ) -> Iterator[PipelineComponent]: return reversed(get_steps_to_apply(pipeline, steps, filter_type)) @@ -205,9 +194,7 @@ def log_action(action: str, pipeline_component: PipelineComponent): def create_pipeline_config( - config: Path, - defaults: Optional[Path], - verbose: bool, + config: Path, defaults: Optional[Path], verbose: bool ) -> PipelineConfig: setup_logging_level(verbose) PipelineConfig.Config.config_path = config @@ -224,7 +211,7 @@ def create_pipeline_config( Generate json schema. The schemas can be used to enable support for kpops files in a text editor. - """, + """ ) def schema( scope: SchemaScope = typer.Argument( @@ -239,8 +226,7 @@ def schema( ), components_module: Optional[str] = COMPONENTS_MODULES, include_stock_components: bool = typer.Option( - default=True, - help="Include the built-in KPOps components.", + default=True, help="Include the built-in KPOps components." ), ) -> None: match scope: @@ -251,7 +237,7 @@ def schema( @app.command( # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8 - help="Enriches pipelines steps with defaults. The output is used as input for the deploy/destroy/... commands.", + help="Enriches pipelines steps with defaults. The output is used as input for the deploy/destroy/... commands." 
) def generate( pipeline_path: Path = PIPELINE_PATH_ARG, @@ -266,10 +252,7 @@ def generate( ) -> Pipeline: pipeline_config = create_pipeline_config(config, defaults, verbose) pipeline = setup_pipeline( - pipeline_base_dir, - pipeline_path, - components_module, - pipeline_config, + pipeline_base_dir, pipeline_path, components_module, pipeline_config ) if not template: @@ -282,14 +265,14 @@ def generate( elif steps: log.warning( "The following flags are considered only when `--template` is set: \n \ - '--steps'", + '--steps'" ) return pipeline @app.command( - help="Deploy pipeline steps", + help="Deploy pipeline steps" ) # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8 def deploy( pipeline_path: Path = PIPELINE_PATH_ARG, @@ -304,10 +287,7 @@ def deploy( ): pipeline_config = create_pipeline_config(config, defaults, verbose) pipeline = setup_pipeline( - pipeline_base_dir, - pipeline_path, - components_module, - pipeline_config, + pipeline_base_dir, pipeline_path, components_module, pipeline_config ) steps_to_apply = get_steps_to_apply(pipeline, steps, filter_type) @@ -317,7 +297,7 @@ def deploy( @app.command( - help="Destroy pipeline steps", + help="Destroy pipeline steps" ) # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8 def destroy( pipeline_path: Path = PIPELINE_PATH_ARG, @@ -332,10 +312,7 @@ def destroy( ): pipeline_config = create_pipeline_config(config, defaults, verbose) pipeline = setup_pipeline( - pipeline_base_dir, - pipeline_path, - components_module, - pipeline_config, + pipeline_base_dir, pipeline_path, components_module, pipeline_config ) pipeline_steps = reverse_pipeline_steps(pipeline, steps, filter_type) for component in pipeline_steps: @@ -344,7 +321,7 @@ def destroy( @app.command( - help="Reset pipeline steps", + help="Reset pipeline steps" ) # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8 def reset( pipeline_path: Path = PIPELINE_PATH_ARG, @@ -359,10 +336,7 @@ def reset( ): pipeline_config = create_pipeline_config(config, defaults, verbose) pipeline = setup_pipeline( - pipeline_base_dir, - pipeline_path, - components_module, - pipeline_config, + pipeline_base_dir, pipeline_path, components_module, pipeline_config ) pipeline_steps = reverse_pipeline_steps(pipeline, steps, filter_type) for component in pipeline_steps: @@ -372,7 +346,7 @@ def reset( @app.command( - help="Clean pipeline steps", + help="Clean pipeline steps" ) # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8 def clean( pipeline_path: Path = PIPELINE_PATH_ARG, @@ -387,10 +361,7 @@ def clean( ): pipeline_config = create_pipeline_config(config, defaults, verbose) pipeline = setup_pipeline( - pipeline_base_dir, - pipeline_path, - components_module, - pipeline_config, + pipeline_base_dir, pipeline_path, components_module, pipeline_config ) pipeline_steps = reverse_pipeline_steps(pipeline, steps, filter_type) for component in pipeline_steps: diff --git a/kpops/cli/pipeline_config.py b/kpops/cli/pipeline_config.py index 58e731db2..1400323f5 100644 --- a/kpops/cli/pipeline_config.py +++ b/kpops/cli/pipeline_config.py @@ -112,8 +112,7 @@ def customise_sources( env_settings: SettingsSourceCallable, file_secret_settings: SettingsSourceCallable, ) -> tuple[ - SettingsSourceCallable | Callable[[PipelineConfig], dict[str, Any]], - ..., + SettingsSourceCallable | Callable[[PipelineConfig], dict[str, Any]], ... 
]: return ( env_settings, diff --git a/kpops/cli/registry.py b/kpops/cli/registry.py index 5f11e7b9b..a97e2cd91 100644 --- a/kpops/cli/registry.py +++ b/kpops/cli/registry.py @@ -41,9 +41,7 @@ def __getitem__(self, component_type: str) -> type[PipelineComponent]: return self._classes[component_type] except KeyError as ke: msg = f"Could not find a component of type {component_type}" - raise ClassNotFoundError( - msg, - ) from ke + raise ClassNotFoundError(msg) from ke def find_class(module_name: str, baseclass: type[T]) -> type[T]: @@ -59,7 +57,7 @@ def _find_classes(module_name: str, baseclass: type[T]) -> Iterator[type[T]]: if issubclass(_class, baseclass): # filter out internal kpops classes unless specifically requested if _class.__module__.startswith( - KPOPS_MODULE, + KPOPS_MODULE ) and not module_name.startswith(KPOPS_MODULE): continue yield _class diff --git a/kpops/component_handlers/helm_wrapper/dry_run_handler.py b/kpops/component_handlers/helm_wrapper/dry_run_handler.py index 7b1429dab..2d28957b7 100644 --- a/kpops/component_handlers/helm_wrapper/dry_run_handler.py +++ b/kpops/component_handlers/helm_wrapper/dry_run_handler.py @@ -18,7 +18,7 @@ def print_helm_diff(self, stdout: str, helm_release_name: str, log: Logger) -> N :param log: The Logger object of the component class """ current_release = list( - self._helm.get_manifest(helm_release_name, self.namespace), + self._helm.get_manifest(helm_release_name, self.namespace) ) if current_release: log.info(f"Helm release {helm_release_name} already exists") diff --git a/kpops/component_handlers/helm_wrapper/helm.py b/kpops/component_handlers/helm_wrapper/helm.py index 9436fd60b..b1b101b41 100644 --- a/kpops/component_handlers/helm_wrapper/helm.py +++ b/kpops/component_handlers/helm_wrapper/helm.py @@ -32,9 +32,7 @@ def __init__(self, helm_config: HelmConfig) -> None: self._version = self.get_version() if self._version.major != 3: msg = f"The supported Helm version is 3.x.x. The current Helm version is {self._version.major}.{self._version.minor}.{self._version.patch}" - raise RuntimeError( - msg, - ) + raise RuntimeError(msg) def add_repo( self, @@ -124,7 +122,7 @@ def uninstall( return self.__execute(command) except ReleaseNotFoundException: log.warning( - f"Release with name {release_name} not found. Could not uninstall app.", + f"Release with name {release_name} not found. Could not uninstall app." 
) def template( @@ -187,9 +185,7 @@ def get_version(self) -> Version: version_match = re.search(r"^v(\d+(?:\.\d+){0,2})", short_version) if version_match is None: msg = f"Could not parse the Helm version.\n\nHelm output:\n{short_version}" - raise RuntimeError( - msg, - ) + raise RuntimeError(msg) version = map(int, version_match.group(1).split(".")) return Version(*version) diff --git a/kpops/component_handlers/helm_wrapper/model.py b/kpops/component_handlers/helm_wrapper/model.py index dce229fa0..af21abb3f 100644 --- a/kpops/component_handlers/helm_wrapper/model.py +++ b/kpops/component_handlers/helm_wrapper/model.py @@ -31,24 +31,19 @@ class RepoAuthFlags(BaseModel): """ username: str | None = Field( - default=None, - description=describe_attr("username", __doc__), + default=None, description=describe_attr("username", __doc__) ) password: str | None = Field( - default=None, - description=describe_attr("password", __doc__), + default=None, description=describe_attr("password", __doc__) ) ca_file: Path | None = Field( - default=None, - description=describe_attr("ca_file", __doc__), + default=None, description=describe_attr("ca_file", __doc__) ) cert_file: Path | None = Field( - default=None, - description=describe_attr("cert_file", __doc__), + default=None, description=describe_attr("cert_file", __doc__) ) insecure_skip_tls_verify: bool = Field( - default=False, - description=describe_attr("insecure_skip_tls_verify", __doc__), + default=False, description=describe_attr("insecure_skip_tls_verify", __doc__) ) class Config(DescConfig): @@ -78,13 +73,11 @@ class HelmRepoConfig(BaseModel): """ repository_name: str = Field( - default=..., - description=describe_attr("repository_name", __doc__), + default=..., description=describe_attr("repository_name", __doc__) ) url: str = Field(default=..., description=describe_attr("url", __doc__)) repo_auth_flags: RepoAuthFlags = Field( - default=RepoAuthFlags(), - description=describe_attr("repo_auth_flags", __doc__), + default=RepoAuthFlags(), description=describe_attr("repo_auth_flags", __doc__) ) class Config(DescConfig): @@ -138,7 +131,7 @@ def to_command(self) -> list[str]: [ "--set-file", ",".join([f"{key}={path}" for key, path in self.set_file.items()]), - ], + ] ) if self.create_namespace: command.append("--create-namespace") diff --git a/kpops/component_handlers/helm_wrapper/utils.py b/kpops/component_handlers/helm_wrapper/utils.py index e05ee187f..7ad76b93a 100644 --- a/kpops/component_handlers/helm_wrapper/utils.py +++ b/kpops/component_handlers/helm_wrapper/utils.py @@ -16,7 +16,7 @@ def trim_release_name(name: str, suffix: str = "") -> str: if len(name) > RELEASE_NAME_MAX_LEN: new_name = name[: (RELEASE_NAME_MAX_LEN - len(suffix))] + suffix log.critical( - f"Invalid Helm release name '{name}'. Truncating to {RELEASE_NAME_MAX_LEN} characters: \n {name} --> {new_name}", + f"Invalid Helm release name '{name}'. Truncating to {RELEASE_NAME_MAX_LEN} characters: \n {name} --> {new_name}" ) name = new_name return name diff --git a/kpops/component_handlers/kafka_connect/connect_wrapper.py b/kpops/component_handlers/kafka_connect/connect_wrapper.py index 35c043dd1..13f02a80d 100644 --- a/kpops/component_handlers/kafka_connect/connect_wrapper.py +++ b/kpops/component_handlers/kafka_connect/connect_wrapper.py @@ -36,8 +36,7 @@ def host(self) -> str: return self._host def create_connector( - self, - connector_config: KafkaConnectorConfig, + self, connector_config: KafkaConnectorConfig ) -> KafkaConnectResponse: """Create a new connector. 
@@ -48,9 +47,7 @@ def create_connector( config_json = connector_config.dict() connect_data = {"name": connector_config.name, "config": config_json} response = httpx.post( - url=f"{self._host}/connectors", - headers=HEADERS, - json=connect_data, + url=f"{self._host}/connectors", headers=HEADERS, json=connect_data ) if response.status_code == httpx.codes.CREATED: log.info(f"Connector {connector_config.name} created.") @@ -58,7 +55,7 @@ def create_connector( return KafkaConnectResponse(**response.json()) elif response.status_code == httpx.codes.CONFLICT: log.warning( - "Rebalancing in progress while creating a connector... Retrying...", + "Rebalancing in progress while creating a connector... Retrying..." ) time.sleep(1) self.create_connector(connector_config) @@ -74,8 +71,7 @@ def get_connector(self, connector_name: str) -> KafkaConnectResponse: :return: Information about the connector. """ response = httpx.get( - url=f"{self._host}/connectors/{connector_name}", - headers=HEADERS, + url=f"{self._host}/connectors/{connector_name}", headers=HEADERS ) if response.status_code == httpx.codes.OK: log.info(f"Connector {connector_name} exists.") @@ -86,15 +82,14 @@ def get_connector(self, connector_name: str) -> KafkaConnectResponse: raise ConnectorNotFoundException elif response.status_code == httpx.codes.CONFLICT: log.warning( - "Rebalancing in progress while getting a connector... Retrying...", + "Rebalancing in progress while getting a connector... Retrying..." ) sleep(1) self.get_connector(connector_name) raise KafkaConnectError(response) def update_connector_config( - self, - connector_config: KafkaConnectorConfig, + self, connector_config: KafkaConnectorConfig ) -> KafkaConnectResponse: """Create or update a connector. @@ -122,15 +117,14 @@ def update_connector_config( return KafkaConnectResponse(**data) elif response.status_code == httpx.codes.CONFLICT: log.warning( - "Rebalancing in progress while updating a connector... Retrying...", + "Rebalancing in progress while updating a connector... Retrying..." ) sleep(1) self.update_connector_config(connector_config) raise KafkaConnectError(response) def validate_connector_config( - self, - connector_config: KafkaConnectorConfig, + self, connector_config: KafkaConnectorConfig ) -> list[str]: """Validate connector config using the given configuration. @@ -146,7 +140,7 @@ def validate_connector_config( if response.status_code == httpx.codes.OK: kafka_connect_error_response = KafkaConnectConfigErrorResponse( - **response.json(), + **response.json() ) errors: list[str] = [] @@ -155,7 +149,7 @@ def validate_connector_config( if len(config.value.errors) > 0: for error in config.value.errors: errors.append( - f"Found error for field {config.value.name}: {error}", + f"Found error for field {config.value.name}: {error}" ) return errors raise KafkaConnectError(response) @@ -169,8 +163,7 @@ def delete_connector(self, connector_name: str) -> None: :raises ConnectorNotFoundException: Connector not found """ response = httpx.delete( - url=f"{self._host}/connectors/{connector_name}", - headers=HEADERS, + url=f"{self._host}/connectors/{connector_name}", headers=HEADERS ) if response.status_code == httpx.codes.NO_CONTENT: log.info(f"Connector {connector_name} deleted.") @@ -180,7 +173,7 @@ def delete_connector(self, connector_name: str) -> None: raise ConnectorNotFoundException elif response.status_code == httpx.codes.CONFLICT: log.warning( - "Rebalancing in progress while deleting a connector... 
Retrying...", + "Rebalancing in progress while deleting a connector... Retrying..." ) sleep(1) self.delete_connector(connector_name) diff --git a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py index d4b00d6aa..c810a9c36 100644 --- a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py +++ b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py @@ -34,10 +34,7 @@ def __init__( self._timeout = timeout def create_connector( - self, - connector_config: KafkaConnectorConfig, - *, - dry_run: bool, + self, connector_config: KafkaConnectorConfig, *, dry_run: bool ) -> None: """Create a connector. @@ -57,7 +54,7 @@ def create_connector( timeout( lambda: self._connect_wrapper.update_connector_config( - connector_config, + connector_config ), secs=self._timeout, ) @@ -89,12 +86,11 @@ def destroy_connector(self, connector_name: str, *, dry_run: bool) -> None: ) except ConnectorNotFoundException: log.warning( - f"Connector Destruction: the connector {connector_name} does not exist. Skipping.", + f"Connector Destruction: the connector {connector_name} does not exist. Skipping." ) def __dry_run_connector_creation( - self, - connector_config: KafkaConnectorConfig, + self, connector_config: KafkaConnectorConfig ) -> None: connector_name = connector_config.name try: @@ -110,7 +106,7 @@ def __dry_run_connector_creation( except ConnectorNotFoundException: diff = render_diff({}, connector_config.dict()) log.info( - f"Connector Creation: connector {connector_name} does not exist. Creating connector with config:\n{diff}", + f"Connector Creation: connector {connector_name} does not exist. Creating connector with config:\n{diff}" ) log.debug("POST /connectors HTTP/1.1") log.debug(f"HOST: {self._connect_wrapper.host}") @@ -119,12 +115,10 @@ def __dry_run_connector_creation( if len(errors) > 0: formatted_errors = "\n".join(errors) msg = f"Connector Creation: validating the connector config for connector {connector_name} resulted in the following errors: {formatted_errors}" - raise ConnectorStateException( - msg, - ) + raise ConnectorStateException(msg) else: log.info( - f"Connector Creation: connector config for {connector_name} is valid!", + f"Connector Creation: connector config for {connector_name} is valid!" ) def __dry_run_connector_deletion(self, connector_name: str) -> None: @@ -132,14 +126,14 @@ def __dry_run_connector_deletion(self, connector_name: str) -> None: self._connect_wrapper.get_connector(connector_name) log.info( magentaify( - f"Connector Destruction: connector {connector_name} already exists. Deleting connector.", - ), + f"Connector Destruction: connector {connector_name} already exists. Deleting connector." + ) ) log.debug(f"DELETE /connectors/{connector_name} HTTP/1.1") log.debug(f"HOST: {self._connect_wrapper.host}") except ConnectorNotFoundException: log.warning( - f"Connector Destruction: connector {connector_name} does not exist and cannot be deleted. Skipping.", + f"Connector Destruction: connector {connector_name} does not exist and cannot be deleted. Skipping." 
) @classmethod diff --git a/kpops/component_handlers/kafka_connect/timeout.py b/kpops/component_handlers/kafka_connect/timeout.py index 398ace4e4..e75ac7361 100644 --- a/kpops/component_handlers/kafka_connect/timeout.py +++ b/kpops/component_handlers/kafka_connect/timeout.py @@ -29,5 +29,5 @@ async def main_supervisor(func: Callable[..., T], secs: int) -> T: return loop.run_until_complete(main_supervisor(func, secs)) except TimeoutError: log.exception( - f"Kafka Connect operation {func.__name__} timed out after {secs} seconds. To increase the duration, set the `timeout` option in config.yaml.", + f"Kafka Connect operation {func.__name__} timed out after {secs} seconds. To increase the duration, set the `timeout` option in config.yaml." ) diff --git a/kpops/component_handlers/schema_handler/schema_handler.py b/kpops/component_handlers/schema_handler/schema_handler.py index 1a05ec86c..63d88b726 100644 --- a/kpops/component_handlers/schema_handler/schema_handler.py +++ b/kpops/component_handlers/schema_handler/schema_handler.py @@ -16,12 +16,12 @@ ) from kpops.utils.colorify import greenify, magentaify -log = logging.getLogger("SchemaHandler") - if TYPE_CHECKING: from kpops.cli.pipeline_config import PipelineConfig from kpops.components.base_components.models.to_section import ToSection +log = logging.getLogger("SchemaHandler") + class SchemaHandler: def __init__(self, url: str, components_module: str | None): @@ -33,22 +33,16 @@ def schema_provider(self) -> SchemaProvider: try: if not self.components_module: msg = f"The Schema Registry URL is set but you haven't specified the component module path. Please provide a valid component module path where your {SchemaProvider.__name__} implementation exists." - raise ValueError( - msg, - ) + raise ValueError(msg) schema_provider_class = find_class(self.components_module, SchemaProvider) return schema_provider_class() # pyright: ignore[reportGeneralTypeIssues] except ClassNotFoundError as e: msg = f"No schema provider found in components module {self.components_module}. Please implement the abstract method in {SchemaProvider.__module__}.{SchemaProvider.__name__}." - raise ValueError( - msg, - ) from e + raise ValueError(msg) from e @classmethod def load_schema_handler( - cls, - components_module: str | None, - config: PipelineConfig, + cls, components_module: str | None, config: PipelineConfig ) -> SchemaHandler | None: if not config.schema_registry_url: return None @@ -64,19 +58,14 @@ def submit_schemas(self, to_section: ToSection, dry_run: bool = True) -> None: key_schema_class = config.key_schema if value_schema_class is not None: schema = self.schema_provider.provide_schema( - value_schema_class, - to_section.models, + value_schema_class, to_section.models ) self.__submit_value_schema( - schema, - value_schema_class, - dry_run, - topic_name, + schema, value_schema_class, dry_run, topic_name ) if key_schema_class is not None: schema = self.schema_provider.provide_schema( - key_schema_class, - to_section.models, + key_schema_class, to_section.models ) self.__submit_key_schema(schema, key_schema_class, dry_run, topic_name) @@ -130,29 +119,25 @@ def __submit_schema( else: log.info( greenify( - f"Schema Submission: The subject {subject} will be submitted.", - ), + f"Schema Submission: The subject {subject} will be submitted." 
+ ) ) else: self.schema_registry_client.register(subject=subject, schema=schema) log.info( - f"Schema Submission: schema submitted for {subject} with model {schema_class}.", + f"Schema Submission: schema submitted for {subject} with model {schema_class}." ) def __subject_exists(self, subject: str) -> bool: return len(self.schema_registry_client.get_versions(subject)) > 0 def __check_compatibility( - self, - schema: Schema, - schema_class: str, - subject: str, + self, schema: Schema, schema_class: str, subject: str ) -> None: registered_version = self.schema_registry_client.check_version(subject, schema) if registered_version is None: if not self.schema_registry_client.test_compatibility( - subject=subject, - schema=schema, + subject=subject, schema=schema ): schema_str = ( schema.flat_schema @@ -160,16 +145,14 @@ def __check_compatibility( else str(schema) ) msg = f"Schema is not compatible for {subject} and model {schema_class}. \n {json.dumps(schema_str, indent=4)}" - raise Exception( - msg, - ) + raise Exception(msg) else: log.debug( - f"Schema Submission: schema was already submitted for the subject {subject} as version {registered_version.schema}. Therefore, the specified schema must be compatible.", + f"Schema Submission: schema was already submitted for the subject {subject} as version {registered_version.schema}. Therefore, the specified schema must be compatible." ) log.info( - f"Schema Submission: compatible schema for {subject} with model {schema_class}.", + f"Schema Submission: compatible schema for {subject} with model {schema_class}." ) def __delete_subject(self, subject: str, dry_run: bool) -> None: @@ -178,5 +161,5 @@ def __delete_subject(self, subject: str, dry_run: bool) -> None: else: version_list = self.schema_registry_client.delete_subject(subject) log.info( - f"Schema Deletion: deleted {len(version_list)} versions for subject {subject}.", + f"Schema Deletion: deleted {len(version_list)} versions for subject {subject}." ) diff --git a/kpops/component_handlers/schema_handler/schema_provider.py b/kpops/component_handlers/schema_handler/schema_provider.py index 253491e9b..0c0423a40 100644 --- a/kpops/component_handlers/schema_handler/schema_provider.py +++ b/kpops/component_handlers/schema_handler/schema_provider.py @@ -5,17 +5,15 @@ from schema_registry.client.schema import AvroSchema, JsonSchema -Schema: TypeAlias = AvroSchema | JsonSchema - if TYPE_CHECKING: from kpops.components.base_components.models import ModelName, ModelVersion +Schema: TypeAlias = AvroSchema | JsonSchema + class SchemaProvider(ABC): @abstractmethod def provide_schema( - self, - schema_class: str, - models: dict[ModelName, ModelVersion], + self, schema_class: str, models: dict[ModelName, ModelVersion] ) -> Schema: ... 
diff --git a/kpops/component_handlers/topic/handler.py b/kpops/component_handlers/topic/handler.py index 75888de16..dae606108 100644 --- a/kpops/component_handlers/topic/handler.py +++ b/kpops/component_handlers/topic/handler.py @@ -35,11 +35,10 @@ def create_topics(self, to_section: ToSection, dry_run: bool) -> None: try: self.proxy_wrapper.get_topic(topic_name=topic_name) topic_config_in_cluster = self.proxy_wrapper.get_topic_config( - topic_name=topic_name, + topic_name=topic_name ) differences = self.__get_topic_config_diff( - topic_config_in_cluster, - topic_config.configs, + topic_config_in_cluster, topic_config.configs ) if differences: @@ -47,11 +46,11 @@ def create_topics(self, to_section: ToSection, dry_run: bool) -> None: for difference in differences: if difference.diff_type is DiffType.REMOVE: json_body.append( - {"name": difference.key, "operation": "DELETE"}, + {"name": difference.key, "operation": "DELETE"} ) elif config_value := difference.change.new_value: json_body.append( - {"name": difference.key, "value": config_value}, + {"name": difference.key, "value": config_value} ) self.proxy_wrapper.batch_alter_topic_config( topic_name=topic_name, @@ -60,7 +59,7 @@ def create_topics(self, to_section: ToSection, dry_run: bool) -> None: else: log.info( - f"Topic Creation: config of topic {topic_name} didn't change. Skipping update.", + f"Topic Creation: config of topic {topic_name} didn't change. Skipping update." ) except TopicNotFoundException: self.proxy_wrapper.create_topic(topic_spec=topic_spec) @@ -75,16 +74,15 @@ def delete_topics(self, to_section: ToSection, dry_run: bool) -> None: self.proxy_wrapper.delete_topic(topic_name=topic_name) except TopicNotFoundException: log.warning( - f"Topic Deletion: topic {topic_name} does not exist in the cluster and cannot be deleted. Skipping.", + f"Topic Deletion: topic {topic_name} does not exist in the cluster and cannot be deleted. Skipping." ) @staticmethod def __get_topic_config_diff( - cluster_config: TopicConfigResponse, - current_config: dict, + cluster_config: TopicConfigResponse, current_config: dict ) -> list[Diff]: comparable_in_cluster_config_dict, _ = parse_rest_proxy_topic_config( - cluster_config, + cluster_config ) return list(Diff.from_dicts(comparable_in_cluster_config_dict, current_config)) @@ -99,11 +97,10 @@ def __dry_run_topic_creation( topic_name = topic_in_cluster.topic_name if topic_config: topic_config_in_cluster = self.proxy_wrapper.get_topic_config( - topic_name=topic_name, + topic_name=topic_name ) in_cluster_config, new_config = parse_and_compare_topic_configs( - topic_config_in_cluster, - topic_config.configs, + topic_config_in_cluster, topic_config.configs ) if diff := render_diff(in_cluster_config, new_config): log.info(f"Config changes for topic {topic_name}:") @@ -123,15 +120,13 @@ def __dry_run_topic_creation( self.__check_partition_count(topic_in_cluster, topic_spec, effective_config) self.__check_replication_factor( - topic_in_cluster, - topic_spec, - effective_config, + topic_in_cluster, topic_spec, effective_config ) except TopicNotFoundException: log.info( greenify( - f"Topic Creation: {topic_name} does not exist in the cluster. Creating topic.", - ), + f"Topic Creation: {topic_name} does not exist in the cluster. Creating topic." 
+ ) ) log.debug(f"POST /clusters/{self.proxy_wrapper.cluster_id}/topics HTTP/1.1") log.debug(f"Host: {self.proxy_wrapper.host}") @@ -150,13 +145,11 @@ def __check_partition_count( topic_spec.partitions_count or int(broker_config["num.partitions"]) ): log.debug( - f"Topic Creation: partition count of topic {topic_name} did not change. Current partitions count {partition_count}. Updating configs.", + f"Topic Creation: partition count of topic {topic_name} did not change. Current partitions count {partition_count}. Updating configs." ) else: msg = f"Topic Creation: partition count of topic {topic_name} changed! Partitions count of topic {topic_name} is {partition_count}. The given partitions count {topic_spec.partitions_count}." - raise TopicTransactionError( - msg, - ) + raise TopicTransactionError(msg) @staticmethod def __check_replication_factor( @@ -171,28 +164,26 @@ def __check_replication_factor( or int(broker_config["default.replication.factor"]) ): log.debug( - f"Topic Creation: replication factor of topic {topic_name} did not change. Current replication factor {replication_factor}. Updating configs.", + f"Topic Creation: replication factor of topic {topic_name} did not change. Current replication factor {replication_factor}. Updating configs." ) else: msg = f"Topic Creation: replication factor of topic {topic_name} changed! Replication factor of topic {topic_name} is {replication_factor}. The given replication count {topic_spec.replication_factor}." - raise TopicTransactionError( - msg, - ) + raise TopicTransactionError(msg) def __dry_run_topic_deletion(self, topic_name: str) -> None: try: topic_in_cluster = self.proxy_wrapper.get_topic(topic_name=topic_name) log.info( magentaify( - f"Topic Deletion: topic {topic_in_cluster.topic_name} exists in the cluster. Deleting topic.", - ), + f"Topic Deletion: topic {topic_in_cluster.topic_name} exists in the cluster. Deleting topic." + ) ) log.debug( - f"DELETE /clusters/{self.proxy_wrapper.cluster_id}/topics HTTP/1.1", + f"DELETE /clusters/{self.proxy_wrapper.cluster_id}/topics HTTP/1.1" ) except TopicNotFoundException: log.warning( - f"Topic Deletion: topic {topic_name} does not exist in the cluster and cannot be deleted. Skipping.", + f"Topic Deletion: topic {topic_name} does not exist in the cluster and cannot be deleted. Skipping." ) log.debug(f"Host: {self.proxy_wrapper.host}") log.debug(HEADERS) diff --git a/kpops/component_handlers/topic/proxy_wrapper.py b/kpops/component_handlers/topic/proxy_wrapper.py index 9eb706b96..4edc3633c 100644 --- a/kpops/component_handlers/topic/proxy_wrapper.py +++ b/kpops/component_handlers/topic/proxy_wrapper.py @@ -26,9 +26,7 @@ class ProxyWrapper: def __init__(self, pipeline_config: PipelineConfig) -> None: if not pipeline_config.kafka_rest_host: msg = "The Kafka REST Proxy host is not set. Please set the host in the config.yaml using the kafka_rest_host property or set the environemt variable KPOPS_REST_PROXY_HOST." 
- raise ValueError( - msg, - ) + raise ValueError(msg) self._host = pipeline_config.kafka_rest_host diff --git a/kpops/component_handlers/topic/utils.py b/kpops/component_handlers/topic/utils.py index 904833a28..70f71d0b3 100644 --- a/kpops/component_handlers/topic/utils.py +++ b/kpops/component_handlers/topic/utils.py @@ -6,18 +6,17 @@ def parse_and_compare_topic_configs( - topic_config_in_cluster: TopicConfigResponse, - topic_config: dict, + topic_config_in_cluster: TopicConfigResponse, topic_config: dict ) -> tuple[dict, dict]: comparable_in_cluster_config_dict, default_configs = parse_rest_proxy_topic_config( - topic_config_in_cluster, + topic_config_in_cluster ) cluster_defaults_overwrite = set(topic_config.keys()) - set( - comparable_in_cluster_config_dict.keys(), + comparable_in_cluster_config_dict.keys() ) config_overwrites = set(comparable_in_cluster_config_dict.keys()) - set( - topic_config.keys(), + topic_config.keys() ) populate_default_configs( cluster_defaults_overwrite, diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index 8f3f0929b..d9100bd25 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -93,24 +93,17 @@ def extend_with_defaults(self, **kwargs) -> dict: config: PipelineConfig = kwargs["config"] log.debug( typer.style( - "Enriching component of type ", - fg=typer.colors.GREEN, - bold=False, + "Enriching component of type ", fg=typer.colors.GREEN, bold=False ) + typer.style( - kwargs.get("type"), - fg=typer.colors.GREEN, - bold=True, - underline=True, - ), + kwargs.get("type"), fg=typer.colors.GREEN, bold=True, underline=True + ) ) main_default_file_path, environment_default_file_path = get_defaults_file_paths( - config, + config ) defaults = load_defaults( - self.__class__, - main_default_file_path, - environment_default_file_path, + self.__class__, main_default_file_path, environment_default_file_path ) return update_nested(kwargs, defaults) @@ -178,7 +171,7 @@ def defaults_from_yaml(path: Path, key: str) -> dict: if value is None: return {} log.debug( - f"\tFound defaults for component type {typer.style(key, bold=True, fg=typer.colors.MAGENTA)} in file: {path}", + f"\tFound defaults for component type {typer.style(key, bold=True, fg=typer.colors.MAGENTA)} in file: {path}" ) return value @@ -195,11 +188,11 @@ def get_defaults_file_paths(config: PipelineConfig) -> tuple[Path, Path]: """ defaults_dir = Path(config.defaults_path).resolve() main_default_file_path = defaults_dir / Path( - config.defaults_filename_prefix, + config.defaults_filename_prefix ).with_suffix(".yaml") environment_default_file_path = defaults_dir / Path( - f"{config.defaults_filename_prefix}_{config.environment}", + f"{config.defaults_filename_prefix}_{config.environment}" ).with_suffix(".yaml") return main_default_file_path, environment_default_file_path diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index 1650a9bdf..a13dc7a7d 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -30,8 +30,7 @@ class KafkaStreamsConfig(BaseModel): brokers: str = Field(default=..., description=describe_attr("brokers", __doc__)) schema_registry_url: str | None = Field( - default=None, - description=describe_attr("schema_registry_url", __doc__), + default=None, description=describe_attr("schema_registry_url", __doc__) ) class 
Config(CamelCaseConfig, DescConfig): @@ -46,12 +45,10 @@ class KafkaAppConfig(KubernetesAppConfig): """ streams: KafkaStreamsConfig = Field( - default=..., - description=describe_attr("streams", __doc__), + default=..., description=describe_attr("streams", __doc__) ) name_override: str | None = Field( - default=None, - description=describe_attr("name_override", __doc__), + default=None, description=describe_attr("name_override", __doc__) ) @@ -92,14 +89,12 @@ def clean_up_helm_chart(self) -> str: def deploy(self, dry_run: bool) -> None: if self.to: self.handlers.topic_handler.create_topics( - to_section=self.to, - dry_run=dry_run, + to_section=self.to, dry_run=dry_run ) if self.handlers.schema_handler: self.handlers.schema_handler.submit_schemas( - to_section=self.to, - dry_run=dry_run, + to_section=self.to, dry_run=dry_run ) super().deploy(dry_run) @@ -118,8 +113,7 @@ def _run_clean_up_job( """ suffix = "-clean" clean_up_release_name = trim_release_name( - self.helm_release_name + suffix, - suffix, + self.helm_release_name + suffix, suffix ) log.info(f"Uninstall old cleanup job for {clean_up_release_name}") @@ -128,10 +122,7 @@ def _run_clean_up_job( log.info(f"Init cleanup job for {clean_up_release_name}") stdout = self.__install_clean_up_job( - clean_up_release_name, - suffix, - values, - dry_run, + clean_up_release_name, suffix, values, dry_run ) if dry_run: diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py index 6420662a3..96ee68041 100644 --- a/kpops/components/base_components/kafka_connector.py +++ b/kpops/components/base_components/kafka_connector.py @@ -67,8 +67,7 @@ class KafkaConnector(PipelineComponent, ABC): description=describe_attr("repo_config", __doc__), ) version: str | None = Field( - default="1.0.4", - description=describe_attr("version", __doc__), + default="1.0.4", description=describe_attr("version", __doc__) ) resetter_values: dict = Field( default_factory=dict, @@ -141,14 +140,12 @@ def template_flags(self) -> HelmTemplateFlags: def deploy(self, dry_run: bool) -> None: if self.to: self.handlers.topic_handler.create_topics( - to_section=self.to, - dry_run=dry_run, + to_section=self.to, dry_run=dry_run ) if self.handlers.schema_handler: self.handlers.schema_handler.submit_schemas( - to_section=self.to, - dry_run=dry_run, + to_section=self.to, dry_run=dry_run ) self.handlers.connector_handler.create_connector(self.app, dry_run=dry_run) @@ -156,8 +153,7 @@ def deploy(self, dry_run: bool) -> None: @override def destroy(self, dry_run: bool) -> None: self.handlers.connector_handler.destroy_connector( - self.full_name, - dry_run=dry_run, + self.full_name, dry_run=dry_run ) @override @@ -165,8 +161,7 @@ def clean(self, dry_run: bool) -> None: if self.to: if self.handlers.schema_handler: self.handlers.schema_handler.delete_schemas( - to_section=self.to, - dry_run=dry_run, + to_section=self.to, dry_run=dry_run ) self.handlers.topic_handler.delete_topics(self.to, dry_run=dry_run) @@ -188,24 +183,22 @@ def _run_connect_resetter( """ log.info( magentaify( - f"Connector Cleanup: uninstalling cleanup job Helm release from previous runs for {self.full_name}", - ), + f"Connector Cleanup: uninstalling cleanup job Helm release from previous runs for {self.full_name}" + ) ) self.__uninstall_connect_resetter(self._resetter_release_name, dry_run) log.info( magentaify( - f"Connector Cleanup: deploy Connect {self._connector_type.value} resetter for {self.full_name}", - ), + f"Connector Cleanup: deploy Connect 
{self._connector_type.value} resetter for {self.full_name}" + ) ) stdout = self.__install_connect_resetter(dry_run, **kwargs) if dry_run: self.dry_run_handler.print_helm_diff( - stdout, - self._resetter_release_name, - log, + stdout, self._resetter_release_name, log ) if not retain_clean_jobs: @@ -369,9 +362,7 @@ def clean(self, dry_run: bool) -> None: self.__run_kafka_connect_resetter(dry_run, delete_consumer_group=True) def __run_kafka_connect_resetter( - self, - dry_run: bool, - delete_consumer_group: bool, + self, dry_run: bool, delete_consumer_group: bool ) -> None: """Run the connector resetter. diff --git a/kpops/components/base_components/kubernetes_app.py b/kpops/components/base_components/kubernetes_app.py index 2e0b44511..ff35459c3 100644 --- a/kpops/components/base_components/kubernetes_app.py +++ b/kpops/components/base_components/kubernetes_app.py @@ -25,7 +25,7 @@ log = logging.getLogger("KubernetesAppComponent") KUBERNETES_NAME_CHECK_PATTERN = re.compile( - r"^(?![0-9]+$)(?!.*-$)(?!-)[a-z0-9-.]{1,253}(? str: msg = ( f"Please implement the helm_chart property of the {self.__module__} module." ) - raise NotImplementedError( - msg, - ) + raise NotImplementedError(msg) @property def helm_flags(self) -> HelmFlags: @@ -174,7 +172,7 @@ def print_helm_diff(self, stdout: str) -> None: :param stdout: The output of a Helm command that installs or upgrades the release """ current_release = list( - self.helm.get_manifest(self.helm_release_name, self.namespace), + self.helm.get_manifest(self.helm_release_name, self.namespace) ) if current_release: log.info(f"Helm release {self.helm_release_name} already exists") diff --git a/kpops/components/base_components/models/from_section.py b/kpops/components/base_components/models/from_section.py index c416026c9..153133639 100644 --- a/kpops/components/base_components/models/from_section.py +++ b/kpops/components/base_components/models/from_section.py @@ -27,8 +27,7 @@ class FromTopic(BaseModel): """ type: InputTopicTypes | None = Field( - default=None, - description=describe_attr("type", __doc__), + default=None, description=describe_attr("type", __doc__) ) role: str | None = Field(default=None, description=describe_attr("role", __doc__)) diff --git a/kpops/components/base_components/models/to_section.py b/kpops/components/base_components/models/to_section.py index d56476659..03f1d7141 100644 --- a/kpops/components/base_components/models/to_section.py +++ b/kpops/components/base_components/models/to_section.py @@ -31,9 +31,7 @@ class TopicConfig(BaseModel): """ type: OutputTopicTypes | None = Field( - default=None, - title="Topic type", - description=describe_attr("type", __doc__), + default=None, title="Topic type", description=describe_attr("type", __doc__) ) key_schema: str | None = Field( default=None, @@ -56,8 +54,7 @@ class TopicConfig(BaseModel): description=describe_attr("replication_factor", __doc__), ) configs: dict[str, str | int] = Field( - default={}, - description=describe_attr("configs", __doc__), + default={}, description=describe_attr("configs", __doc__) ) role: str | None = Field(default=None, description=describe_attr("role", __doc__)) @@ -83,12 +80,10 @@ class ToSection(BaseModel): """ topics: dict[TopicName, TopicConfig] = Field( - default={}, - description=describe_attr("topics", __doc__), + default={}, description=describe_attr("topics", __doc__) ) models: dict[ModelName, ModelVersion] = Field( - default={}, - description=describe_attr("models", __doc__), + default={}, description=describe_attr("models", __doc__) ) class 
Config(DescConfig): diff --git a/kpops/components/streams_bootstrap/producer/model.py b/kpops/components/streams_bootstrap/producer/model.py index 1e5348948..8af1a68c6 100644 --- a/kpops/components/streams_bootstrap/producer/model.py +++ b/kpops/components/streams_bootstrap/producer/model.py @@ -15,12 +15,10 @@ class ProducerStreamsConfig(KafkaStreamsConfig): """ extra_output_topics: dict[str, str] = Field( - default={}, - description=describe_attr("extra_output_topics", __doc__), + default={}, description=describe_attr("extra_output_topics", __doc__) ) output_topic: str | None = Field( - default=None, - description=describe_attr("output_topic", __doc__), + default=None, description=describe_attr("output_topic", __doc__) ) @@ -31,8 +29,7 @@ class ProducerValues(KafkaAppConfig): """ streams: ProducerStreamsConfig = Field( - default=..., - description=describe_attr("streams", __doc__), + default=..., description=describe_attr("streams", __doc__) ) class Config(BaseConfig): diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index 0433fb5dc..ca2db77ae 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -27,36 +27,28 @@ class StreamsConfig(KafkaStreamsConfig): """ input_topics: list[str] = Field( - default=[], - description=describe_attr("input_topics", __doc__), + default=[], description=describe_attr("input_topics", __doc__) ) input_pattern: str | None = Field( - default=None, - description=describe_attr("input_pattern", __doc__), + default=None, description=describe_attr("input_pattern", __doc__) ) extra_input_topics: dict[str, list[str]] = Field( - default={}, - description=describe_attr("extra_input_topics", __doc__), + default={}, description=describe_attr("extra_input_topics", __doc__) ) extra_input_patterns: dict[str, str] = Field( - default={}, - description=describe_attr("extra_input_patterns", __doc__), + default={}, description=describe_attr("extra_input_patterns", __doc__) ) extra_output_topics: dict[str, str] = Field( - default={}, - description=describe_attr("extra_output_topics", __doc__), + default={}, description=describe_attr("extra_output_topics", __doc__) ) output_topic: str | None = Field( - default=None, - description=describe_attr("output_topic", __doc__), + default=None, description=describe_attr("output_topic", __doc__) ) error_topic: str | None = Field( - default=None, - description=describe_attr("error_topic", __doc__), + default=None, description=describe_attr("error_topic", __doc__) ) config: dict[str, str] = Field( - default={}, - description=describe_attr("config", __doc__), + default={}, description=describe_attr("config", __doc__) ) def add_input_topics(self, topics: list[str]) -> None: @@ -77,7 +69,7 @@ def add_extra_input_topics(self, role: str, topics: list[str]) -> None: :param role: Topic role """ self.extra_input_topics[role] = deduplicate( - self.extra_input_topics.get(role, []) + topics, + self.extra_input_topics.get(role, []) + topics ) @override diff --git a/kpops/pipeline_generator/pipeline.py b/kpops/pipeline_generator/pipeline.py index 96588beee..920eec202 100644 --- a/kpops/pipeline_generator/pipeline.py +++ b/kpops/pipeline_generator/pipeline.py @@ -69,14 +69,12 @@ def validate_unique_names(self) -> None: duplicates = [name for name, count in Counter(step_names).items() if count > 1] if duplicates: msg = f"step names should be unique. 
duplicate step names: {', '.join(duplicates)}" - raise ValidationError( - msg, - ) + raise ValidationError(msg) @staticmethod def _populate_component_name(component: PipelineComponent) -> None: # TODO: remove with suppress( - AttributeError, # Some components like Kafka Connect do not have a name_override attribute + AttributeError # Some components like Kafka Connect do not have a name_override attribute ): if (app := getattr(component, "app")) and app.name_override is None: app.name_override = component.full_name @@ -94,9 +92,7 @@ def create_env_components_index( for component in environment_components: if "type" not in component or "name" not in component: msg = "To override components per environment, every component should at least have a type and a name." - raise ValueError( - msg, - ) + raise ValueError(msg) index[component["name"]] = component return index @@ -145,17 +141,13 @@ def load_from_yaml( main_content = load_yaml_file(path, substitution=ENV) if not isinstance(main_content, list): msg = f"The pipeline definition {path} should contain a list of components" - raise TypeError( - msg, - ) + raise TypeError(msg) env_content = [] if (env_file := Pipeline.pipeline_filename_environment(path, config)).exists(): env_content = load_yaml_file(env_file, substitution=ENV) if not isinstance(env_content, list): msg = f"The pipeline definition {env_file} should contain a list of components" - raise TypeError( - msg, - ) + raise TypeError(msg) return cls(main_content, env_content, registry, config, handlers) @@ -173,24 +165,18 @@ def parse_components(self, component_list: list[dict]) -> None: component_type: str = component_data["type"] except KeyError as ke: msg = "Every component must have a type defined, this component does not have one." - raise ValueError( - msg, - ) from ke + raise ValueError(msg) from ke component_class = self.registry[component_type] self.apply_component(component_class, component_data) except Exception as ex: # noqa: BLE001 if "name" in component_data: msg = f"Error enriching {component_data['type']} component {component_data['name']}" - raise ParsingException( - msg, - ) from ex + raise ParsingException(msg) from ex else: raise ParsingException from ex def apply_component( - self, - component_class: type[PipelineComponent], - component_data: dict, + self, component_class: type[PipelineComponent], component_data: dict ) -> None: """Instantiate, enrich and inflate pipeline component. 
@@ -217,15 +203,14 @@ def apply_component( from_topic, ) in enriched_component.from_.components.items(): original_from_component = self.components.find( - original_from_component_name, + original_from_component_name ) inflated_from_component = original_from_component.inflate()[-1] resolved_from_component = self.components.find( - inflated_from_component.name, + inflated_from_component.name ) enriched_component.weave_from_topics( - resolved_from_component.to, - from_topic, + resolved_from_component.to, from_topic ) elif self.components: # read from previous component @@ -273,7 +258,7 @@ def print_yaml(self, substitution: dict | None = None) -> None: theme="ansi_dark", ) Console( - width=1000, # HACK: overwrite console width to avoid truncating output + width=1000 # HACK: overwrite console width to avoid truncating output ).print(syntax) def __iter__(self) -> Iterator[PipelineComponent]: @@ -282,8 +267,8 @@ def __iter__(self) -> Iterator[PipelineComponent]: def __str__(self) -> str: return yaml.dump( json.loads( # HACK: serialize types on Pydantic model export, which are not serialized by .dict(); e.g. pathlib.Path - self.components.json(exclude_none=True, by_alias=True), - ), + self.components.json(exclude_none=True, by_alias=True) + ) ) def __len__(self) -> int: @@ -309,15 +294,14 @@ def substitute_in_component(self, component_as_dict: dict) -> dict: substitution_hardcoded, ) substitution = generate_substitution( - json.loads(config.json()), - existing_substitution=component_substitution, + json.loads(config.json()), existing_substitution=component_substitution ) return json.loads( substitute_nested( json.dumps(component_as_dict), **update_nested_pair(substitution, ENV), - ), + ) ) def validate(self) -> None: diff --git a/kpops/utils/dict_differ.py b/kpops/utils/dict_differ.py index 5bc8d720a..934924e21 100644 --- a/kpops/utils/dict_differ.py +++ b/kpops/utils/dict_differ.py @@ -54,9 +54,7 @@ class Diff(Generic[T]): @staticmethod def from_dicts( - d1: dict, - d2: dict, - ignore: set[str] | None = None, + d1: dict, d2: dict, ignore: set[str] | None = None ) -> Iterator[Diff]: for diff_type, keys, changes in diff(d1, d2, ignore=ignore): if not isinstance(changes_tmp := changes, list): @@ -91,8 +89,8 @@ def render_diff(d1: dict, d2: dict, ignore: set[str] | None = None) -> str | Non differ.compare( to_yaml(d1) if d1 else "", to_yaml(d2_filtered) if d2_filtered else "", - ), - ), + ) + ) ) diff --git a/kpops/utils/dict_ops.py b/kpops/utils/dict_ops.py index d3c173edc..14cc849e3 100644 --- a/kpops/utils/dict_ops.py +++ b/kpops/utils/dict_ops.py @@ -46,9 +46,7 @@ def update_nested(*argv: dict) -> dict: def flatten_mapping( - nested_mapping: Mapping[str, Any], - prefix: str | None = None, - separator: str = "_", + nested_mapping: Mapping[str, Any], prefix: str | None = None, separator: str = "_" ) -> dict[str, Any]: """Flattens a Mapping. 
diff --git a/kpops/utils/environment.py b/kpops/utils/environment.py index b1b2271b4..0ed7ae920 100644 --- a/kpops/utils/environment.py +++ b/kpops/utils/environment.py @@ -13,7 +13,7 @@ def __init__(self, mapping=None, /, **kwargs) -> None: mapping = {} if kwargs: mapping.update( - {transformation(key): value for key, value in kwargs.items()}, + {transformation(key): value for key, value in kwargs.items()} ) super().__init__(mapping) diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index c1d96ce5c..7cad9422d 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -38,8 +38,7 @@ def field_schema(field: ModelField, **kwargs: Any) -> Any: def _is_valid_component( - defined_component_types: set[str], - component: type[PipelineComponent], + defined_component_types: set[str], component: type[PipelineComponent] ) -> bool: """Check whether a PipelineComponent subclass has a valid definition for the schema generation. @@ -58,8 +57,7 @@ def _is_valid_component( def _add_components( - components_module: str, - components: tuple[type[PipelineComponent]] | None = None, + components_module: str, components: tuple[type[PipelineComponent]] | None = None ) -> tuple[type[PipelineComponent]]: """Add components to a components tuple. @@ -85,8 +83,7 @@ def _add_components( def gen_pipeline_schema( - components_module: str | None = None, - include_stock_components: bool = True, + components_module: str | None = None, include_stock_components: bool = True ) -> None: """Generate a json schema from the models of pipeline components. @@ -128,8 +125,7 @@ def gen_pipeline_schema( ) AnnotatedPipelineComponents = Annotated[ - PipelineComponents, - Field(discriminator="type"), + PipelineComponents, Field(discriminator="type") ] schema = schema_json_of( @@ -145,9 +141,6 @@ def gen_pipeline_schema( def gen_config_schema() -> None: """Generate a json schema from the model of pipeline config.""" schema = schema_json_of( - PipelineConfig, - title="KPOps config schema", - indent=4, - sort_keys=True, + PipelineConfig, title="KPOps config schema", indent=4, sort_keys=True ) print(schema) diff --git a/kpops/utils/yaml_loading.py b/kpops/utils/yaml_loading.py index b587ae1e4..fb810c193 100644 --- a/kpops/utils/yaml_loading.py +++ b/kpops/utils/yaml_loading.py @@ -9,8 +9,7 @@ def generate_hashkey( - file_path: Path, - substitution: Mapping[str, Any] | None = None, + file_path: Path, substitution: Mapping[str, Any] | None = None ) -> tuple: if substitution is None: substitution = {} @@ -19,9 +18,7 @@ def generate_hashkey( @cached(cache={}, key=generate_hashkey) def load_yaml_file( - file_path: Path, - *, - substitution: Mapping[str, Any] | None = None, + file_path: Path, *, substitution: Mapping[str, Any] | None = None ) -> dict | list[dict]: with file_path.open() as yaml_file: return yaml.load(substitute(yaml_file.read(), substitution), Loader=yaml.Loader) @@ -74,7 +71,5 @@ def substitute_nested(input: str, **kwargs) -> str: old_str, new_str = new_str, substitute(new_str, kwargs) if new_str != old_str: msg = "An infinite loop condition detected. Check substitution variables." 
- raise ValueError( - msg, - ) + raise ValueError(msg) return old_str diff --git a/tests/cli/resources/module.py b/tests/cli/resources/module.py index 3691e53e1..3956eedf2 100644 --- a/tests/cli/resources/module.py +++ b/tests/cli/resources/module.py @@ -9,8 +9,6 @@ class CustomSchemaProvider(SchemaProvider): def provide_schema( - self, - schema_class: str, - models: dict[ModelName, ModelVersion], + self, schema_class: str, models: dict[ModelName, ModelVersion] ) -> Schema: return AvroSchema() diff --git a/tests/cli/test_pipeline_steps.py b/tests/cli/test_pipeline_steps.py index 8b4c6c6e3..a09d7b064 100644 --- a/tests/cli/test_pipeline_steps.py +++ b/tests/cli/test_pipeline_steps.py @@ -45,9 +45,7 @@ def log_info(mocker: MockerFixture) -> MagicMock: def tests_filter_steps_to_apply(log_info: MagicMock, pipeline: Pipeline): filtered_steps = get_steps_to_apply( - pipeline, - "example2,example3", - FilterType.INCLUDE, + pipeline, "example2,example3", FilterType.INCLUDE ) assert len(filtered_steps) == 2 @@ -56,7 +54,7 @@ def tests_filter_steps_to_apply(log_info: MagicMock, pipeline: Pipeline): assert log_info.call_count == 1 log_info.assert_any_call( - "The following steps are included:\n['example2', 'example3']", + "The following steps are included:\n['example2', 'example3']" ) filtered_steps = get_steps_to_apply(pipeline, None, FilterType.INCLUDE) @@ -68,9 +66,7 @@ def tests_filter_steps_to_apply(log_info: MagicMock, pipeline: Pipeline): def tests_filter_steps_to_exclude(log_info: MagicMock, pipeline: Pipeline): filtered_steps = get_steps_to_apply( - pipeline, - "example2,example3", - FilterType.EXCLUDE, + pipeline, "example2,example3", FilterType.EXCLUDE ) assert len(filtered_steps) == 1 diff --git a/tests/cli/test_schema_generation.py b/tests/cli/test_schema_generation.py index 1d61368d1..cbb855d14 100644 --- a/tests/cli/test_schema_generation.py +++ b/tests/cli/test_schema_generation.py @@ -78,8 +78,7 @@ class SubPipelineComponentCorrectDocstr(SubPipelineComponent): """ example_attr: str = Field( - default=..., - description=describe_attr("example_attr", __doc__), + default=..., description=describe_attr("example_attr", __doc__) ) @@ -87,10 +86,7 @@ class SubPipelineComponentCorrectDocstr(SubPipelineComponent): @pytest.mark.filterwarnings( - "ignore:handlers", - "ignore:config", - "ignore:enrich", - "ignore:validate", + "ignore:handlers", "ignore:config", "ignore:enrich", "ignore:validate" ) class TestGenSchema: def test_gen_pipeline_schema_no_modules(self, caplog: pytest.LogCaptureFixture): @@ -108,7 +104,7 @@ def test_gen_pipeline_schema_no_modules(self, caplog: pytest.LogCaptureFixture): "root", logging.WARNING, "No components are provided, no schema is generated.", - ), + ) ] assert result.exit_code == 0 diff --git a/tests/compiler/test_pipeline_name.py b/tests/compiler/test_pipeline_name.py index 6561197a1..f0a1b1b1e 100644 --- a/tests/compiler/test_pipeline_name.py +++ b/tests/compiler/test_pipeline_name.py @@ -51,8 +51,7 @@ def test_should_set_pipeline_name_with_absolute_base_dir(): def test_should_not_set_pipeline_name_with_the_same_base_dir(): with pytest.raises( - ValueError, - match="The pipeline-base-dir should not equal the pipeline-path", + ValueError, match="The pipeline-base-dir should not equal the pipeline-path" ): Pipeline.set_pipeline_name_env_vars(PIPELINE_PATH, PIPELINE_PATH) diff --git a/tests/component_handlers/helm_wrapper/test_dry_run_handler.py b/tests/component_handlers/helm_wrapper/test_dry_run_handler.py index 0f21a970c..bad4f2aa8 100644 --- 
a/tests/component_handlers/helm_wrapper/test_dry_run_handler.py +++ b/tests/component_handlers/helm_wrapper/test_dry_run_handler.py @@ -15,13 +15,13 @@ class TestDryRunHandler: @pytest.fixture() def helm_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch( - "kpops.component_handlers.helm_wrapper.dry_run_handler.Helm", + "kpops.component_handlers.helm_wrapper.dry_run_handler.Helm" ).return_value @pytest.fixture() def helm_diff_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch( - "kpops.component_handlers.helm_wrapper.dry_run_handler.HelmDiff", + "kpops.component_handlers.helm_wrapper.dry_run_handler.HelmDiff" ).return_value def test_should_print_helm_diff_when_release_is_new( @@ -42,8 +42,7 @@ def test_should_print_helm_diff_when_release_is_new( dry_run_handler.print_helm_diff("A test stdout", "a-release-name", log) helm_mock.get_manifest.assert_called_once_with( - "a-release-name", - "test-namespace", + "a-release-name", "test-namespace" ) assert "Helm release a-release-name does not exist" in caplog.text mock_load_manifest.assert_called_once_with("A test stdout") @@ -56,7 +55,7 @@ def test_should_print_helm_diff_when_release_exists( caplog: LogCaptureFixture, ): helm_mock.get_manifest.return_value = iter( - [HelmTemplate("path.yaml", {"a": 1})], + [HelmTemplate("path.yaml", {"a": 1})] ) mock_load_manifest = mocker.patch( "kpops.component_handlers.helm_wrapper.dry_run_handler.Helm.load_manifest", @@ -68,8 +67,7 @@ def test_should_print_helm_diff_when_release_exists( dry_run_handler.print_helm_diff("A test stdout", "a-release-name", log) helm_mock.get_manifest.assert_called_once_with( - "a-release-name", - "test-namespace", + "a-release-name", "test-namespace" ) assert "Helm release a-release-name already exists" in caplog.text mock_load_manifest.assert_called_once_with("A test stdout") diff --git a/tests/component_handlers/helm_wrapper/test_helm_diff.py b/tests/component_handlers/helm_wrapper/test_helm_diff.py index fc423cf20..15a58a023 100644 --- a/tests/component_handlers/helm_wrapper/test_helm_diff.py +++ b/tests/component_handlers/helm_wrapper/test_helm_diff.py @@ -24,7 +24,7 @@ def test_diff(): HelmTemplate("a.yaml", {"a": 2}), HelmTemplate("c.yaml", {"c": 1}), ], - ), + ) ) == [ Change( old_value={"a": 1}, @@ -42,7 +42,7 @@ def test_diff(): # test no current release assert list( - helm_diff.calculate_changes((), [HelmTemplate("a.yaml", {"a": 1})]), + helm_diff.calculate_changes((), [HelmTemplate("a.yaml", {"a": 1})]) ) == [ Change( old_value={}, diff --git a/tests/component_handlers/helm_wrapper/test_helm_wrapper.py b/tests/component_handlers/helm_wrapper/test_helm_wrapper.py index e8870de85..ce6fae709 100644 --- a/tests/component_handlers/helm_wrapper/test_helm_wrapper.py +++ b/tests/component_handlers/helm_wrapper/test_helm_wrapper.py @@ -44,9 +44,7 @@ def mock_get_version(self, mocker: MockerFixture) -> MagicMock: return mock_get_version def test_should_call_run_command_method_when_helm_install_with_defaults( - self, - run_command: MagicMock, - mock_get_version: MagicMock, + self, run_command: MagicMock, mock_get_version: MagicMock ): helm_wrapper = Helm(helm_config=HelmConfig()) @@ -76,9 +74,7 @@ def test_should_call_run_command_method_when_helm_install_with_defaults( ) def test_should_include_configured_tls_parameters_on_add_when_version_is_old( - self, - run_command: MagicMock, - mocker: MockerFixture, + self, run_command: MagicMock, mocker: MockerFixture ): mock_get_version = mocker.patch.object(Helm, "get_version") 
mock_get_version.return_value = Version(major=3, minor=6, patch=0) @@ -108,9 +104,7 @@ def test_should_include_configured_tls_parameters_on_add_when_version_is_old( ] def test_should_include_configured_tls_parameters_on_add_when_version_is_new( - self, - run_command: MagicMock, - mock_get_version: MagicMock, + self, run_command: MagicMock, mock_get_version: MagicMock ): helm = Helm(HelmConfig()) @@ -138,9 +132,7 @@ def test_should_include_configured_tls_parameters_on_add_when_version_is_new( ] def test_should_include_configured_tls_parameters_on_update( - self, - run_command: MagicMock, - mock_get_version: MagicMock, + self, run_command: MagicMock, mock_get_version: MagicMock ): helm_wrapper = Helm(helm_config=HelmConfig()) helm_wrapper.upgrade_install( @@ -176,9 +168,7 @@ def test_should_include_configured_tls_parameters_on_update( ) def test_should_call_run_command_method_when_helm_install_with_non_defaults( - self, - run_command: MagicMock, - mock_get_version: MagicMock, + self, run_command: MagicMock, mock_get_version: MagicMock ): helm_wrapper = Helm(helm_config=HelmConfig()) helm_wrapper.upgrade_install( @@ -223,9 +213,7 @@ def test_should_call_run_command_method_when_helm_install_with_non_defaults( ) def test_should_call_run_command_method_when_uninstalling_streams_app( - self, - run_command: MagicMock, - mock_get_version: MagicMock, + self, run_command: MagicMock, mock_get_version: MagicMock ): helm_wrapper = Helm(helm_config=HelmConfig()) helm_wrapper.uninstall( @@ -252,13 +240,11 @@ def test_should_log_warning_when_release_not_found( ) log_warning_mock.assert_called_once_with( - "Release with name test-release not found. Could not uninstall app.", + "Release with name test-release not found. Could not uninstall app." ) def test_should_call_run_command_method_when_installing_streams_app__with_dry_run( - self, - run_command: MagicMock, - mock_get_version: MagicMock, + self, run_command: MagicMock, mock_get_version: MagicMock ): helm_wrapper = Helm(helm_config=HelmConfig()) @@ -281,7 +267,7 @@ def test_should_call_run_command_method_when_installing_streams_app__with_dry_ru def test_validate_console_output(self): with pytest.raises(RuntimeError): Helm.parse_helm_command_stderr_output( - "A specific\n eRrOr was found in this line", + "A specific\n eRrOr was found in this line" ) with pytest.raises(ReleaseNotFoundException): Helm.parse_helm_command_stderr_output("New \nmessage\n ReLease: noT foUnD") @@ -289,13 +275,13 @@ def test_validate_console_output(self): Helm.parse_helm_command_stderr_output("This is \njust WaRnIng nothing more") except RuntimeError as e: pytest.fail( - f"validate_console_output() raised RuntimeError unexpectedly!\nError message: {e}", + f"validate_console_output() raised RuntimeError unexpectedly!\nError message: {e}" ) try: Helm.parse_helm_command_stderr_output("This is \njust WaRnIng nothing more") except ReleaseNotFoundException: pytest.fail( - f"validate_console_output() raised ReleaseNotFoundException unexpectedly!\nError message: {ReleaseNotFoundException}", + f"validate_console_output() raised ReleaseNotFoundException unexpectedly!\nError message: {ReleaseNotFoundException}" ) def test_helm_template_load(self): @@ -308,7 +294,7 @@ def test_helm_template_load(self): metadata: labels: foo: bar - """, + """ ) helm_template = HelmTemplate.load("test2.yaml", stdout) @@ -331,7 +317,7 @@ def test_load_manifest_with_no_notes(self): --- # Source: chart/templates/test3b.yaml foo: bar - """, + """ ) helm_templates = list(Helm.load_manifest(stdout)) assert 
len(helm_templates) == 2 @@ -348,7 +334,7 @@ def test_raise_parse_error_when_helm_content_is_invalid(self): """ --- # Resource: chart/templates/test1.yaml - """, + """ ) with pytest.raises(ParseError, match="Not a valid Helm template source"): list(Helm.load_manifest(stdout)) @@ -399,7 +385,7 @@ def test_load_manifest(self): NOTES: test - """, + """ ) helm_templates = list(Helm.load_manifest(stdout)) assert len(helm_templates) == 2 @@ -412,9 +398,7 @@ def test_load_manifest(self): assert helm_templates[1].template == {"foo": "bar"} def test_helm_get_manifest( - self, - run_command: MagicMock, - mock_get_version: MagicMock, + self, run_command: MagicMock, mock_get_version: MagicMock ): helm_wrapper = Helm(helm_config=HelmConfig()) run_command.return_value = dedent( @@ -424,10 +408,10 @@ def test_helm_get_manifest( data: - a: 1 - b: 2 - """, + """ ) helm_templates = list( - helm_wrapper.get_manifest("test-release", "test-namespace"), + helm_wrapper.get_manifest("test-release", "test-namespace") ) run_command.assert_called_once_with( command=[ @@ -447,9 +431,7 @@ def test_helm_get_manifest( assert helm_wrapper.get_manifest("test-release", "test-namespace") == () def test_should_call_run_command_method_when_helm_template_with_optional_args( - self, - run_command: MagicMock, - mock_get_version: MagicMock, + self, run_command: MagicMock, mock_get_version: MagicMock ): helm_wrapper = Helm(helm_config=HelmConfig()) @@ -487,9 +469,7 @@ def test_should_call_run_command_method_when_helm_template_with_optional_args( ) def test_should_call_run_command_method_when_helm_template_without_optional_args( - self, - run_command: MagicMock, - mock_get_version: MagicMock, + self, run_command: MagicMock, mock_get_version: MagicMock ): helm_wrapper = Helm(helm_config=HelmConfig()) @@ -545,8 +525,7 @@ def test_should_call_helm_version( assert helm._version == expected_version def test_should_raise_exception_if_helm_version_is_old( - self, - run_command: MagicMock, + self, run_command: MagicMock ): run_command.return_value = "v2.9.0+gc9f554d" with pytest.raises( @@ -556,12 +535,10 @@ def test_should_raise_exception_if_helm_version_is_old( Helm(helm_config=HelmConfig()) def test_should_raise_exception_if_helm_version_cannot_be_parsed( - self, - run_command: MagicMock, + self, run_command: MagicMock ): run_command.return_value = "123" with pytest.raises( - RuntimeError, - match="Could not parse the Helm version.\n\nHelm output:\n123", + RuntimeError, match="Could not parse the Helm version.\n\nHelm output:\n123" ): Helm(helm_config=HelmConfig()) diff --git a/tests/component_handlers/helm_wrapper/test_utils.py b/tests/component_handlers/helm_wrapper/test_utils.py index 8f40b0c5d..eef6ca14f 100644 --- a/tests/component_handlers/helm_wrapper/test_utils.py +++ b/tests/component_handlers/helm_wrapper/test_utils.py @@ -12,7 +12,7 @@ def test_trim_release_name_with_suffix(): def test_trim_release_name_without_suffix(): name = trim_release_name( - "example-component-name-too-long-fake-fakefakefakefakefake", + "example-component-name-too-long-fake-fakefakefakefakefake" ) assert name == "example-component-name-too-long-fake-fakefakefakefak" assert len(name) == 52 diff --git a/tests/component_handlers/kafka_connect/test_connect_handler.py b/tests/component_handlers/kafka_connect/test_connect_handler.py index fe6bc473e..db64690e9 100644 --- a/tests/component_handlers/kafka_connect/test_connect_handler.py +++ b/tests/component_handlers/kafka_connect/test_connect_handler.py @@ -25,25 +25,25 @@ class TestConnectorHandler: 
@pytest.fixture() def log_info_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch( - "kpops.component_handlers.kafka_connect.kafka_connect_handler.log.info", + "kpops.component_handlers.kafka_connect.kafka_connect_handler.log.info" ) @pytest.fixture() def log_warning_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch( - "kpops.component_handlers.kafka_connect.kafka_connect_handler.log.warning", + "kpops.component_handlers.kafka_connect.kafka_connect_handler.log.warning" ) @pytest.fixture() def log_error_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch( - "kpops.component_handlers.kafka_connect.kafka_connect_handler.log.error", + "kpops.component_handlers.kafka_connect.kafka_connect_handler.log.error" ) @pytest.fixture() def renderer_diff_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch( - "kpops.component_handlers.kafka_connect.kafka_connect_handler.render_diff", + "kpops.component_handlers.kafka_connect.kafka_connect_handler.render_diff" ) @staticmethod @@ -59,7 +59,7 @@ def connector_config(self) -> KafkaConnectorConfig: **{ "connector.class": "com.bakdata.connect.TestConnector", "name": CONNECTOR_NAME, - }, + } ) def test_should_create_connector_in_dry_run( @@ -75,15 +75,15 @@ def test_should_create_connector_in_dry_run( handler.create_connector(connector_config, dry_run=True) connector_wrapper.get_connector.assert_called_once_with(CONNECTOR_NAME) connector_wrapper.validate_connector_config.assert_called_once_with( - connector_config, + connector_config ) assert log_info_mock.mock_calls == [ mock.call.log_info( - f"Connector Creation: connector {CONNECTOR_NAME} already exists.", + f"Connector Creation: connector {CONNECTOR_NAME} already exists." ), mock.call.log_info( - f"Connector Creation: connector config for {CONNECTOR_NAME} is valid!", + f"Connector Creation: connector config for {CONNECTOR_NAME} is valid!" ), ] @@ -109,10 +109,10 @@ def test_should_log_correct_message_when_create_connector_and_connector_not_exis assert log_info_mock.mock_calls == [ mock.call( - f"Connector Creation: connector {CONNECTOR_NAME} does not exist. Creating connector with config:\n\x1b[32m+ connector.class: org.apache.kafka.connect.file.FileStreamSinkConnector\n\x1b[0m\x1b[32m+ name: {CONNECTOR_NAME}\n\x1b[0m\x1b[32m+ tasks.max: '1'\n\x1b[0m\x1b[32m+ topics: {TOPIC_NAME}\n\x1b[0m", + f"Connector Creation: connector {CONNECTOR_NAME} does not exist. Creating connector with config:\n\x1b[32m+ connector.class: org.apache.kafka.connect.file.FileStreamSinkConnector\n\x1b[0m\x1b[32m+ name: {CONNECTOR_NAME}\n\x1b[0m\x1b[32m+ tasks.max: '1'\n\x1b[0m\x1b[32m+ topics: {TOPIC_NAME}\n\x1b[0m" ), mock.call( - f"Connector Creation: connector config for {CONNECTOR_NAME} is valid!", + f"Connector Creation: connector config for {CONNECTOR_NAME} is valid!" 
), ] @@ -134,7 +134,7 @@ def test_should_log_correct_message_when_create_connector_and_connector_exists_i "tasks": [], } connector_wrapper.get_connector.return_value = KafkaConnectResponse( - **actual_response, + **actual_response ) configs = { @@ -147,25 +147,23 @@ def test_should_log_correct_message_when_create_connector_and_connector_exists_i handler.create_connector(connector_config, dry_run=True) connector_wrapper.get_connector.assert_called_once_with(CONNECTOR_NAME) connector_wrapper.validate_connector_config.assert_called_once_with( - connector_config, + connector_config ) assert log_info_mock.mock_calls == [ mock.call( - f"Connector Creation: connector {CONNECTOR_NAME} already exists.", + f"Connector Creation: connector {CONNECTOR_NAME} already exists." ), mock.call( - f"Updating config:\n connector.class: org.apache.kafka.connect.file.FileStreamSinkConnector\n name: {CONNECTOR_NAME}\n\x1b[31m- tasks.max: '1'\n\x1b[0m\x1b[33m? ^\n\x1b[0m\x1b[32m+ tasks.max: '2'\n\x1b[0m\x1b[33m? ^\n\x1b[0m topics: {TOPIC_NAME}\n", + f"Updating config:\n connector.class: org.apache.kafka.connect.file.FileStreamSinkConnector\n name: {CONNECTOR_NAME}\n\x1b[31m- tasks.max: '1'\n\x1b[0m\x1b[33m? ^\n\x1b[0m\x1b[32m+ tasks.max: '2'\n\x1b[0m\x1b[33m? ^\n\x1b[0m topics: {TOPIC_NAME}\n" ), mock.call( - f"Connector Creation: connector config for {CONNECTOR_NAME} is valid!", + f"Connector Creation: connector config for {CONNECTOR_NAME} is valid!" ), ] def test_should_log_invalid_config_when_create_connector_in_dry_run( - self, - connector_config: KafkaConnectorConfig, - renderer_diff_mock: MagicMock, + self, connector_config: KafkaConnectorConfig, renderer_diff_mock: MagicMock ): connector_wrapper = MagicMock() @@ -186,12 +184,11 @@ def test_should_log_invalid_config_when_create_connector_in_dry_run( handler.create_connector(connector_config, dry_run=True) connector_wrapper.validate_connector_config.assert_called_once_with( - connector_config, + connector_config ) def test_should_call_update_connector_config_when_connector_exists_not_dry_run( - self, - connector_config: KafkaConnectorConfig, + self, connector_config: KafkaConnectorConfig ): connector_wrapper = MagicMock() handler = self.connector_handler(connector_wrapper) @@ -204,8 +201,7 @@ def test_should_call_update_connector_config_when_connector_exists_not_dry_run( ] def test_should_call_create_connector_when_connector_does_not_exists_not_dry_run( - self, - connector_config: KafkaConnectorConfig, + self, connector_config: KafkaConnectorConfig ): connector_wrapper = MagicMock() @@ -228,8 +224,8 @@ def test_should_print_correct_log_when_destroying_connector_in_dry_run( log_info_mock.assert_called_once_with( magentaify( - f"Connector Destruction: connector {CONNECTOR_NAME} already exists. Deleting connector.", - ), + f"Connector Destruction: connector {CONNECTOR_NAME} already exists. Deleting connector." + ) ) def test_should_print_correct_warning_log_when_destroying_connector_and_connector_exists_in_dry_run( @@ -244,7 +240,7 @@ def test_should_print_correct_warning_log_when_destroying_connector_and_connecto handler.destroy_connector(CONNECTOR_NAME, dry_run=True) log_warning_mock.assert_called_once_with( - f"Connector Destruction: connector {CONNECTOR_NAME} does not exist and cannot be deleted. Skipping.", + f"Connector Destruction: connector {CONNECTOR_NAME} does not exist and cannot be deleted. Skipping." 
) def test_should_call_delete_connector_when_destroying_existing_connector_not_dry_run( @@ -271,5 +267,5 @@ def test_should_print_correct_warning_log_when_destroying_connector_and_connecto handler.destroy_connector(CONNECTOR_NAME, dry_run=False) log_warning_mock.assert_called_once_with( - f"Connector Destruction: the connector {CONNECTOR_NAME} does not exist. Skipping.", + f"Connector Destruction: the connector {CONNECTOR_NAME} does not exist. Skipping." ) diff --git a/tests/component_handlers/kafka_connect/test_connect_wrapper.py b/tests/component_handlers/kafka_connect/test_connect_wrapper.py index 1b1793109..8e60d92a7 100644 --- a/tests/component_handlers/kafka_connect/test_connect_wrapper.py +++ b/tests/component_handlers/kafka_connect/test_connect_wrapper.py @@ -40,7 +40,7 @@ def connector_config(self) -> KafkaConnectorConfig: **{ "connector.class": "com.bakdata.connect.TestConnector", "name": "test-connector", - }, + } ) def test_should_through_exception_when_host_is_not_set(self): @@ -58,8 +58,7 @@ def test_should_through_exception_when_host_is_not_set(self): @patch("httpx.post") def test_should_create_post_requests_for_given_connector_configuration( - self, - mock_post: MagicMock, + self, mock_post: MagicMock ): configs = { "connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector", @@ -85,9 +84,7 @@ def test_should_create_post_requests_for_given_connector_configuration( ) def test_should_return_correct_response_when_connector_created( - self, - httpx_mock: HTTPXMock, - connector_config: KafkaConnectorConfig, + self, httpx_mock: HTTPXMock, connector_config: KafkaConnectorConfig ): actual_response = { "name": "hdfs-sink-connector", @@ -138,7 +135,7 @@ def test_should_raise_connector_exists_exception_when_connector_exists( ) log_warning.assert_called_with( - "Rebalancing in progress while creating a connector... Retrying...", + "Rebalancing in progress while creating a connector... Retrying..." ) @patch("httpx.get") @@ -155,9 +152,7 @@ def test_should_create_correct_get_connector_request(self, mock_get: MagicMock): @pytest.mark.flaky(reruns=5, condition=sys.platform.startswith("win32")) @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.info") def test_should_return_correct_response_when_getting_connector( - self, - log_info: MagicMock, - httpx_mock: HTTPXMock, + self, log_info: MagicMock, httpx_mock: HTTPXMock ): connector_name = "test-connector" @@ -192,9 +187,7 @@ def test_should_return_correct_response_when_getting_connector( @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.info") def test_should_raise_connector_not_found_when_getting_connector( - self, - log_info: MagicMock, - httpx_mock: HTTPXMock, + self, log_info: MagicMock, httpx_mock: HTTPXMock ): connector_name = "test-connector" @@ -209,14 +202,12 @@ def test_should_raise_connector_not_found_when_getting_connector( self.connect_wrapper.get_connector(connector_name) log_info.assert_called_once_with( - f"The named connector {connector_name} does not exists.", + f"The named connector {connector_name} does not exists." 
) @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.warning") def test_should_raise_rebalance_in_progress_when_getting_connector( - self, - log_warning: MagicMock, - httpx_mock: HTTPXMock, + self, log_warning: MagicMock, httpx_mock: HTTPXMock ): connector_name = "test-connector" @@ -234,7 +225,7 @@ def test_should_raise_rebalance_in_progress_when_getting_connector( ) log_warning.assert_called_with( - "Rebalancing in progress while getting a connector... Retrying...", + "Rebalancing in progress while getting a connector... Retrying..." ) @patch("httpx.put") @@ -252,7 +243,7 @@ def test_should_create_correct_update_connector_request(self, mock_put: MagicMoc } with pytest.raises(KafkaConnectError): self.connect_wrapper.update_connector_config( - KafkaConnectorConfig(**configs), + KafkaConnectorConfig(**configs) ) mock_put.assert_called_with( @@ -296,11 +287,11 @@ def test_should_return_correct_response_when_update_connector( status_code=200, ) expected_response = self.connect_wrapper.update_connector_config( - connector_config, + connector_config ) assert KafkaConnectResponse(**actual_response) == expected_response log_info.assert_called_once_with( - f"Config for connector {connector_name} updated.", + f"Config for connector {connector_name} updated." ) @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.info") @@ -338,7 +329,7 @@ def test_should_return_correct_response_when_update_connector_created( status_code=201, ) expected_response = self.connect_wrapper.update_connector_config( - connector_config, + connector_config ) assert KafkaConnectResponse(**actual_response) == expected_response log_info.assert_called_once_with(f"Connector {connector_name} created.") @@ -366,13 +357,12 @@ def test_should_raise_connector_exists_exception_when_update_connector( ) log_warning.assert_called_with( - "Rebalancing in progress while updating a connector... Retrying...", + "Rebalancing in progress while updating a connector... Retrying..." ) @patch("httpx.delete") def test_should_create_correct_delete_connector_request( - self, - mock_delete: MagicMock, + self, mock_delete: MagicMock ): connector_name = "test-connector" with pytest.raises(KafkaConnectError): @@ -385,9 +375,7 @@ def test_should_create_correct_delete_connector_request( @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.info") def test_should_return_correct_response_when_deleting_connector( - self, - log_info: MagicMock, - httpx_mock: HTTPXMock, + self, log_info: MagicMock, httpx_mock: HTTPXMock ): connector_name = "test-connector" @@ -422,9 +410,7 @@ def test_should_return_correct_response_when_deleting_connector( @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.info") def test_should_raise_connector_not_found_when_deleting_connector( - self, - log_info: MagicMock, - httpx_mock: HTTPXMock, + self, log_info: MagicMock, httpx_mock: HTTPXMock ): connector_name = "test-connector" @@ -439,14 +425,12 @@ def test_should_raise_connector_not_found_when_deleting_connector( self.connect_wrapper.delete_connector(connector_name) log_info.assert_called_once_with( - f"The named connector {connector_name} does not exists.", + f"The named connector {connector_name} does not exists." 
) @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.warning") def test_should_raise_rebalance_in_progress_when_deleting_connector( - self, - log_warning: MagicMock, - httpx_mock: HTTPXMock, + self, log_warning: MagicMock, httpx_mock: HTTPXMock ): connector_name = "test-connector" @@ -464,13 +448,12 @@ def test_should_raise_rebalance_in_progress_when_deleting_connector( ) log_warning.assert_called_with( - "Rebalancing in progress while deleting a connector... Retrying...", + "Rebalancing in progress while deleting a connector... Retrying..." ) @patch("httpx.put") def test_should_create_correct_validate_connector_config_request( - self, - mock_put: MagicMock, + self, mock_put: MagicMock ): connector_config = KafkaConnectorConfig( **{ @@ -478,7 +461,7 @@ def test_should_create_correct_validate_connector_config_request( "name": "FileStreamSinkConnector", "tasks.max": "1", "topics": "test-topic", - }, + } ) with pytest.raises(KafkaConnectError): self.connect_wrapper.validate_connector_config(connector_config) @@ -491,8 +474,7 @@ def test_should_create_correct_validate_connector_config_request( @patch("httpx.put") def test_should_create_correct_validate_connector_config_and_name_gets_added( - self, - mock_put: MagicMock, + self, mock_put: MagicMock ): connector_name = "FileStreamSinkConnector" configs = { @@ -503,7 +485,7 @@ def test_should_create_correct_validate_connector_config_and_name_gets_added( } with pytest.raises(KafkaConnectError): self.connect_wrapper.validate_connector_config( - KafkaConnectorConfig(**configs), + KafkaConnectorConfig(**configs) ) mock_put.assert_called_with( @@ -532,9 +514,9 @@ def test_should_parse_validate_connector_config(self, httpx_mock: HTTPXMock): "topics": "test-topic", } errors = self.connect_wrapper.validate_connector_config( - KafkaConnectorConfig(**configs), + KafkaConnectorConfig(**configs) ) assert errors == [ - "Found error for field file: Missing required configuration 'file' which has no default value.", + "Found error for field file: Missing required configuration 'file' which has no default value." 
] diff --git a/tests/component_handlers/schema_handler/resources/module.py b/tests/component_handlers/schema_handler/resources/module.py index 7be7b4fca..8c7168efa 100644 --- a/tests/component_handlers/schema_handler/resources/module.py +++ b/tests/component_handlers/schema_handler/resources/module.py @@ -9,8 +9,6 @@ class CustomSchemaProvider(SchemaProvider): def provide_schema( - self, - schema_class: str, - models: dict[ModelName, ModelVersion], + self, schema_class: str, models: dict[ModelName, ModelVersion] ) -> Schema: return AvroSchema({}) diff --git a/tests/component_handlers/schema_handler/test_schema_handler.py b/tests/component_handlers/schema_handler/test_schema_handler.py index df516de19..8f5b0f29e 100644 --- a/tests/component_handlers/schema_handler/test_schema_handler.py +++ b/tests/component_handlers/schema_handler/test_schema_handler.py @@ -28,28 +28,28 @@ @pytest.fixture(autouse=True) def log_info_mock(mocker: MockerFixture) -> MagicMock: return mocker.patch( - "kpops.component_handlers.schema_handler.schema_handler.log.info", + "kpops.component_handlers.schema_handler.schema_handler.log.info" ) @pytest.fixture(autouse=True) def log_debug_mock(mocker: MockerFixture) -> MagicMock: return mocker.patch( - "kpops.component_handlers.schema_handler.schema_handler.log.debug", + "kpops.component_handlers.schema_handler.schema_handler.log.debug" ) @pytest.fixture(autouse=False) def find_class_mock(mocker: MockerFixture) -> MagicMock: return mocker.patch( - "kpops.component_handlers.schema_handler.schema_handler.find_class", + "kpops.component_handlers.schema_handler.schema_handler.find_class" ) @pytest.fixture(autouse=True) def schema_registry_mock(mocker: MockerFixture) -> MagicMock: schema_registry_mock = mocker.patch( - "kpops.component_handlers.schema_handler.schema_handler.SchemaRegistryClient", + "kpops.component_handlers.schema_handler.schema_handler.SchemaRegistryClient" ) return schema_registry_mock.return_value @@ -96,19 +96,16 @@ def test_should_lazy_load_schema_provider(find_class_mock: MagicMock): schema_registry_url="http://localhost:8081", ) schema_handler = SchemaHandler.load_schema_handler( - TEST_SCHEMA_PROVIDER_MODULE, - config_enable, + TEST_SCHEMA_PROVIDER_MODULE, config_enable ) assert schema_handler is not None schema_handler.schema_provider.provide_schema( - "com.bakdata.kpops.test.SchemaHandlerTest", - {}, + "com.bakdata.kpops.test.SchemaHandlerTest", {} ) schema_handler.schema_provider.provide_schema( - "com.bakdata.kpops.test.SomeOtherSchemaClass", - {}, + "com.bakdata.kpops.test.SomeOtherSchemaClass", {} ) find_class_mock.assert_called_once_with(TEST_SCHEMA_PROVIDER_MODULE, SchemaProvider) @@ -116,8 +113,7 @@ def test_should_lazy_load_schema_provider(find_class_mock: MagicMock): def test_should_raise_value_error_if_schema_provider_class_not_found(): schema_handler = SchemaHandler( - url="http://mock:8081", - components_module=NON_EXISTING_PROVIDER_MODULE, + url="http://mock:8081", components_module=NON_EXISTING_PROVIDER_MODULE ) with pytest.raises( @@ -127,8 +123,7 @@ def test_should_raise_value_error_if_schema_provider_class_not_found(): f"{SchemaProvider.__module__}.{SchemaProvider.__name__}.", ): schema_handler.schema_provider.provide_schema( - "com.bakdata.kpops.test.SchemaHandlerTest", - {}, + "com.bakdata.kpops.test.SchemaHandlerTest", {} ) @@ -160,19 +155,15 @@ def test_should_raise_value_error_when_schema_provider_is_called_and_components_ match="The Schema Registry URL is set but you haven't specified the component module path. 
Please provide a valid component module path where your SchemaProvider implementation exists.", ): schema_handler.schema_provider.provide_schema( - "com.bakdata.kpops.test.SchemaHandlerTest", - {}, + "com.bakdata.kpops.test.SchemaHandlerTest", {} ) def test_should_log_info_when_submit_schemas_that_not_exists_and_dry_run_true( - to_section: ToSection, - log_info_mock: MagicMock, - schema_registry_mock: MagicMock, + to_section: ToSection, log_info_mock: MagicMock, schema_registry_mock: MagicMock ): schema_handler = SchemaHandler( - url="http://mock:8081", - components_module=TEST_SCHEMA_PROVIDER_MODULE, + url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE ) schema_registry_mock.get_versions.return_value = [] @@ -180,7 +171,7 @@ def test_should_log_info_when_submit_schemas_that_not_exists_and_dry_run_true( schema_handler.submit_schemas(to_section, True) log_info_mock.assert_called_once_with( - greenify("Schema Submission: The subject topic-X-value will be submitted."), + greenify("Schema Submission: The subject topic-X-value will be submitted.") ) schema_registry_mock.register.assert_not_called() @@ -192,8 +183,7 @@ def test_should_log_info_when_submit_schemas_that_exists_and_dry_run_true( schema_registry_mock: MagicMock, ): schema_handler = SchemaHandler( - url="http://mock:8081", - components_module=TEST_SCHEMA_PROVIDER_MODULE, + url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE ) schema_registry_mock.get_versions.return_value = [1, 2, 3] @@ -203,7 +193,7 @@ def test_should_log_info_when_submit_schemas_that_exists_and_dry_run_true( schema_handler.submit_schemas(to_section, True) log_info_mock.assert_called_once_with( - f"Schema Submission: compatible schema for topic-X-value with model {topic_config.value_schema}.", + f"Schema Submission: compatible schema for topic-X-value with model {topic_config.value_schema}." ) schema_registry_mock.register.assert_not_called() @@ -215,8 +205,7 @@ def test_should_raise_exception_when_submit_schema_that_exists_and_not_compatibl ): schema_provider = TestSchemaProvider() schema_handler = SchemaHandler( - url="http://mock:8081", - components_module=TEST_SCHEMA_PROVIDER_MODULE, + url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE ) schema_class = "com.bakdata.kpops.test.SchemaHandlerTest" @@ -255,8 +244,7 @@ def test_should_log_debug_when_submit_schema_that_exists_and_registered_under_ve ): schema_provider = TestSchemaProvider() schema_handler = SchemaHandler( - url="http://mock:8081", - components_module=TEST_SCHEMA_PROVIDER_MODULE, + url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE ) schema_class = "com.bakdata.kpops.test.SchemaHandlerTest" schema = schema_provider.provide_schema(schema_class, {}) @@ -269,13 +257,13 @@ def test_should_log_debug_when_submit_schema_that_exists_and_registered_under_ve assert log_info_mock.mock_calls == [ mock.call( - f"Schema Submission: compatible schema for topic-X-value with model {topic_config.value_schema}.", + f"Schema Submission: compatible schema for topic-X-value with model {topic_config.value_schema}." ), ] assert log_debug_mock.mock_calls == [ mock.call( - f"Schema Submission: schema was already submitted for the subject topic-X-value as version {registered_version.schema}. Therefore, the specified schema must be compatible.", + f"Schema Submission: schema was already submitted for the subject topic-X-value as version {registered_version.schema}. Therefore, the specified schema must be compatible." 
), ] @@ -292,8 +280,7 @@ def test_should_submit_non_existing_schema_when_not_dry( schema_class = "com.bakdata.kpops.test.SchemaHandlerTest" schema = schema_provider.provide_schema(schema_class, {}) schema_handler = SchemaHandler( - url="http://mock:8081", - components_module=TEST_SCHEMA_PROVIDER_MODULE, + url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE ) schema_registry_mock.get_versions.return_value = [] @@ -302,13 +289,12 @@ def test_should_submit_non_existing_schema_when_not_dry( subject = "topic-X-value" log_info_mock.assert_called_once_with( - f"Schema Submission: schema submitted for {subject} with model {topic_config.value_schema}.", + f"Schema Submission: schema submitted for {subject} with model {topic_config.value_schema}." ) schema_registry_mock.get_versions.assert_not_called() schema_registry_mock.register.assert_called_once_with( - subject=subject, - schema=schema, + subject=subject, schema=schema ) @@ -318,8 +304,7 @@ def test_should_log_correct_message_when_delete_schemas_and_in_dry_run( schema_registry_mock: MagicMock, ): schema_handler = SchemaHandler( - url="http://mock:8081", - components_module=TEST_SCHEMA_PROVIDER_MODULE, + url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE ) schema_registry_mock.get_versions.return_value = [] @@ -327,19 +312,17 @@ def test_should_log_correct_message_when_delete_schemas_and_in_dry_run( schema_handler.delete_schemas(to_section, True) log_info_mock.assert_called_once_with( - magentaify("Schema Deletion: will delete subject topic-X-value."), + magentaify("Schema Deletion: will delete subject topic-X-value.") ) schema_registry_mock.delete_subject.assert_not_called() def test_should_delete_schemas_when_not_in_dry_run( - to_section: ToSection, - schema_registry_mock: MagicMock, + to_section: ToSection, schema_registry_mock: MagicMock ): schema_handler = SchemaHandler( - url="http://mock:8081", - components_module=TEST_SCHEMA_PROVIDER_MODULE, + url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE ) schema_registry_mock.get_versions.return_value = [] diff --git a/tests/component_handlers/topic/test_proxy_wrapper.py b/tests/component_handlers/topic/test_proxy_wrapper.py index f0e121dd7..e26fb0e5a 100644 --- a/tests/component_handlers/topic/test_proxy_wrapper.py +++ b/tests/component_handlers/topic/test_proxy_wrapper.py @@ -32,9 +32,7 @@ def log_debug_mock(self, mocker: MockerFixture) -> MagicMock: @pytest.fixture(autouse=True) def _setup(self, httpx_mock: HTTPXMock): config = PipelineConfig( - defaults_path=DEFAULTS_PATH, - environment="development", - kafka_rest_host=HOST, + defaults_path=DEFAULTS_PATH, environment="development", kafka_rest_host=HOST ) self.proxy_wrapper = ProxyWrapper(pipeline_config=config) @@ -63,8 +61,7 @@ def test_should_raise_exception_when_host_is_not_set(self): @patch("httpx.post") def test_should_create_topic_with_all_topic_configuration( - self, - mock_post: MagicMock, + self, mock_post: MagicMock ): topic_spec = { "topic_name": "topic-X", @@ -130,7 +127,7 @@ def test_should_call_batch_alter_topic_config(self, mock_put: MagicMock): "data": [ {"name": "cleanup.policy", "operation": "DELETE"}, {"name": "compression.type", "value": "gzip"}, - ], + ] }, ) @@ -157,9 +154,7 @@ def test_should_call_get_broker_config(self, mock_get: MagicMock): ) def test_should_log_topic_creation( - self, - log_info_mock: MagicMock, - httpx_mock: HTTPXMock, + self, log_info_mock: MagicMock, httpx_mock: HTTPXMock ): topic_spec = { "topic_name": "topic-X", @@ -182,9 +177,7 @@ def 
test_should_log_topic_creation( log_info_mock.assert_called_once_with("Topic topic-X created.") def test_should_log_topic_deletion( - self, - log_info_mock: MagicMock, - httpx_mock: HTTPXMock, + self, log_info_mock: MagicMock, httpx_mock: HTTPXMock ): topic_name = "topic-X" @@ -231,9 +224,7 @@ def test_should_get_topic(self, log_debug_mock: MagicMock, httpx_mock: HTTPXMock assert get_topic_response == topic_response def test_should_rais_topic_not_found_exception_get_topic( - self, - log_debug_mock: MagicMock, - httpx_mock: HTTPXMock, + self, log_debug_mock: MagicMock, httpx_mock: HTTPXMock ): topic_name = "topic-X" @@ -252,9 +243,7 @@ def test_should_rais_topic_not_found_exception_get_topic( log_debug_mock.assert_any_call("Topic topic-X not found.") def test_should_log_reset_default_topic_config_when_deleted( - self, - log_info_mock: MagicMock, - httpx_mock: HTTPXMock, + self, log_info_mock: MagicMock, httpx_mock: HTTPXMock ): topic_name = "topic-X" config_name = "cleanup.policy" @@ -273,5 +262,5 @@ def test_should_log_reset_default_topic_config_when_deleted( ) log_info_mock.assert_called_once_with( - f"Config of topic {topic_name} was altered.", + f"Config of topic {topic_name} was altered." ) diff --git a/tests/component_handlers/topic/test_topic_handler.py b/tests/component_handlers/topic/test_topic_handler.py index aeb04f6c0..6b1b017fc 100644 --- a/tests/component_handlers/topic/test_topic_handler.py +++ b/tests/component_handlers/topic/test_topic_handler.py @@ -70,7 +70,7 @@ def get_topic_response_mock(self) -> MagicMock: wrapper.get_topic.return_value = TopicResponse(**response) wrapper.get_broker_config.return_value = BrokerConfigResponse(**broker_response) wrapper.get_topic_config.return_value = TopicConfigResponse( - **response_topic_config, + **response_topic_config ) return wrapper @@ -121,8 +121,7 @@ def test_should_call_create_topic_with_dry_run_false(self): wrapper.__dry_run_topic_creation.assert_not_called() def test_should_call_update_topic_config_when_topic_exists_and_with_dry_run_false( - self, - get_topic_response_mock: MagicMock, + self, get_topic_response_mock: MagicMock ): wrapper = get_topic_response_mock topic_handler = TopicHandler(proxy_wrapper=wrapper) @@ -148,9 +147,7 @@ def test_should_call_update_topic_config_when_topic_exists_and_with_dry_run_fals wrapper.__dry_run_topic_creation.assert_not_called() def test_should_update_topic_config_when_one_config_changed( - self, - log_info_mock: MagicMock, - get_topic_response_mock: MagicMock, + self, log_info_mock: MagicMock, get_topic_response_mock: MagicMock ): wrapper = get_topic_response_mock @@ -172,9 +169,7 @@ def test_should_update_topic_config_when_one_config_changed( ) def test_should_not_update_topic_config_when_config_not_changed( - self, - log_info_mock: MagicMock, - get_topic_response_mock: MagicMock, + self, log_info_mock: MagicMock, get_topic_response_mock: MagicMock ): wrapper = get_topic_response_mock @@ -192,13 +187,11 @@ def test_should_not_update_topic_config_when_config_not_changed( wrapper.batch_alter_topic_config.assert_not_called() log_info_mock.assert_called_once_with( - "Topic Creation: config of topic topic-X didn't change. Skipping update.", + "Topic Creation: config of topic topic-X didn't change. Skipping update." 
) def test_should_not_update_topic_config_when_config_not_changed_and_not_ordered( - self, - log_info_mock: MagicMock, - get_topic_response_mock: MagicMock, + self, log_info_mock: MagicMock, get_topic_response_mock: MagicMock ): wrapper = get_topic_response_mock topic_handler = TopicHandler(proxy_wrapper=wrapper) @@ -215,12 +208,11 @@ def test_should_not_update_topic_config_when_config_not_changed_and_not_ordered( wrapper.batch_alter_topic_config.assert_not_called() log_info_mock.assert_called_once_with( - "Topic Creation: config of topic topic-X didn't change. Skipping update.", + "Topic Creation: config of topic topic-X didn't change. Skipping update." ) def test_should_call_reset_topic_config_when_topic_exists_dry_run_false_and_topic_configs_change( - self, - get_topic_response_mock: MagicMock, + self, get_topic_response_mock: MagicMock ): wrapper = get_topic_response_mock @@ -260,8 +252,7 @@ def test_should_not_call_create_topics_with_dry_run_true_and_topic_not_exists(se wrapper.create_topic.assert_not_called() def test_should_print_message_with_dry_run_true_and_topic_not_exists( - self, - log_info_mock: MagicMock, + self, log_info_mock: MagicMock ): wrapper = MagicMock() wrapper.get_topic.side_effect = TopicNotFoundException() @@ -281,8 +272,8 @@ def test_should_print_message_with_dry_run_true_and_topic_not_exists( log_info_mock.assert_called_once_with( greenify( - "Topic Creation: topic-X does not exist in the cluster. Creating topic.", - ), + "Topic Creation: topic-X does not exist in the cluster. Creating topic." + ) ) def test_should_print_message_if_dry_run_and_topic_exists_with_same_partition_count_and_replication_factor( @@ -305,19 +296,19 @@ def test_should_print_message_if_dry_run_and_topic_exists_with_same_partition_co topic_handler.create_topics(to_section=to_section, dry_run=True) wrapper.get_topic_config.assert_called_once() # dry run requests the config to create the diff assert log_info_mock.mock_calls == [ - mock.call("Topic Creation: topic-X already exists in cluster."), + mock.call("Topic Creation: topic-X already exists in cluster.") ] assert log_debug_mock.mock_calls == [ mock.call("HTTP/1.1 400 Bad Request"), mock.call({"Content-Type": "application/json"}), mock.call( - {"error_code": 40002, "message": "Topic 'topic-X' already exists."}, + {"error_code": 40002, "message": "Topic 'topic-X' already exists."} ), mock.call( - "Topic Creation: partition count of topic topic-X did not change. Current partitions count 10. Updating configs.", + "Topic Creation: partition count of topic topic-X did not change. Current partitions count 10. Updating configs." ), mock.call( - "Topic Creation: replication factor of topic topic-X did not change. Current replication factor 3. Updating configs.", + "Topic Creation: replication factor of topic topic-X did not change. Current replication factor 3. Updating configs." 
), ] @@ -341,7 +332,7 @@ def test_should_print_message_if_dry_run_and_topic_exists_with_default_partition assert log_info_mock.mock_calls == [ mock.call("Config changes for topic topic-X:"), mock.call( - "\n\x1b[32m+ cleanup.policy: compact\n\x1b[0m\x1b[32m+ compression.type: gzip\n\x1b[0m", + "\n\x1b[32m+ cleanup.policy: compact\n\x1b[0m\x1b[32m+ compression.type: gzip\n\x1b[0m" ), mock.call("Topic Creation: topic-X already exists in cluster."), ] @@ -349,19 +340,18 @@ def test_should_print_message_if_dry_run_and_topic_exists_with_default_partition mock.call("HTTP/1.1 400 Bad Request"), mock.call({"Content-Type": "application/json"}), mock.call( - {"error_code": 40002, "message": "Topic 'topic-X' already exists."}, + {"error_code": 40002, "message": "Topic 'topic-X' already exists."} ), mock.call( - "Topic Creation: partition count of topic topic-X did not change. Current partitions count 1. Updating configs.", + "Topic Creation: partition count of topic topic-X did not change. Current partitions count 1. Updating configs." ), mock.call( - "Topic Creation: replication factor of topic topic-X did not change. Current replication factor 1. Updating configs.", + "Topic Creation: replication factor of topic topic-X did not change. Current replication factor 1. Updating configs." ), ] def test_should_exit_if_dry_run_and_topic_exists_different_partition_count( - self, - get_topic_response_mock: MagicMock, + self, get_topic_response_mock: MagicMock ): wrapper = get_topic_response_mock @@ -383,8 +373,7 @@ def test_should_exit_if_dry_run_and_topic_exists_different_partition_count( wrapper.get_topic_config.assert_called_once() # dry run requests the config to create the diff def test_should_exit_if_dry_run_and_topic_exists_different_replication_factor( - self, - get_topic_response_mock: MagicMock, + self, get_topic_response_mock: MagicMock ): wrapper = get_topic_response_mock @@ -406,9 +395,7 @@ def test_should_exit_if_dry_run_and_topic_exists_different_replication_factor( wrapper.get_topic_config.assert_called_once() # dry run requests the config to create the diff def test_should_log_correct_message_when_delete_existing_topic_dry_run( - self, - log_info_mock: MagicMock, - get_topic_response_mock: MagicMock, + self, log_info_mock: MagicMock, get_topic_response_mock: MagicMock ): wrapper = get_topic_response_mock @@ -427,13 +414,12 @@ def test_should_log_correct_message_when_delete_existing_topic_dry_run( wrapper.get_topic.assert_called_once_with(topic_name="topic-X") log_info_mock.assert_called_once_with( magentaify( - "Topic Deletion: topic topic-X exists in the cluster. Deleting topic.", - ), + "Topic Deletion: topic topic-X exists in the cluster. Deleting topic." + ) ) def test_should_log_correct_message_when_delete_non_existing_topic_dry_run( - self, - log_warning_mock: MagicMock, + self, log_warning_mock: MagicMock ): wrapper = MagicMock() wrapper.get_topic.side_effect = TopicNotFoundException @@ -452,7 +438,7 @@ def test_should_log_correct_message_when_delete_non_existing_topic_dry_run( wrapper.get_topic.assert_called_once_with(topic_name="topic-X") log_warning_mock.assert_called_once_with( - "Topic Deletion: topic topic-X does not exist in the cluster and cannot be deleted. Skipping.", + "Topic Deletion: topic topic-X does not exist in the cluster and cannot be deleted. Skipping." 
) def test_should_call_delete_topic_not_dry_run(self): @@ -475,8 +461,7 @@ def test_should_call_delete_topic_not_dry_run(self): ] def test_should_print_correct_warning_when_deleting_topic_that_does_not_exists_not_dry_run( - self, - log_warning_mock: MagicMock, + self, log_warning_mock: MagicMock ): wrapper = MagicMock() topic_handler = TopicHandler(proxy_wrapper=wrapper) @@ -494,5 +479,5 @@ def test_should_print_correct_warning_when_deleting_topic_that_does_not_exists_n wrapper.get_topic.assert_called_once_with(topic_name="topic-X") log_warning_mock.assert_called_once_with( - "Topic Deletion: topic topic-X does not exist in the cluster and cannot be deleted. Skipping.", + "Topic Deletion: topic topic-X does not exist in the cluster and cannot be deleted. Skipping." ) diff --git a/tests/component_handlers/topic/test_utils.py b/tests/component_handlers/topic/test_utils.py index 0d3bd1170..b5f0133ca 100644 --- a/tests/component_handlers/topic/test_utils.py +++ b/tests/component_handlers/topic/test_utils.py @@ -86,7 +86,7 @@ "name": "log.flush.interval.messages", "source": "DEFAULT_CONFIG", "value": "9223372036854775807", - }, + } ], "topic_name": "fake", "value": "9223372036854775807", @@ -108,7 +108,7 @@ "name": "flush.ms", "source": "DEFAULT_CONFIG", "value": "9223372036854775807", - }, + } ], "topic_name": "fake", "value": "9223372036854775807", @@ -247,7 +247,7 @@ def test_get_effective_config(): ], }, ], - }, + } ) effective_config = get_effective_config( diff --git a/tests/components/test_base_defaults_component.py b/tests/components/test_base_defaults_component.py index 176303851..d066d431b 100644 --- a/tests/components/test_base_defaults_component.py +++ b/tests/components/test_base_defaults_component.py @@ -77,9 +77,7 @@ class TestBaseDefaultsComponent: ], ) def test_load_defaults( - self, - component_class: type[BaseDefaultsComponent], - defaults: dict, + self, component_class: type[BaseDefaultsComponent], defaults: dict ): assert ( load_defaults(component_class, DEFAULTS_PATH / "defaults.yaml") == defaults @@ -107,9 +105,7 @@ def test_load_defaults( ], ) def test_load_defaults_with_environment( - self, - component_class: type[BaseDefaultsComponent], - defaults: dict, + self, component_class: type[BaseDefaultsComponent], defaults: dict ): assert ( load_defaults( @@ -121,9 +117,7 @@ def test_load_defaults_with_environment( ) def test_inherit_defaults( - self, - config: PipelineConfig, - handlers: ComponentHandlers, + self, config: PipelineConfig, handlers: ComponentHandlers ): component = Child(config=config, handlers=handlers) @@ -131,7 +125,7 @@ def test_inherit_defaults( component.name == "fake-child-name" ), "Child default should overwrite parent default" assert component.nice == { - "fake-value": "fake", + "fake-value": "fake" }, "Field introduce by child should be added" assert ( component.value == 2.0 @@ -154,7 +148,7 @@ def test_inherit(self, config: PipelineConfig, handlers: ComponentHandlers): component.name == "name-defined-in-pipeline_generator" ), "Kwargs should should overwrite all other values" assert component.nice == { - "fake-value": "fake", + "fake-value": "fake" }, "Field introduce by child should be added" assert ( component.value == 2.0 @@ -167,9 +161,7 @@ def test_inherit(self, config: PipelineConfig, handlers: ComponentHandlers): ), "Defaults in code should be kept for parents" def test_multiple_generations( - self, - config: PipelineConfig, - handlers: ComponentHandlers, + self, config: PipelineConfig, handlers: ComponentHandlers ): component = 
GrandChild(config=config, handlers=handlers) @@ -177,7 +169,7 @@ def test_multiple_generations( component.name == "fake-child-name" ), "Child default should overwrite parent default" assert component.nice == { - "fake-value": "fake", + "fake-value": "fake" }, "Field introduce by child should be added" assert ( component.value == 2.0 @@ -191,13 +183,11 @@ def test_multiple_generations( assert component.grand_child == "grand-child-value" def test_env_var_substitution( - self, - config: PipelineConfig, - handlers: ComponentHandlers, + self, config: PipelineConfig, handlers: ComponentHandlers ): ENV["pipeline_name"] = str(DEFAULTS_PATH) component = EnvVarTest(config=config, handlers=handlers) assert component.name == str( - DEFAULTS_PATH, + DEFAULTS_PATH ), "Environment variables should be substituted" diff --git a/tests/components/test_kafka_app.py b/tests/components/test_kafka_app.py index d39d2f6bc..8fd0d98ec 100644 --- a/tests/components/test_kafka_app.py +++ b/tests/components/test_kafka_app.py @@ -80,8 +80,7 @@ def test_should_deploy_kafka_app( ) helm_upgrade_install = mocker.patch.object(kafka_app.helm, "upgrade_install") print_helm_diff = mocker.patch.object( - kafka_app.dry_run_handler, - "print_helm_diff", + kafka_app.dry_run_handler, "print_helm_diff" ) mocker.patch.object( KafkaApp, diff --git a/tests/components/test_kafka_connector.py b/tests/components/test_kafka_connector.py index ce831d0d4..2adf867da 100644 --- a/tests/components/test_kafka_connector.py +++ b/tests/components/test_kafka_connector.py @@ -42,13 +42,13 @@ def handlers(self) -> ComponentHandlers: @pytest.fixture(autouse=True) def helm_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch( - "kpops.components.base_components.kafka_connector.Helm", + "kpops.components.base_components.kafka_connector.Helm" ).return_value @pytest.fixture() def dry_run_handler(self, mocker: MockerFixture) -> MagicMock: return mocker.patch( - "kpops.components.base_components.kafka_connector.DryRunHandler", + "kpops.components.base_components.kafka_connector.DryRunHandler" ).return_value @pytest.fixture() @@ -57,7 +57,7 @@ def connector_config(self) -> KafkaConnectorConfig: **{ "connector.class": CONNECTOR_CLASS, "name": CONNECTOR_FULL_NAME, - }, + } ) def test_connector_config_name_override( @@ -85,8 +85,7 @@ def test_connector_config_name_override( assert connector.app.name == CONNECTOR_FULL_NAME with pytest.raises( - ValueError, - match="Connector name should be the same as component name", + ValueError, match="Connector name should be the same as component name" ): KafkaConnector( name=CONNECTOR_NAME, @@ -97,8 +96,7 @@ def test_connector_config_name_override( ) with pytest.raises( - ValueError, - match="Connector name should be the same as component name", + ValueError, match="Connector name should be the same as component name" ): KafkaConnector( name=CONNECTOR_NAME, diff --git a/tests/components/test_kafka_sink_connector.py b/tests/components/test_kafka_sink_connector.py index 30f02f6a4..e8ed7aa22 100644 --- a/tests/components/test_kafka_sink_connector.py +++ b/tests/components/test_kafka_sink_connector.py @@ -55,10 +55,9 @@ def connector( to=ToSection( topics={ TopicName("${output_topic_name}"): TopicConfig( - type=OutputTopicTypes.OUTPUT, - partitions_count=10, + type=OutputTopicTypes.OUTPUT, partitions_count=10 ), - }, + } ), ) @@ -74,7 +73,7 @@ def test_connector_config_parsing( config=config, handlers=handlers, app=KafkaConnectorConfig( - **{**connector_config.dict(), "topics": topic_name}, + 
**{**connector_config.dict(), "topics": topic_name} ), namespace="test-namespace", ) @@ -86,7 +85,7 @@ def test_connector_config_parsing( config=config, handlers=handlers, app=KafkaConnectorConfig( - **{**connector_config.dict(), "topics.regex": topic_pattern}, + **{**connector_config.dict(), "topics.regex": topic_pattern} ), namespace="test-namespace", ) @@ -110,7 +109,7 @@ def test_from_section_parsing_input_topic( topics={ topic1: FromTopic(type=InputTopicTypes.INPUT), topic2: FromTopic(type=InputTopicTypes.INPUT), - }, + } ), ) assert getattr(connector.app, "topics") == f"{topic1},{topic2}" @@ -133,7 +132,7 @@ def test_from_section_parsing_input_pattern( app=connector_config, namespace="test-namespace", from_=FromSection( # pyright: ignore[reportGeneralTypeIssues] wrong diagnostic when using TopicName as topics key type - topics={topic_pattern: FromTopic(type=InputTopicTypes.PATTERN)}, + topics={topic_pattern: FromTopic(type=InputTopicTypes.PATTERN)} ), ) assert getattr(connector.app, "topics.regex") == topic_pattern @@ -144,12 +143,10 @@ def test_deploy_order( mocker: MockerFixture, ): mock_create_topics = mocker.patch.object( - connector.handlers.topic_handler, - "create_topics", + connector.handlers.topic_handler, "create_topics" ) mock_create_connector = mocker.patch.object( - connector.handlers.connector_handler, - "create_connector", + connector.handlers.connector_handler, "create_connector" ) mock = mocker.MagicMock() @@ -167,15 +164,13 @@ def test_destroy( mocker: MockerFixture, ): mock_destroy_connector = mocker.patch.object( - connector.handlers.connector_handler, - "destroy_connector", + connector.handlers.connector_handler, "destroy_connector" ) connector.destroy(dry_run=True) mock_destroy_connector.assert_called_once_with( - CONNECTOR_FULL_NAME, - dry_run=True, + CONNECTOR_FULL_NAME, dry_run=True ) def test_reset_when_dry_run_is_true( @@ -196,12 +191,10 @@ def test_reset_when_dry_run_is_false( mocker: MockerFixture, ): mock_delete_topics = mocker.patch.object( - connector.handlers.topic_handler, - "delete_topics", + connector.handlers.topic_handler, "delete_topics" ) mock_clean_connector = mocker.patch.object( - connector.handlers.connector_handler, - "clean_connector", + connector.handlers.connector_handler, "clean_connector" ) mock = mocker.MagicMock() mock.attach_mock(mock_clean_connector, "mock_clean_connector") @@ -271,12 +264,10 @@ def test_clean_when_dry_run_is_false( mocker: MockerFixture, ): mock_delete_topics = mocker.patch.object( - connector.handlers.topic_handler, - "delete_topics", + connector.handlers.topic_handler, "delete_topics" ) mock_clean_connector = mocker.patch.object( - connector.handlers.connector_handler, - "clean_connector", + connector.handlers.connector_handler, "clean_connector" ) mock = mocker.MagicMock() @@ -290,13 +281,13 @@ def test_clean_when_dry_run_is_false( assert log_info_mock.mock_calls == [ call.log_info( magentaify( - f"Connector Cleanup: uninstalling cleanup job Helm release from previous runs for {CONNECTOR_FULL_NAME}", - ), + f"Connector Cleanup: uninstalling cleanup job Helm release from previous runs for {CONNECTOR_FULL_NAME}" + ) ), call.log_info( magentaify( - f"Connector Cleanup: deploy Connect {KafkaConnectorType.SINK.value} resetter for {CONNECTOR_FULL_NAME}", - ), + f"Connector Cleanup: deploy Connect {KafkaConnectorType.SINK.value} resetter for {CONNECTOR_FULL_NAME}" + ) ), call.log_info(magentaify("Connector Cleanup: uninstall Kafka Resetter.")), ] @@ -378,12 +369,10 @@ def test_clean_without_to_when_dry_run_is_false( 
) mock_delete_topics = mocker.patch.object( - connector.handlers.topic_handler, - "delete_topics", + connector.handlers.topic_handler, "delete_topics" ) mock_clean_connector = mocker.patch.object( - connector.handlers.connector_handler, - "clean_connector", + connector.handlers.connector_handler, "clean_connector" ) mock = mocker.MagicMock() mock.attach_mock(mock_delete_topics, "mock_delete_topics") diff --git a/tests/components/test_kafka_source_connector.py b/tests/components/test_kafka_source_connector.py index 4ed187884..169111ed3 100644 --- a/tests/components/test_kafka_source_connector.py +++ b/tests/components/test_kafka_source_connector.py @@ -48,10 +48,9 @@ def connector( to=ToSection( topics={ TopicName("${output_topic_name}"): TopicConfig( - type=OutputTopicTypes.OUTPUT, - partitions_count=10, + type=OutputTopicTypes.OUTPUT, partitions_count=10 ), - }, + } ), offset_topic="kafka-connect-offsets", ) @@ -72,9 +71,9 @@ def test_from_section_raises_exception( from_=FromSection( # pyright: ignore[reportGeneralTypeIssues] wrong diagnostic when using TopicName as topics key type topics={ TopicName("connector-topic"): FromTopic( - type=InputTopicTypes.INPUT, + type=InputTopicTypes.INPUT ), - }, + } ), ) @@ -84,13 +83,11 @@ def test_deploy_order( mocker: MockerFixture, ): mock_create_topics = mocker.patch.object( - connector.handlers.topic_handler, - "create_topics", + connector.handlers.topic_handler, "create_topics" ) mock_create_connector = mocker.patch.object( - connector.handlers.connector_handler, - "create_connector", + connector.handlers.connector_handler, "create_connector" ) mock = mocker.MagicMock() @@ -111,15 +108,13 @@ def test_destroy( assert connector.handlers.connector_handler mock_destroy_connector = mocker.patch.object( - connector.handlers.connector_handler, - "destroy_connector", + connector.handlers.connector_handler, "destroy_connector" ) connector.destroy(dry_run=True) mock_destroy_connector.assert_called_once_with( - CONNECTOR_FULL_NAME, - dry_run=True, + CONNECTOR_FULL_NAME, dry_run=True ) def test_reset_when_dry_run_is_true( @@ -142,12 +137,10 @@ def test_reset_when_dry_run_is_false( ): assert connector.handlers.connector_handler mock_delete_topics = mocker.patch.object( - connector.handlers.topic_handler, - "delete_topics", + connector.handlers.topic_handler, "delete_topics" ) mock_clean_connector = mocker.spy( - connector.handlers.connector_handler, - "clean_connector", + connector.handlers.connector_handler, "clean_connector" ) mock = mocker.MagicMock() @@ -217,12 +210,10 @@ def test_clean_when_dry_run_is_false( assert connector.handlers.connector_handler mock_delete_topics = mocker.patch.object( - connector.handlers.topic_handler, - "delete_topics", + connector.handlers.topic_handler, "delete_topics" ) mock_clean_connector = mocker.spy( - connector.handlers.connector_handler, - "clean_connector", + connector.handlers.connector_handler, "clean_connector" ) mock = mocker.MagicMock() @@ -295,12 +286,10 @@ def test_clean_without_to_when_dry_run_is_false( assert connector.handlers.connector_handler mock_delete_topics = mocker.patch.object( - connector.handlers.topic_handler, - "delete_topics", + connector.handlers.topic_handler, "delete_topics" ) mock_clean_connector = mocker.spy( - connector.handlers.connector_handler, - "clean_connector", + connector.handlers.connector_handler, "clean_connector" ) mock = mocker.MagicMock() diff --git a/tests/components/test_kubernetes_app.py b/tests/components/test_kubernetes_app.py index a3fc7281b..6583ac4bf 100644 --- 
a/tests/components/test_kubernetes_app.py +++ b/tests/components/test_kubernetes_app.py @@ -46,7 +46,7 @@ def handlers(self) -> ComponentHandlers: @pytest.fixture() def helm_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch( - "kpops.components.base_components.kubernetes_app.Helm", + "kpops.components.base_components.kubernetes_app.Helm" ).return_value @pytest.fixture() @@ -113,8 +113,7 @@ def test_should_lazy_load_helm_wrapper_and_call_repo_add_when_implemented( app_value: KubernetesTestValue, ): repo_config = HelmRepoConfig( - repository_name="test-repo", - url="https://test.com/charts/", + repository_name="test-repo", url="https://test.com/charts/" ) kubernetes_app = KubernetesApp( name="test-kubernetes-app", @@ -212,9 +211,7 @@ def test_should_call_helm_uninstall_when_destroying_kubernetes_app( kubernetes_app.destroy(True) helm_mock.uninstall.assert_called_once_with( - "test-namespace", - "${pipeline_name}-test-kubernetes-app", - True, + "test-namespace", "${pipeline_name}-test-kubernetes-app", True ) log_info_mock.assert_called_once_with(magentaify(stdout)) @@ -227,8 +224,7 @@ def test_should_raise_value_error_when_name_is_not_valid( repo_config: HelmRepoConfig, ): with pytest.raises( - ValueError, - match=r"The component name .* is invalid for Kubernetes.", + ValueError, match=r"The component name .* is invalid for Kubernetes." ): KubernetesApp( name="Not-Compatible*", @@ -240,8 +236,7 @@ def test_should_raise_value_error_when_name_is_not_valid( ) with pytest.raises( - ValueError, - match=r"The component name .* is invalid for Kubernetes.", + ValueError, match=r"The component name .* is invalid for Kubernetes." ): KubernetesApp( name="snake_case*", diff --git a/tests/components/test_producer_app.py b/tests/components/test_producer_app.py index f6f4bb659..84f9f86c6 100644 --- a/tests/components/test_producer_app.py +++ b/tests/components/test_producer_app.py @@ -42,9 +42,7 @@ def config(self) -> PipelineConfig: @pytest.fixture() def producer_app( - self, - config: PipelineConfig, - handlers: ComponentHandlers, + self, config: PipelineConfig, handlers: ComponentHandlers ) -> ProducerApp: return ProducerApp( name=self.PRODUCER_APP_NAME, @@ -60,10 +58,9 @@ def producer_app( "to": { "topics": { "${output_topic_name}": TopicConfig( - type=OutputTopicTypes.OUTPUT, - partitions_count=10, + type=OutputTopicTypes.OUTPUT, partitions_count=10 ), - }, + } }, }, ) @@ -82,21 +79,20 @@ def test_output_topics(self, config: PipelineConfig, handlers: ComponentHandlers "to": { "topics": { "${output_topic_name}": TopicConfig( - type=OutputTopicTypes.OUTPUT, - partitions_count=10, + type=OutputTopicTypes.OUTPUT, partitions_count=10 ), "extra-topic-1": TopicConfig( role="first-extra-topic", partitions_count=10, ), - }, + } }, }, ) assert producer_app.app.streams.output_topic == "${output_topic_name}" assert producer_app.app.streams.extra_output_topics == { - "first-extra-topic": "extra-topic-1", + "first-extra-topic": "extra-topic-1" } def test_deploy_order_when_dry_run_is_false( @@ -105,13 +101,11 @@ def test_deploy_order_when_dry_run_is_false( mocker: MockerFixture, ): mock_create_topics = mocker.patch.object( - producer_app.handlers.topic_handler, - "create_topics", + producer_app.handlers.topic_handler, "create_topics" ) mock_helm_upgrade_install = mocker.patch.object( - producer_app.helm, - "upgrade_install", + producer_app.helm, "upgrade_install" ) mock = mocker.MagicMock() @@ -156,9 +150,7 @@ def test_destroy( producer_app.destroy(dry_run=True) 
mock_helm_uninstall.assert_called_once_with( - "test-namespace", - "${pipeline_name}-" + self.PRODUCER_APP_NAME, - True, + "test-namespace", "${pipeline_name}-" + self.PRODUCER_APP_NAME, True ) def test_should_not_reset_producer_app( @@ -167,13 +159,11 @@ def test_should_not_reset_producer_app( mocker: MockerFixture, ): mock_helm_upgrade_install = mocker.patch.object( - producer_app.helm, - "upgrade_install", + producer_app.helm, "upgrade_install" ) mock_helm_uninstall = mocker.patch.object(producer_app.helm, "uninstall") mock_helm_print_helm_diff = mocker.patch.object( - producer_app.dry_run_handler, - "print_helm_diff", + producer_app.dry_run_handler, "print_helm_diff" ) mock = mocker.MagicMock() @@ -215,13 +205,10 @@ def test_should_not_reset_producer_app( ] def test_should_clean_producer_app_and_deploy_clean_up_job_and_delete_clean_up_with_dry_run_false( - self, - mocker: MockerFixture, - producer_app: ProducerApp, + self, mocker: MockerFixture, producer_app: ProducerApp ): mock_helm_upgrade_install = mocker.patch.object( - producer_app.helm, - "upgrade_install", + producer_app.helm, "upgrade_install" ) mock_helm_uninstall = mocker.patch.object(producer_app.helm, "uninstall") diff --git a/tests/components/test_streams_app.py b/tests/components/test_streams_app.py index 071be0095..0d9135b54 100644 --- a/tests/components/test_streams_app.py +++ b/tests/components/test_streams_app.py @@ -47,9 +47,7 @@ def config(self) -> PipelineConfig: @pytest.fixture() def streams_app( - self, - config: PipelineConfig, - handlers: ComponentHandlers, + self, config: PipelineConfig, handlers: ComponentHandlers ) -> StreamsApp: return StreamsApp( name=self.STREAMS_APP_NAME, @@ -63,10 +61,9 @@ def streams_app( "to": { "topics": { "${output_topic_name}": TopicConfig( - type=OutputTopicTypes.OUTPUT, - partitions_count=10, + type=OutputTopicTypes.OUTPUT, partitions_count=10 ), - }, + } }, }, ) @@ -94,7 +91,7 @@ def test_set_topics(self, config: PipelineConfig, handlers: ComponentHandlers): "type": "pattern", "role": "another-pattern", }, - }, + } }, }, ) @@ -105,7 +102,7 @@ def test_set_topics(self, config: PipelineConfig, handlers: ComponentHandlers): assert streams_app.app.streams.input_topics == ["example-input", "b", "a"] assert streams_app.app.streams.input_pattern == ".*" assert streams_app.app.streams.extra_input_patterns == { - "another-pattern": "example.*", + "another-pattern": "example.*" } helm_values = streams_app.to_helm_values() @@ -116,9 +113,7 @@ def test_set_topics(self, config: PipelineConfig, handlers: ComponentHandlers): assert "extraInputPatterns" in streams_config def test_no_empty_input_topic( - self, - config: PipelineConfig, - handlers: ComponentHandlers, + self, config: PipelineConfig, handlers: ComponentHandlers ): streams_app = StreamsApp( name=self.STREAMS_APP_NAME, @@ -132,7 +127,7 @@ def test_no_empty_input_topic( "from": { "topics": { ".*": {"type": "pattern"}, - }, + } }, }, ) @@ -151,8 +146,7 @@ def test_no_empty_input_topic( def test_should_validate(self, config: PipelineConfig, handlers: ComponentHandlers): # An exception should be raised when both role and type are defined and type is input with pytest.raises( - ValueError, - match="Define role only if `type` is `pattern` or `None`", + ValueError, match="Define role only if `type` is `pattern` or `None`" ): StreamsApp( name=self.STREAMS_APP_NAME, @@ -168,16 +162,15 @@ def test_should_validate(self, config: PipelineConfig, handlers: ComponentHandle "topic-input": { "type": "input", "role": "role", - }, - }, + } + } }, }, ) 
# An exception should be raised when both role and type are defined and type is error with pytest.raises( - ValueError, - match="Define `role` only if `type` is undefined", + ValueError, match="Define `role` only if `type` is undefined" ): StreamsApp( name=self.STREAMS_APP_NAME, @@ -193,16 +186,14 @@ def test_should_validate(self, config: PipelineConfig, handlers: ComponentHandle "topic-input": { "type": "error", "role": "role", - }, - }, + } + } }, }, ) def test_set_streams_output_from_to( - self, - config: PipelineConfig, - handlers: ComponentHandlers, + self, config: PipelineConfig, handlers: ComponentHandlers ): streams_app = StreamsApp( name=self.STREAMS_APP_NAME, @@ -216,12 +207,10 @@ def test_set_streams_output_from_to( "to": { "topics": { "${output_topic_name}": TopicConfig( - type=OutputTopicTypes.OUTPUT, - partitions_count=10, + type=OutputTopicTypes.OUTPUT, partitions_count=10 ), "${error_topic_name}": TopicConfig( - type=OutputTopicTypes.ERROR, - partitions_count=10, + type=OutputTopicTypes.ERROR, partitions_count=10 ), "extra-topic-1": TopicConfig( role="first-extra-topic", @@ -231,7 +220,7 @@ def test_set_streams_output_from_to( role="second-extra-topic", partitions_count=10, ), - }, + } }, }, ) @@ -243,9 +232,7 @@ def test_set_streams_output_from_to( assert streams_app.app.streams.error_topic == "${error_topic_name}" def test_weave_inputs_from_prev_component( - self, - config: PipelineConfig, - handlers: ComponentHandlers, + self, config: PipelineConfig, handlers: ComponentHandlers ): streams_app = StreamsApp( name=self.STREAMS_APP_NAME, @@ -263,23 +250,19 @@ def test_weave_inputs_from_prev_component( ToSection( topics={ TopicName("prev-output-topic"): TopicConfig( - type=OutputTopicTypes.OUTPUT, - partitions_count=10, + type=OutputTopicTypes.OUTPUT, partitions_count=10 ), TopicName("b"): TopicConfig( - type=OutputTopicTypes.OUTPUT, - partitions_count=10, + type=OutputTopicTypes.OUTPUT, partitions_count=10 ), TopicName("a"): TopicConfig( - type=OutputTopicTypes.OUTPUT, - partitions_count=10, + type=OutputTopicTypes.OUTPUT, partitions_count=10 ), TopicName("prev-error-topic"): TopicConfig( - type=OutputTopicTypes.ERROR, - partitions_count=10, + type=OutputTopicTypes.ERROR, partitions_count=10 ), - }, - ), + } + ) ) assert streams_app.app.streams.input_topics == ["prev-output-topic", "b", "a"] @@ -302,12 +285,10 @@ def test_deploy_order_when_dry_run_is_false( "to": { "topics": { "${output_topic_name}": TopicConfig( - type=OutputTopicTypes.OUTPUT, - partitions_count=10, + type=OutputTopicTypes.OUTPUT, partitions_count=10 ), "${error_topic_name}": TopicConfig( - type=OutputTopicTypes.ERROR, - partitions_count=10, + type=OutputTopicTypes.ERROR, partitions_count=10 ), "extra-topic-1": TopicConfig( role="first-extra-topic", @@ -317,17 +298,15 @@ def test_deploy_order_when_dry_run_is_false( role="second-extra-topic", partitions_count=10, ), - }, + } }, }, ) mock_create_topics = mocker.patch.object( - streams_app.handlers.topic_handler, - "create_topics", + streams_app.handlers.topic_handler, "create_topics" ) mock_helm_upgrade_install = mocker.patch.object( - streams_app.helm, - "upgrade_install", + streams_app.helm, "upgrade_install" ) mock = mocker.MagicMock() @@ -353,7 +332,7 @@ def test_deploy_order_when_dry_run_is_false( }, "outputTopic": "${output_topic_name}", "errorTopic": "${error_topic_name}", - }, + } }, HelmUpgradeInstallFlags( create_namespace=False, @@ -376,19 +355,14 @@ def test_destroy(self, streams_app: StreamsApp, mocker: MockerFixture): 
streams_app.destroy(dry_run=True) mock_helm_uninstall.assert_called_once_with( - "test-namespace", - "${pipeline_name}-" + self.STREAMS_APP_NAME, - True, + "test-namespace", "${pipeline_name}-" + self.STREAMS_APP_NAME, True ) def test_reset_when_dry_run_is_false( - self, - streams_app: StreamsApp, - mocker: MockerFixture, + self, streams_app: StreamsApp, mocker: MockerFixture ): mock_helm_upgrade_install = mocker.patch.object( - streams_app.helm, - "upgrade_install", + streams_app.helm, "upgrade_install" ) mock_helm_uninstall = mocker.patch.object(streams_app.helm, "uninstall") @@ -432,8 +406,7 @@ def test_should_clean_streams_app_and_deploy_clean_up_job_and_delete_clean_up( mocker: MockerFixture, ): mock_helm_upgrade_install = mocker.patch.object( - streams_app.helm, - "upgrade_install", + streams_app.helm, "upgrade_install" ) mock_helm_uninstall = mocker.patch.object(streams_app.helm, "uninstall") diff --git a/tests/pipeline/test_components/components.py b/tests/pipeline/test_components/components.py index b1739e972..86e2c8b8e 100644 --- a/tests/pipeline/test_components/components.py +++ b/tests/pipeline/test_components/components.py @@ -52,13 +52,12 @@ def inflate(self) -> list[PipelineComponent]: to=ToSection( topics={ TopicName("${component_type}"): TopicConfig( - type=OutputTopicTypes.OUTPUT, + type=OutputTopicTypes.OUTPUT ), TopicName("${component_name}"): TopicConfig( - type=None, - role="test", + type=None, role="test" ), - }, + } ), ) inflate_steps.append(kafka_connector) @@ -69,9 +68,9 @@ def inflate(self) -> list[PipelineComponent]: to=ToSection( # type: ignore[reportGeneralTypeIssues] topics={ TopicName( - f"{self.full_name}-" + "${component_name}", - ): TopicConfig(type=OutputTopicTypes.OUTPUT), - }, + f"{self.full_name}-" + "${component_name}" + ): TopicConfig(type=OutputTopicTypes.OUTPUT) + } ).dict(), ) inflate_steps.append(streams_app) @@ -81,9 +80,7 @@ def inflate(self) -> list[PipelineComponent]: class TestSchemaProvider(SchemaProvider): def provide_schema( - self, - schema_class: str, - models: dict[ModelName, ModelVersion], + self, schema_class: str, models: dict[ModelName, ModelVersion] ) -> Schema: schema = { "type": "record", diff --git a/tests/pipeline/test_components_without_schema_handler/components.py b/tests/pipeline/test_components_without_schema_handler/components.py index 9ea414a9d..d5684178c 100644 --- a/tests/pipeline/test_components_without_schema_handler/components.py +++ b/tests/pipeline/test_components_without_schema_handler/components.py @@ -33,7 +33,7 @@ def inflate(self) -> list[PipelineComponent]: **{ "topics": topic_name, "transforms.changeTopic.replacement": f"{topic_name}-index-v1", - }, + } ), ) inflate_steps.append(kafka_connector) diff --git a/tests/pipeline/test_pipeline.py b/tests/pipeline/test_pipeline.py index ceda59d80..433960e74 100644 --- a/tests/pipeline/test_pipeline.py +++ b/tests/pipeline/test_pipeline.py @@ -48,8 +48,7 @@ def test_load_pipeline(self, snapshot: SnapshotTest): snapshot.assert_match(enriched_pipeline, "test-pipeline") def test_generate_with_steps_flag_should_write_log_warning( - self, - caplog: pytest.LogCaptureFixture, + self, caplog: pytest.LogCaptureFixture ): result = runner.invoke( app, @@ -73,7 +72,7 @@ def test_generate_with_steps_flag_should_write_log_warning( logging.WARNING, "The following flags are considered only when `--template` is set: \n \ '--steps'", - ), + ) ] assert result.exit_code == 0 diff --git a/tests/utils/test_dict_ops.py b/tests/utils/test_dict_ops.py index e9a02fe5b..1ea410770 100644 --- 
a/tests/utils/test_dict_ops.py +++ b/tests/utils/test_dict_ops.py @@ -70,7 +70,7 @@ class SimpleModel(BaseModel): }, }, problems=99, - ).json(), + ).json() ) existing_substitution = { "key1": "Everything", diff --git a/tests/utils/test_diff.py b/tests/utils/test_diff.py index 81b66b2cd..f2ffeac88 100644 --- a/tests/utils/test_diff.py +++ b/tests/utils/test_diff.py @@ -186,7 +186,7 @@ def test_render_diff(d1: dict, d2: dict, ignore: set[str] | None, output: str | diff_type=DiffType.CHANGE, key="a.b", change=Change(old_value=1, new_value=2), - ), + ) ], ), ], diff --git a/tests/utils/test_environment.py b/tests/utils/test_environment.py index e1da952b3..8fc02c826 100644 --- a/tests/utils/test_environment.py +++ b/tests/utils/test_environment.py @@ -91,8 +91,7 @@ def test_windows_behaviour_keys_transformation(system, fake_environment_windows) @patch("platform.system") def test_windows_behaviour_keys_transformation_as_kwargs( - system, - fake_environment_windows, + system, fake_environment_windows ): system.return_value = "Windows" environment = Environment(**fake_environment_windows) From 1e03e1a59565b73c9e6fef9b95932f2b74061f1c Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 14:21:57 +0300 Subject: [PATCH 51/76] test: whether matcher works --- .github/ruff-matcher.json | 18 ++++++++ .github/workflows/ci.yaml | 4 +- kpops/component_handlers/helm_wrapper/helm.py | 44 ++++++++----------- 3 files changed, 40 insertions(+), 26 deletions(-) create mode 100644 .github/ruff-matcher.json diff --git a/.github/ruff-matcher.json b/.github/ruff-matcher.json new file mode 100644 index 000000000..6a582326a --- /dev/null +++ b/.github/ruff-matcher.json @@ -0,0 +1,18 @@ +{ + "problemMatcher": [ + { + "owner": "ruff", + "pattern": [ + { + "regexp": "/^(.*)\/(.+\\.py):(\\d+):(\\d+):\\s([\\da-zA-Z]+)\\s(.*)$/mg", + "fromPath": 1, + "file": 2, + "line": 3, + "column": 4, + "code": 5, + "message": 6 + } + ] + } + ] +} diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index df10833f5..79dcd2e5d 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -36,7 +36,9 @@ jobs: run: poetry install --no-interaction - name: Lint (ruff) - run: poetry run pre-commit run ruff --all-files --show-diff-on-failure + run: | + echo "::add-matcher::.github/ruff-matcher.json" + poetry run pre-commit run ruff --all-files --show-diff-on-failure - name: Formatting (black) run: poetry run pre-commit run black --all-files --show-diff-on-failure diff --git a/kpops/component_handlers/helm_wrapper/helm.py b/kpops/component_handlers/helm_wrapper/helm.py index b1b101b41..2ad3f5f01 100644 --- a/kpops/component_handlers/helm_wrapper/helm.py +++ b/kpops/component_handlers/helm_wrapper/helm.py @@ -4,7 +4,8 @@ import re import subprocess import tempfile -from typing import TYPE_CHECKING +from collections.abc import Iterator +from typing import Iterable import yaml @@ -19,9 +20,6 @@ Version, ) -if TYPE_CHECKING: - from collections.abc import Iterable, Iterator - log = logging.getLogger("Helm") @@ -31,17 +29,16 @@ def __init__(self, helm_config: HelmConfig) -> None: self._debug = helm_config.debug self._version = self.get_version() if self._version.major != 3: - msg = f"The supported Helm version is 3.x.x. The current Helm version is {self._version.major}.{self._version.minor}.{self._version.patch}" - raise RuntimeError(msg) + raise RuntimeError( + f"The supported Helm version is 3.x.x. 
The current Helm version is {self._version.major}.{self._version.minor}.{self._version.patch}" + ) def add_repo( self, repository_name: str, repository_url: str, - repo_auth_flags: RepoAuthFlags | None = None, + repo_auth_flags: RepoAuthFlags = RepoAuthFlags(), ) -> None: - if repo_auth_flags is None: - repo_auth_flags = RepoAuthFlags() command = [ "helm", "repo", @@ -53,7 +50,7 @@ def add_repo( try: self.__execute(command) - except (ReleaseNotFoundException, RuntimeError) as e: + except Exception as e: if ( len(e.args) == 1 and re.match( @@ -62,9 +59,9 @@ def add_repo( ) is not None ): - log.exception(f"Could not add repository {repository_name}.") + log.error(f"Could not add repository {repository_name}. {e}") else: - raise + raise e if self._version.minor > 7: self.__execute(["helm", "repo", "update", repository_name]) @@ -78,11 +75,9 @@ def upgrade_install( dry_run: bool, namespace: str, values: dict, - flags: HelmUpgradeInstallFlags | None = None, + flags: HelmUpgradeInstallFlags = HelmUpgradeInstallFlags(), ) -> str: - """Prepare and execute the `helm upgrade --install` command.""" - if flags is None: - flags = HelmUpgradeInstallFlags() + """Prepares and executes the `helm upgrade --install` command""" with tempfile.NamedTemporaryFile("w") as values_file: yaml.safe_dump(values, values_file) @@ -108,7 +103,7 @@ def uninstall( release_name: str, dry_run: bool, ) -> str | None: - """Prepare and execute the helm uninstall command.""" + """Prepares and executes the helm uninstall command""" command = [ "helm", "uninstall", @@ -131,7 +126,7 @@ def template( chart: str, namespace: str, values: dict, - flags: HelmTemplateFlags | None = None, + flags: HelmTemplateFlags = HelmTemplateFlags(), ) -> str: """From HELM: Render chart templates locally and display the output. 
@@ -146,8 +141,6 @@ def template( :param flags: the flags to be set for `helm template`, defaults to HelmTemplateFlags() :return: the output of `helm template` """ - if flags is None: - flags = HelmTemplateFlags() with tempfile.NamedTemporaryFile("w") as values_file: yaml.safe_dump(values, values_file) command = [ @@ -184,8 +177,9 @@ def get_version(self) -> Version: short_version = self.__execute(command) version_match = re.search(r"^v(\d+(?:\.\d+){0,2})", short_version) if version_match is None: - msg = f"Could not parse the Helm version.\n\nHelm output:\n{short_version}" - raise RuntimeError(msg) + raise RuntimeError( + f"Could not parse the Helm version.\n\nHelm output:\n{short_version}" + ) version = map(int, version_match.group(1).split(".")) return Version(*version) @@ -212,8 +206,8 @@ def __execute(self, command: list[str]) -> str: log.debug(f"Executing {' '.join(command)}") process = subprocess.run( command, - check=True, - capture_output=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, text=True, ) Helm.parse_helm_command_stderr_output(process.stderr) @@ -234,7 +228,7 @@ def parse_helm_command_stderr_output(stderr: str) -> None: for line in stderr.splitlines(): lower = line.lower() if "release: not found" in lower: - raise ReleaseNotFoundException + raise ReleaseNotFoundException() elif "error" in lower: raise RuntimeError(stderr) elif "warning" in lower: From 2fab62506a0370d2a6d50f7be58bc3eb7b221b5a Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 14:34:34 +0300 Subject: [PATCH 52/76] test: problem checker --- kpops/component_handlers/helm_wrapper/helm.py | 42 ++++++++++--------- 1 file changed, 23 insertions(+), 19 deletions(-) diff --git a/kpops/component_handlers/helm_wrapper/helm.py b/kpops/component_handlers/helm_wrapper/helm.py index 2ad3f5f01..7039f7150 100644 --- a/kpops/component_handlers/helm_wrapper/helm.py +++ b/kpops/component_handlers/helm_wrapper/helm.py @@ -4,8 +4,7 @@ import re import subprocess import tempfile -from collections.abc import Iterator -from typing import Iterable +from typing import TYPE_CHECKING import yaml @@ -19,6 +18,7 @@ RepoAuthFlags, Version, ) +from collections.abc import Iterable, Iterator log = logging.getLogger("Helm") @@ -29,16 +29,17 @@ def __init__(self, helm_config: HelmConfig) -> None: self._debug = helm_config.debug self._version = self.get_version() if self._version.major != 3: - raise RuntimeError( - f"The supported Helm version is 3.x.x. The current Helm version is {self._version.major}.{self._version.minor}.{self._version.patch}" - ) + msg = f"The supported Helm version is 3.x.x. The current Helm version is {self._version.major}.{self._version.minor}.{self._version.patch}" + raise RuntimeError(msg) def add_repo( self, repository_name: str, repository_url: str, - repo_auth_flags: RepoAuthFlags = RepoAuthFlags(), + repo_auth_flags: RepoAuthFlags | None = None, ) -> None: + if repo_auth_flags is None: + repo_auth_flags = RepoAuthFlags() command = [ "helm", "repo", @@ -50,7 +51,7 @@ def add_repo( try: self.__execute(command) - except Exception as e: + except (ReleaseNotFoundException, RuntimeError) as e: if ( len(e.args) == 1 and re.match( @@ -59,9 +60,9 @@ def add_repo( ) is not None ): - log.error(f"Could not add repository {repository_name}. 
{e}") + log.exception(f"Could not add repository {repository_name}.") else: - raise e + raise if self._version.minor > 7: self.__execute(["helm", "repo", "update", repository_name]) @@ -75,9 +76,11 @@ def upgrade_install( dry_run: bool, namespace: str, values: dict, - flags: HelmUpgradeInstallFlags = HelmUpgradeInstallFlags(), + flags: HelmUpgradeInstallFlags | None = None, ) -> str: - """Prepares and executes the `helm upgrade --install` command""" + """Prepare and execute the `helm upgrade --install` command.""" + if flags is None: + flags = HelmUpgradeInstallFlags() with tempfile.NamedTemporaryFile("w") as values_file: yaml.safe_dump(values, values_file) @@ -103,7 +106,7 @@ def uninstall( release_name: str, dry_run: bool, ) -> str | None: - """Prepares and executes the helm uninstall command""" + """Prepare and execute the helm uninstall command.""" command = [ "helm", "uninstall", @@ -126,7 +129,7 @@ def template( chart: str, namespace: str, values: dict, - flags: HelmTemplateFlags = HelmTemplateFlags(), + flags: HelmTemplateFlags | None = None, ) -> str: """From HELM: Render chart templates locally and display the output. @@ -141,6 +144,8 @@ def template( :param flags: the flags to be set for `helm template`, defaults to HelmTemplateFlags() :return: the output of `helm template` """ + if flags is None: + flags = HelmTemplateFlags() with tempfile.NamedTemporaryFile("w") as values_file: yaml.safe_dump(values, values_file) command = [ @@ -177,9 +182,8 @@ def get_version(self) -> Version: short_version = self.__execute(command) version_match = re.search(r"^v(\d+(?:\.\d+){0,2})", short_version) if version_match is None: - raise RuntimeError( - f"Could not parse the Helm version.\n\nHelm output:\n{short_version}" - ) + msg = f"Could not parse the Helm version.\n\nHelm output:\n{short_version}" + raise RuntimeError(msg) version = map(int, version_match.group(1).split(".")) return Version(*version) @@ -206,8 +210,8 @@ def __execute(self, command: list[str]) -> str: log.debug(f"Executing {' '.join(command)}") process = subprocess.run( command, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, + check=True, + capture_output=True, text=True, ) Helm.parse_helm_command_stderr_output(process.stderr) @@ -228,7 +232,7 @@ def parse_helm_command_stderr_output(stderr: str) -> None: for line in stderr.splitlines(): lower = line.lower() if "release: not found" in lower: - raise ReleaseNotFoundException() + raise ReleaseNotFoundException elif "error" in lower: raise RuntimeError(stderr) elif "warning" in lower: From e3b71221a428a0dc560bf8f8c778fc01774d0aff Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 14:48:12 +0300 Subject: [PATCH 53/76] fix: ci --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 79dcd2e5d..e15e571e8 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -38,7 +38,7 @@ jobs: - name: Lint (ruff) run: | echo "::add-matcher::.github/ruff-matcher.json" - poetry run pre-commit run ruff --all-files --show-diff-on-failure + poetry run pre-commit run ruff --all-files --show-diff-on-failure --output-format "text" - name: Formatting (black) run: poetry run pre-commit run black --all-files --show-diff-on-failure From 884445f1e872aa646ddfad374d012dbd7c918ada Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 14:53:36 +0300 Subject: [PATCH 54/76] ci(ruff): format output as "text" in the CI only --- .github/workflows/ci.yaml | 2 +- 
.pre-commit-config.yaml | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index e15e571e8..0e8c32875 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -38,7 +38,7 @@ jobs: - name: Lint (ruff) run: | echo "::add-matcher::.github/ruff-matcher.json" - poetry run pre-commit run ruff --all-files --show-diff-on-failure --output-format "text" + poetry run pre-commit run --hook-stage manual ruff-ci --all-files --show-diff-on-failure - name: Formatting (black) run: poetry run pre-commit run black --all-files --show-diff-on-failure diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4cfcb0651..10b0dc9b2 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -7,6 +7,13 @@ repos: args: [ --config, pyproject.toml, --fix, --show-fixes, --exit-non-zero-on-fix ] language: system types_or: [python] + - id: ruff-ci + name: ruff-ci + entry: ruff + args: [ --config, pyproject.toml, --fix, --show-fixes, --exit-non-zero-on-fix, --format-output "text" ] + language: system + types_or: [python] + stages: [manual] - repo: local hooks: - id: black From 0eb104dec716fa46c5d8033758c1e37505a8c060 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 14:55:13 +0300 Subject: [PATCH 55/76] fix(ruff): pre-commit wrong arg --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 10b0dc9b2..028858db7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -10,7 +10,7 @@ repos: - id: ruff-ci name: ruff-ci entry: ruff - args: [ --config, pyproject.toml, --fix, --show-fixes, --exit-non-zero-on-fix, --format-output "text" ] + args: [ --config, pyproject.toml, --fix, --show-fixes, --exit-non-zero-on-fix, --output-format "text" ] language: system types_or: [python] stages: [manual] From 47b337703acdf4ecc62612cfa48d2a3a4f8f9ef9 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 14:57:54 +0300 Subject: [PATCH 56/76] fix: ruff args in pre-commit --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 028858db7..58cbd7662 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -10,7 +10,7 @@ repos: - id: ruff-ci name: ruff-ci entry: ruff - args: [ --config, pyproject.toml, --fix, --show-fixes, --exit-non-zero-on-fix, --output-format "text" ] + args: [ --config, pyproject.toml, --fix, --show-fixes, --exit-non-zero-on-fix, --output-format, text ] language: system types_or: [python] stages: [manual] From 60a500b28bc69921f342351ad395bdd1494e8d85 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 15:24:00 +0300 Subject: [PATCH 57/76] fix(ci): ruff pre-commit hook --- .pre-commit-config.yaml | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 58cbd7662..3d2767016 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,17 +3,21 @@ repos: hooks: - id: ruff name: ruff - entry: ruff + entry: ruff check . 
args: [ --config, pyproject.toml, --fix, --show-fixes, --exit-non-zero-on-fix ] language: system types_or: [python] - - id: ruff-ci - name: ruff-ci - entry: ruff - args: [ --config, pyproject.toml, --fix, --show-fixes, --exit-non-zero-on-fix, --output-format, text ] - language: system - types_or: [python] - stages: [manual] + require_serial: true # run once for all files + - repo: local + hooks: + - id: ruff-ci + name: ruff-ci + entry: ruff check . + args: [ --output-format, text, --no-fix] + language: system + types_or: [python] + require_serial: true # run once for all files + stages: [manual] - repo: local hooks: - id: black From 571c43f03740fe836245b4dfc1afa14654c99b33 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 15:31:57 +0300 Subject: [PATCH 58/76] ci: attempt to fix --- .github/ruff-matcher.json | 2 +- .pre-commit-config.yaml | 18 ++++++++---------- 2 files changed, 9 insertions(+), 11 deletions(-) diff --git a/.github/ruff-matcher.json b/.github/ruff-matcher.json index 6a582326a..0f01452e9 100644 --- a/.github/ruff-matcher.json +++ b/.github/ruff-matcher.json @@ -4,7 +4,7 @@ "owner": "ruff", "pattern": [ { - "regexp": "/^(.*)\/(.+\\.py):(\\d+):(\\d+):\\s([\\da-zA-Z]+)\\s(.*)$/mg", + "regexp": "^(.*)\/(.+\\.py):(\\d+):(\\d+):\\s([\\da-zA-Z]+)\\s(.*)$/mg", "fromPath": 1, "file": 2, "line": 3, diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3d2767016..3245ebb11 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,16 +8,14 @@ repos: language: system types_or: [python] require_serial: true # run once for all files - - repo: local - hooks: - - id: ruff-ci - name: ruff-ci - entry: ruff check . - args: [ --output-format, text, --no-fix] - language: system - types_or: [python] - require_serial: true # run once for all files - stages: [manual] + - id: ruff-ci + name: ruff-ci + entry: ruff check . 
+ args: [ --config, pyproject.toml, --output-format, text, --no-fix] + language: system + types_or: [python] + require_serial: true # run once for all files + stages: [manual] - repo: local hooks: - id: black From 3283ec0adb613e8783694c213b4576d8508fa531 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 15:34:48 +0300 Subject: [PATCH 59/76] fix: problem matcher regex --- .github/ruff-matcher.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ruff-matcher.json b/.github/ruff-matcher.json index 0f01452e9..41e8b4f4a 100644 --- a/.github/ruff-matcher.json +++ b/.github/ruff-matcher.json @@ -4,7 +4,7 @@ "owner": "ruff", "pattern": [ { - "regexp": "^(.*)\/(.+\\.py):(\\d+):(\\d+):\\s([\\da-zA-Z]+)\\s(.*)$/mg", + "regexp": "^(.*)\/(.+\\.py):(\\d+):(\\d+):\\s([\\da-zA-Z]+)\\s(.*)$", "fromPath": 1, "file": 2, "line": 3, From 6622b5929a9a01a022e5f20f2628d22e31c74cbe Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 15:38:17 +0300 Subject: [PATCH 60/76] fix: regex problem matchr --- .github/ruff-matcher.json | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/.github/ruff-matcher.json b/.github/ruff-matcher.json index 41e8b4f4a..bc3b10738 100644 --- a/.github/ruff-matcher.json +++ b/.github/ruff-matcher.json @@ -4,13 +4,12 @@ "owner": "ruff", "pattern": [ { - "regexp": "^(.*)\/(.+\\.py):(\\d+):(\\d+):\\s([\\da-zA-Z]+)\\s(.*)$", - "fromPath": 1, - "file": 2, - "line": 3, - "column": 4, - "code": 5, - "message": 6 + "regexp": "^(.*):(\\d+):(\\d+):\\s([\\da-zA-Z]+)\\s(.*)$", + "file": 1, + "line": 2, + "column": 3, + "code": 4, + "message": 5 } ] } From 797aec7b8fa4de4bd187f43e1bc29a787552d6b9 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 15:51:19 +0300 Subject: [PATCH 61/76] ci: only lint once with ruff --- .github/workflows/ci.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 0e8c32875..9be387543 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -36,6 +36,7 @@ jobs: run: poetry install --no-interaction - name: Lint (ruff) + if: matrix.os == "ubuntu-22.04" && matrix.python-version == "3.10" run: | echo "::add-matcher::.github/ruff-matcher.json" poetry run pre-commit run --hook-stage manual ruff-ci --all-files --show-diff-on-failure From fcfbcaf04c66ea62f35483cf37abe5a58dff2326 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 15:53:45 +0300 Subject: [PATCH 62/76] chore: lint ci --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 9be387543..934bc4764 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -36,7 +36,7 @@ jobs: run: poetry install --no-interaction - name: Lint (ruff) - if: matrix.os == "ubuntu-22.04" && matrix.python-version == "3.10" + if: matrix.os == 'ubuntu-22.04' && matrix.python-version == '3.10' run: | echo "::add-matcher::.github/ruff-matcher.json" poetry run pre-commit run --hook-stage manual ruff-ci --all-files --show-diff-on-failure From 049a3db547e120116e53061c2c25ba6ab16b2512 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 17:09:56 +0300 Subject: [PATCH 63/76] fix(ci): deduplicate pre-commit run --- .pre-commit-config.yaml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3245ebb11..4a7560c89 100644 --- a/.pre-commit-config.yaml +++ 
b/.pre-commit-config.yaml @@ -10,11 +10,12 @@ repos: require_serial: true # run once for all files - id: ruff-ci name: ruff-ci - entry: ruff check . - args: [ --config, pyproject.toml, --output-format, text, --no-fix] + entry: ruff + args: [ check, ".", --config, pyproject.toml, --output-format, text, --no-fix] language: system types_or: [python] require_serial: true # run once for all files + pass_filenames: false stages: [manual] - repo: local hooks: From 5229ff1b927042a7c8c8012e13aec1f6e93cf1d2 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 17:31:37 +0300 Subject: [PATCH 64/76] ci: always run ruff --- .github/workflows/ci.yaml | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 934bc4764..52b799db4 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -36,10 +36,15 @@ jobs: run: poetry install --no-interaction - name: Lint (ruff) - if: matrix.os == 'ubuntu-22.04' && matrix.python-version == '3.10' run: | - echo "::add-matcher::.github/ruff-matcher.json" - poetry run pre-commit run --hook-stage manual ruff-ci --all-files --show-diff-on-failure + if [[ $(python -V) == *"3.10"* ]]; + then + echo "::add-matcher::.github/ruff-matcher.json"; + poetry run pre-commit run --hook-stage manual ruff-ci --all-files --show-diff-on-failure; + else + poetry run pre-commit run ruff --all-files --show-diff-on-failure; + fi; + - name: Formatting (black) run: poetry run pre-commit run black --all-files --show-diff-on-failure From ed8b99d49589f54f4bc2649f78eb59d570c7645f Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 17:42:44 +0300 Subject: [PATCH 65/76] fix(ci): windows --- .github/workflows/ci.yaml | 3 ++- .pre-commit-config.yaml | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 52b799db4..beaf56544 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -36,8 +36,9 @@ jobs: run: poetry install --no-interaction - name: Lint (ruff) + shell: bash run: | - if [[ $(python -V) == *"3.10"* ]]; + if [[ "$RUNNER_OS" == "Linux" && $(python -V) == *"3.10"* ]]; then echo "::add-matcher::.github/ruff-matcher.json"; poetry run pre-commit run --hook-stage manual ruff-ci --all-files --show-diff-on-failure; diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4a7560c89..b132953fd 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,6 +8,7 @@ repos: language: system types_or: [python] require_serial: true # run once for all files + pass_filenames: false - id: ruff-ci name: ruff-ci entry: ruff From 4a82df19b753a6683aab922ad3b26e514ad66f46 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 17:45:10 +0300 Subject: [PATCH 66/76] chore: lint and format --- kpops/component_handlers/helm_wrapper/helm.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/kpops/component_handlers/helm_wrapper/helm.py b/kpops/component_handlers/helm_wrapper/helm.py index 7039f7150..b1b101b41 100644 --- a/kpops/component_handlers/helm_wrapper/helm.py +++ b/kpops/component_handlers/helm_wrapper/helm.py @@ -18,7 +18,9 @@ RepoAuthFlags, Version, ) -from collections.abc import Iterable, Iterator + +if TYPE_CHECKING: + from collections.abc import Iterable, Iterator log = logging.getLogger("Helm") From 966dd85e9eac2ca8369a0c69f6517f88f1ec2b71 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 27 Sep 2023 17:50:31 +0300 Subject: [PATCH 67/76] 
docs: add ruff badge --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 9d2aaca2e..9dd25fd9c 100644 --- a/README.md +++ b/README.md @@ -3,6 +3,7 @@ [![Build status](https://github.com/bakdata/kpops/actions/workflows/ci.yaml/badge.svg)](https://github.com/bakdata/kpops/actions/workflows/ci.yaml) [![pypi](https://img.shields.io/pypi/v/kpops.svg)](https://pypi.org/project/kpops) [![versions](https://img.shields.io/pypi/pyversions/kpops.svg)](https://github.com/bakdata/kpops) +[![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff) [![license](https://img.shields.io/github/license/bakdata/kpops.svg)](https://github.com/bakdata/kpops/blob/main/LICENSE) ## Key features From 2712048636918a7653ce502a8f43e722b7f9eccf Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Thu, 28 Sep 2023 09:58:20 +0300 Subject: [PATCH 68/76] ci(ruff): configure isort --- pyproject.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index ddb5e8c72..80a9106bf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -153,6 +153,9 @@ exclude = ["tests/*snapshots/*"] [tool.ruff.extend-per-file-ignores] "tests/*/__init__.py" = ["F401"] +[tool.ruff.isort] +split-on-trailing-comma = false + [tool.ruff.flake8-bugbear] extend-immutable-calls = ["typer.Argument"] From 22409bf4137fe75e304d981b60c0ef7d1a3996eb Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Thu, 28 Sep 2023 12:45:38 +0300 Subject: [PATCH 69/76] chore(ruff): remove comment --- pyproject.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 80a9106bf..9e404234d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -100,7 +100,6 @@ ignore = [ "RET506", # Unnecessary {branch} after raise statement -- Lots of false positives "RET507", # Unnecessary {branch} after continue statement -- Lots of false positives "RET508", # Unnecessary {branch} after break statement -- Lots of false positives - # "TCH001", # Move application import {} into a type-checking block -- Breaks KPOps "PLR09", # upper bound on number of arguments, functions, etc. 
-- Inconvenient to enforce "PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable -- Inconvenient to enforce "PLW2901", # `for` loop variable `{var}` overwritten by assignment target -- Inconvenient to enforce From b255ea3d255d5109e65e4e45eb3d5c544c45b2b5 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Mon, 2 Oct 2023 19:11:45 +0300 Subject: [PATCH 70/76] tests: rewrite snapshots --- .../snapshots/snap_test_schema_generation.py | 7 +- tests/pipeline/snapshots/snap_test_example.py | 579 +-- .../pipeline/snapshots/snap_test_pipeline.py | 4307 +++++++++-------- 3 files changed, 2451 insertions(+), 2442 deletions(-) diff --git a/tests/cli/snapshots/snap_test_schema_generation.py b/tests/cli/snapshots/snap_test_schema_generation.py index fe596c1df..2dd92b512 100644 --- a/tests/cli/snapshots/snap_test_schema_generation.py +++ b/tests/cli/snapshots/snap_test_schema_generation.py @@ -1,10 +1,13 @@ +# -*- coding: utf-8 -*- # snapshottest: v1 - https://goo.gl/zC4yUc +from __future__ import unicode_literals from snapshottest import Snapshot + snapshots = Snapshot() -snapshots["TestGenSchema.test_gen_pipeline_schema_only_custom_module test-schema-generation"] = """{ +snapshots['TestGenSchema.test_gen_pipeline_schema_only_custom_module test-schema-generation'] = '''{ "definitions": { "EmptyPipelineComponent": { "description": "", @@ -429,4 +432,4 @@ "title": "KPOps pipeline schema", "type": "array" } -""" +''' diff --git a/tests/pipeline/snapshots/snap_test_example.py b/tests/pipeline/snapshots/snap_test_example.py index c8ef073e7..cff924b5f 100644 --- a/tests/pipeline/snapshots/snap_test_example.py +++ b/tests/pipeline/snapshots/snap_test_example.py @@ -1,351 +1,354 @@ +# -*- coding: utf-8 -*- # snapshottest: v1 - https://goo.gl/zC4yUc +from __future__ import unicode_literals from snapshottest import Snapshot + snapshots = Snapshot() -snapshots["TestExample.test_atm_fraud atm-fraud-pipeline"] = { - "components": [ +snapshots['TestExample.test_atm_fraud atm-fraud-pipeline'] = { + 'components': [ { - "app": { - "debug": True, - "image": "${DOCKER_REGISTRY}/atm-demo-accountproducer", - "imageTag": "1.0.0", - "nameOverride": "account-producer", - "prometheus": { - "jmx": { - "enabled": False, - }, - }, - "replicaCount": 1, - "schedule": "0 12 * * *", - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "extraOutputTopics": { + 'app': { + 'debug': True, + 'image': '${DOCKER_REGISTRY}/atm-demo-accountproducer', + 'imageTag': '1.0.0', + 'nameOverride': 'account-producer', + 'prometheus': { + 'jmx': { + 'enabled': False + } + }, + 'replicaCount': 1, + 'schedule': '0 12 * * *', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { }, - "optimizeLeaveGroupBehavior": False, - "outputTopic": "bakdata-atm-fraud-detection-account-producer-topic", - "schemaRegistryUrl": "http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081", + 'optimizeLeaveGroupBehavior': False, + 'outputTopic': 'bakdata-atm-fraud-detection-account-producer-topic', + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' }, - "suspend": True, + 'suspend': True }, - "name": "account-producer", - "namespace": "${NAMESPACE}", - "prefix": "", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", + 'name': 'account-producer', + 
'namespace': '${NAMESPACE}', + 'prefix': '', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' }, - "to": { - "models": { + 'to': { + 'models': { }, - "topics": { - "bakdata-atm-fraud-detection-account-producer-topic": { - "configs": { + 'topics': { + 'bakdata-atm-fraud-detection-account-producer-topic': { + 'configs': { }, - "partitions_count": 3, - }, - }, + 'partitions_count': 3 + } + } }, - "type": "producer-app", - "version": "2.9.0", + 'type': 'producer-app', + 'version': '2.9.0' }, { - "app": { - "commandLine": { - "ITERATION": 20, - "REAL_TX": 19, - }, - "debug": True, - "image": "${DOCKER_REGISTRY}/atm-demo-transactionavroproducer", - "imageTag": "1.0.0", - "nameOverride": "transaction-avro-producer", - "prometheus": { - "jmx": { - "enabled": False, - }, - }, - "replicaCount": 1, - "schedule": "0 12 * * *", - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "extraOutputTopics": { + 'app': { + 'commandLine': { + 'ITERATION': 20, + 'REAL_TX': 19 + }, + 'debug': True, + 'image': '${DOCKER_REGISTRY}/atm-demo-transactionavroproducer', + 'imageTag': '1.0.0', + 'nameOverride': 'transaction-avro-producer', + 'prometheus': { + 'jmx': { + 'enabled': False + } + }, + 'replicaCount': 1, + 'schedule': '0 12 * * *', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { }, - "optimizeLeaveGroupBehavior": False, - "outputTopic": "bakdata-atm-fraud-detection-transaction-avro-producer-topic", - "schemaRegistryUrl": "http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081", + 'optimizeLeaveGroupBehavior': False, + 'outputTopic': 'bakdata-atm-fraud-detection-transaction-avro-producer-topic', + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' }, - "suspend": True, + 'suspend': True }, - "name": "transaction-avro-producer", - "namespace": "${NAMESPACE}", - "prefix": "", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", + 'name': 'transaction-avro-producer', + 'namespace': '${NAMESPACE}', + 'prefix': '', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' }, - "to": { - "models": { + 'to': { + 'models': { }, - "topics": { - "bakdata-atm-fraud-detection-transaction-avro-producer-topic": { - "configs": { + 'topics': { + 'bakdata-atm-fraud-detection-transaction-avro-producer-topic': { + 'configs': { }, - "partitions_count": 3, - }, - }, + 'partitions_count': 3 + } + } }, - "type": "producer-app", - "version": "2.9.0", + 'type': 'producer-app', + 'version': '2.9.0' }, { - "app": { - "annotations": { - "consumerGroup": "atm-transactionjoiner-atm-fraud-joinedtransactions-topic", - }, - "commandLine": { - "PRODUCTIVE": False, - }, - "debug": True, - "image": "${DOCKER_REGISTRY}/atm-demo-transactionjoiner", - "imageTag": "1.0.0", - "labels": { - "pipeline": "bakdata-atm-fraud-detection", - }, - "nameOverride": "transaction-joiner", - "prometheus": { - "jmx": { - "enabled": False, - }, - }, - "replicaCount": 1, - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "errorTopic": 
"bakdata-atm-fraud-detection-transaction-joiner-dead-letter-topic", - "inputTopics": [ - "bakdata-atm-fraud-detection-transaction-avro-producer-topic", + 'app': { + 'annotations': { + 'consumerGroup': 'atm-transactionjoiner-atm-fraud-joinedtransactions-topic' + }, + 'commandLine': { + 'PRODUCTIVE': False + }, + 'debug': True, + 'image': '${DOCKER_REGISTRY}/atm-demo-transactionjoiner', + 'imageTag': '1.0.0', + 'labels': { + 'pipeline': 'bakdata-atm-fraud-detection' + }, + 'nameOverride': 'transaction-joiner', + 'prometheus': { + 'jmx': { + 'enabled': False + } + }, + 'replicaCount': 1, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'errorTopic': 'bakdata-atm-fraud-detection-transaction-joiner-dead-letter-topic', + 'inputTopics': [ + 'bakdata-atm-fraud-detection-transaction-avro-producer-topic' ], - "optimizeLeaveGroupBehavior": False, - "outputTopic": "bakdata-atm-fraud-detection-transaction-joiner-topic", - "schemaRegistryUrl": "http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081", - }, + 'optimizeLeaveGroupBehavior': False, + 'outputTopic': 'bakdata-atm-fraud-detection-transaction-joiner-topic', + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' + } }, - "name": "transaction-joiner", - "namespace": "${NAMESPACE}", - "prefix": "", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", + 'name': 'transaction-joiner', + 'namespace': '${NAMESPACE}', + 'prefix': '', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' }, - "to": { - "models": { + 'to': { + 'models': { }, - "topics": { - "bakdata-atm-fraud-detection-transaction-joiner-dead-letter-topic": { - "configs": { + 'topics': { + 'bakdata-atm-fraud-detection-transaction-joiner-dead-letter-topic': { + 'configs': { }, - "partitions_count": 1, - "type": "error", + 'partitions_count': 1, + 'type': 'error' }, - "bakdata-atm-fraud-detection-transaction-joiner-topic": { - "configs": { + 'bakdata-atm-fraud-detection-transaction-joiner-topic': { + 'configs': { }, - "partitions_count": 3, - }, - }, + 'partitions_count': 3 + } + } }, - "type": "streams-app", - "version": "2.9.0", + 'type': 'streams-app', + 'version': '2.9.0' }, { - "app": { - "annotations": { - "consumerGroup": "atm-frauddetector-atm-fraud-possiblefraudtransactions-topic", - }, - "commandLine": { - "PRODUCTIVE": False, - }, - "debug": True, - "image": "${DOCKER_REGISTRY}/atm-demo-frauddetector", - "imageTag": "1.0.0", - "labels": { - "pipeline": "bakdata-atm-fraud-detection", - }, - "nameOverride": "fraud-detector", - "prometheus": { - "jmx": { - "enabled": False, - }, - }, - "replicaCount": 1, - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "errorTopic": "bakdata-atm-fraud-detection-fraud-detector-dead-letter-topic", - "inputTopics": [ - "bakdata-atm-fraud-detection-transaction-joiner-topic", + 'app': { + 'annotations': { + 'consumerGroup': 'atm-frauddetector-atm-fraud-possiblefraudtransactions-topic' + }, + 'commandLine': { + 'PRODUCTIVE': False + }, + 'debug': True, + 'image': '${DOCKER_REGISTRY}/atm-demo-frauddetector', + 'imageTag': '1.0.0', + 'labels': { + 'pipeline': 'bakdata-atm-fraud-detection' + }, + 'nameOverride': 'fraud-detector', + 'prometheus': { + 'jmx': { + 
'enabled': False + } + }, + 'replicaCount': 1, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'errorTopic': 'bakdata-atm-fraud-detection-fraud-detector-dead-letter-topic', + 'inputTopics': [ + 'bakdata-atm-fraud-detection-transaction-joiner-topic' ], - "optimizeLeaveGroupBehavior": False, - "outputTopic": "bakdata-atm-fraud-detection-fraud-detector-topic", - "schemaRegistryUrl": "http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081", - }, + 'optimizeLeaveGroupBehavior': False, + 'outputTopic': 'bakdata-atm-fraud-detection-fraud-detector-topic', + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' + } }, - "name": "fraud-detector", - "namespace": "${NAMESPACE}", - "prefix": "", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", + 'name': 'fraud-detector', + 'namespace': '${NAMESPACE}', + 'prefix': '', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' }, - "to": { - "models": { + 'to': { + 'models': { }, - "topics": { - "bakdata-atm-fraud-detection-fraud-detector-dead-letter-topic": { - "configs": { + 'topics': { + 'bakdata-atm-fraud-detection-fraud-detector-dead-letter-topic': { + 'configs': { }, - "partitions_count": 1, - "type": "error", + 'partitions_count': 1, + 'type': 'error' }, - "bakdata-atm-fraud-detection-fraud-detector-topic": { - "configs": { + 'bakdata-atm-fraud-detection-fraud-detector-topic': { + 'configs': { }, - "partitions_count": 3, - }, - }, + 'partitions_count': 3 + } + } }, - "type": "streams-app", - "version": "2.9.0", + 'type': 'streams-app', + 'version': '2.9.0' }, { - "app": { - "annotations": { - "consumerGroup": "atm-accountlinker-atm-fraud-output-topic", - }, - "commandLine": { - "PRODUCTIVE": False, - }, - "debug": True, - "image": "${DOCKER_REGISTRY}/atm-demo-accountlinker", - "imageTag": "1.0.0", - "labels": { - "pipeline": "bakdata-atm-fraud-detection", - }, - "nameOverride": "account-linker", - "prometheus": { - "jmx": { - "enabled": False, + 'app': { + 'annotations': { + 'consumerGroup': 'atm-accountlinker-atm-fraud-output-topic' + }, + 'commandLine': { + 'PRODUCTIVE': False + }, + 'debug': True, + 'image': '${DOCKER_REGISTRY}/atm-demo-accountlinker', + 'imageTag': '1.0.0', + 'labels': { + 'pipeline': 'bakdata-atm-fraud-detection' + }, + 'nameOverride': 'account-linker', + 'prometheus': { + 'jmx': { + 'enabled': False + } + }, + 'replicaCount': 1, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'errorTopic': 'bakdata-atm-fraud-detection-account-linker-dead-letter-topic', + 'extraInputTopics': { + 'accounts': [ + 'bakdata-atm-fraud-detection-account-producer-topic' + ] }, - }, - "replicaCount": 1, - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "errorTopic": "bakdata-atm-fraud-detection-account-linker-dead-letter-topic", - "extraInputTopics": { - "accounts": [ - "bakdata-atm-fraud-detection-account-producer-topic", - ], - }, - "inputTopics": [ - "bakdata-atm-fraud-detection-fraud-detector-topic", + 'inputTopics': [ + 'bakdata-atm-fraud-detection-fraud-detector-topic' ], - "optimizeLeaveGroupBehavior": False, - "outputTopic": "bakdata-atm-fraud-detection-account-linker-topic", - "schemaRegistryUrl": 
"http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081", - }, + 'optimizeLeaveGroupBehavior': False, + 'outputTopic': 'bakdata-atm-fraud-detection-account-linker-topic', + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' + } }, - "from": { - "components": { - "account-producer": { - "role": "accounts", + 'from': { + 'components': { + 'account-producer': { + 'role': 'accounts' }, - "fraud-detector": { - "type": "input", - }, - }, - "topics": { + 'fraud-detector': { + 'type': 'input' + } }, + 'topics': { + } }, - "name": "account-linker", - "namespace": "${NAMESPACE}", - "prefix": "", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", + 'name': 'account-linker', + 'namespace': '${NAMESPACE}', + 'prefix': '', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' }, - "to": { - "models": { + 'to': { + 'models': { }, - "topics": { - "bakdata-atm-fraud-detection-account-linker-dead-letter-topic": { - "configs": { + 'topics': { + 'bakdata-atm-fraud-detection-account-linker-dead-letter-topic': { + 'configs': { }, - "partitions_count": 1, - "type": "error", + 'partitions_count': 1, + 'type': 'error' }, - "bakdata-atm-fraud-detection-account-linker-topic": { - "configs": { + 'bakdata-atm-fraud-detection-account-linker-topic': { + 'configs': { }, - "partitions_count": 3, - }, - }, + 'partitions_count': 3 + } + } }, - "type": "streams-app", - "version": "2.9.0", + 'type': 'streams-app', + 'version': '2.9.0' }, { - "app": { - "auto.create": True, - "connection.ds.pool.size": 5, - "connection.password": "AppPassword", - "connection.url": "jdbc:postgresql://postgresql-dev.kpops.svc.cluster.local:5432/app_db", - "connection.user": "app1", - "connector.class": "io.confluent.connect.jdbc.JdbcSinkConnector", - "errors.deadletterqueue.context.headers.enable": True, - "errors.deadletterqueue.topic.name": "postgres-request-sink-dead-letters", - "errors.deadletterqueue.topic.replication.factor": 1, - "errors.tolerance": "all", - "insert.mode": "insert", - "insert.mode.databaselevel": True, - "key.converter": "org.apache.kafka.connect.storage.StringConverter", - "name": "postgresql-connector", - "pk.mode": "record_value", - "table.name.format": "fraud_transactions", - "tasks.max": 1, - "topics": "bakdata-atm-fraud-detection-account-linker-topic", - "transforms": "flatten", - "transforms.flatten.type": "org.apache.kafka.connect.transforms.Flatten$Value", - "value.converter": "io.confluent.connect.avro.AvroConverter", - "value.converter.schema.registry.url": "http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081", + 'app': { + 'auto.create': True, + 'connection.ds.pool.size': 5, + 'connection.password': 'AppPassword', + 'connection.url': 'jdbc:postgresql://postgresql-dev.kpops.svc.cluster.local:5432/app_db', + 'connection.user': 'app1', + 'connector.class': 'io.confluent.connect.jdbc.JdbcSinkConnector', + 'errors.deadletterqueue.context.headers.enable': True, + 'errors.deadletterqueue.topic.name': 'postgres-request-sink-dead-letters', + 'errors.deadletterqueue.topic.replication.factor': 1, + 'errors.tolerance': 'all', + 'insert.mode': 'insert', + 'insert.mode.databaselevel': True, + 'key.converter': 'org.apache.kafka.connect.storage.StringConverter', + 'name': 'postgresql-connector', + 
'pk.mode': 'record_value', + 'table.name.format': 'fraud_transactions', + 'tasks.max': 1, + 'topics': 'bakdata-atm-fraud-detection-account-linker-topic', + 'transforms': 'flatten', + 'transforms.flatten.type': 'org.apache.kafka.connect.transforms.Flatten$Value', + 'value.converter': 'io.confluent.connect.avro.AvroConverter', + 'value.converter.schema.registry.url': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' }, - "name": "postgresql-connector", - "namespace": "${NAMESPACE}", - "prefix": "", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-kafka-connect-resetter", - "url": "https://bakdata.github.io/kafka-connect-resetter/", + 'name': 'postgresql-connector', + 'namespace': '${NAMESPACE}', + 'prefix': '', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-kafka-connect-resetter', + 'url': 'https://bakdata.github.io/kafka-connect-resetter/' }, - "resetter_values": { + 'resetter_values': { }, - "type": "kafka-sink-connector", - "version": "1.0.4", - }, - ], + 'type': 'kafka-sink-connector', + 'version': '1.0.4' + } + ] } diff --git a/tests/pipeline/snapshots/snap_test_pipeline.py b/tests/pipeline/snapshots/snap_test_pipeline.py index 2a63afd83..c2e339fbc 100644 --- a/tests/pipeline/snapshots/snap_test_pipeline.py +++ b/tests/pipeline/snapshots/snap_test_pipeline.py @@ -1,2306 +1,2309 @@ +# -*- coding: utf-8 -*- # snapshottest: v1 - https://goo.gl/zC4yUc +from __future__ import unicode_literals from snapshottest import Snapshot + snapshots = Snapshot() -snapshots["TestPipeline.test_default_config test-pipeline"] = { - "components": [ +snapshots['TestPipeline.test_default_config test-pipeline'] = { + 'components': [ { - "app": { - "nameOverride": "resources-custom-config-app1", - "resources": { - "limits": { - "memory": "2G", - }, - "requests": { - "memory": "2G", - }, - }, - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "extraOutputTopics": { - }, - "outputTopic": "resources-custom-config-app1", - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "name": "app1", - "namespace": "development-namespace", - "prefix": "resources-custom-config-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - }, - "topics": { - "resources-custom-config-app1": { - "configs": { - }, - "partitions_count": 3, - "type": "output", - }, - }, - }, - "type": "producer-app", - "version": "2.9.0", + 'app': { + 'nameOverride': 'resources-custom-config-app1', + 'resources': { + 'limits': { + 'memory': '2G' + }, + 'requests': { + 'memory': '2G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { + }, + 'outputTopic': 'resources-custom-config-app1', + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'name': 'app1', + 'namespace': 'development-namespace', + 'prefix': 'resources-custom-config-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-custom-config-app1': { + 'configs': { + }, + 'partitions_count': 3, + 'type': 'output' + } + } + }, + 'type': 'producer-app', + 'version': 
'2.9.0' }, { - "app": { - "image": "some-image", - "labels": { - "pipeline": "resources-custom-config", - }, - "nameOverride": "resources-custom-config-app2", - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "errorTopic": "resources-custom-config-app2-error", - "inputTopics": [ - "resources-custom-config-app1", + 'app': { + 'image': 'some-image', + 'labels': { + 'pipeline': 'resources-custom-config' + }, + 'nameOverride': 'resources-custom-config-app2', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'errorTopic': 'resources-custom-config-app2-error', + 'inputTopics': [ + 'resources-custom-config-app1' ], - "outputTopic": "resources-custom-config-app2", - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "name": "app2", - "namespace": "development-namespace", - "prefix": "resources-custom-config-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - }, - "topics": { - "resources-custom-config-app2": { - "configs": { - }, - "partitions_count": 3, - "type": "output", - }, - "resources-custom-config-app2-error": { - "configs": { - }, - "partitions_count": 1, - "type": "error", - }, - }, - }, - "type": "streams-app", - "version": "2.9.0", - }, - ], + 'outputTopic': 'resources-custom-config-app2', + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'name': 'app2', + 'namespace': 'development-namespace', + 'prefix': 'resources-custom-config-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-custom-config-app2': { + 'configs': { + }, + 'partitions_count': 3, + 'type': 'output' + }, + 'resources-custom-config-app2-error': { + 'configs': { + }, + 'partitions_count': 1, + 'type': 'error' + } + } + }, + 'type': 'streams-app', + 'version': '2.9.0' + } + ] } -snapshots["TestPipeline.test_inflate_pipeline test-pipeline"] = { - "components": [ +snapshots['TestPipeline.test_inflate_pipeline test-pipeline'] = { + 'components': [ { - "app": { - "commandLine": { - "FAKE_ARG": "fake-arg-value", - }, - "image": "example-registry/fake-image", - "imageTag": "0.0.1", - "nameOverride": "resources-pipeline-with-inflate-scheduled-producer", - "schedule": "30 3/8 * * *", - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "extraOutputTopics": { - }, - "outputTopic": "resources-pipeline-with-inflate-scheduled-producer", - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "name": "scheduled-producer", - "namespace": "example-namespace", - "prefix": "resources-pipeline-with-inflate-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - "com/bakdata/kafka/fake": "1.0.0", - }, - "topics": { - "resources-pipeline-with-inflate-scheduled-producer": { - "configs": { - "cleanup.policy": "compact,delete", - }, - "partitions_count": 12, - "type": "output", - "value_schema": "com.bakdata.fake.Produced", - }, - }, - }, - "type": "scheduled-producer", - "version": "2.4.2", + 'app': { + 'commandLine': { + 'FAKE_ARG': 'fake-arg-value' + }, + 
'image': 'example-registry/fake-image', + 'imageTag': '0.0.1', + 'nameOverride': 'resources-pipeline-with-inflate-scheduled-producer', + 'schedule': '30 3/8 * * *', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { + }, + 'outputTopic': 'resources-pipeline-with-inflate-scheduled-producer', + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'name': 'scheduled-producer', + 'namespace': 'example-namespace', + 'prefix': 'resources-pipeline-with-inflate-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + 'com/bakdata/kafka/fake': '1.0.0' + }, + 'topics': { + 'resources-pipeline-with-inflate-scheduled-producer': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 12, + 'type': 'output', + 'value_schema': 'com.bakdata.fake.Produced' + } + } + }, + 'type': 'scheduled-producer', + 'version': '2.4.2' }, { - "app": { - "autoscaling": { - "consumerGroup": "converter-resources-pipeline-with-inflate-converter", - "cooldownPeriod": 300, - "enabled": True, - "lagThreshold": 10000, - "maxReplicas": 1, - "minReplicas": 0, - "offsetResetPolicy": "earliest", - "pollingInterval": 30, - "topics": [ - ], - }, - "commandLine": { - "CONVERT_XML": True, - }, - "nameOverride": "resources-pipeline-with-inflate-converter", - "resources": { - "limits": { - "memory": "2G", - }, - "requests": { - "memory": "2G", - }, - }, - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "config": { - "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", - }, - "errorTopic": "resources-pipeline-with-inflate-converter-error", - "inputTopics": [ - "resources-pipeline-with-inflate-scheduled-producer", + 'app': { + 'autoscaling': { + 'consumerGroup': 'converter-resources-pipeline-with-inflate-converter', + 'cooldownPeriod': 300, + 'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 1, + 'minReplicas': 0, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 'topics': [ + ] + }, + 'commandLine': { + 'CONVERT_XML': True + }, + 'nameOverride': 'resources-pipeline-with-inflate-converter', + 'resources': { + 'limits': { + 'memory': '2G' + }, + 'requests': { + 'memory': '2G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-pipeline-with-inflate-converter-error', + 'inputTopics': [ + 'resources-pipeline-with-inflate-scheduled-producer' ], - "outputTopic": "resources-pipeline-with-inflate-converter", - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "name": "converter", - "namespace": "example-namespace", - "prefix": "resources-pipeline-with-inflate-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - }, - "topics": { - "resources-pipeline-with-inflate-converter": { - "configs": { - "cleanup.policy": "compact,delete", - "retention.ms": "-1", - }, - "partitions_count": 50, - "type": "output", - }, - "resources-pipeline-with-inflate-converter-error": { - "configs": { - "cleanup.policy": "compact,delete", - }, - "partitions_count": 10, - "type": "error", - 
"value_schema": "com.bakdata.kafka.DeadLetter", - }, - }, - }, - "type": "converter", - "version": "2.4.2", + 'outputTopic': 'resources-pipeline-with-inflate-converter', + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'name': 'converter', + 'namespace': 'example-namespace', + 'prefix': 'resources-pipeline-with-inflate-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-pipeline-with-inflate-converter': { + 'configs': { + 'cleanup.policy': 'compact,delete', + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' + }, + 'resources-pipeline-with-inflate-converter-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 10, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'converter', + 'version': '2.4.2' }, { - "app": { - "autoscaling": { - "consumerGroup": "filter-resources-pipeline-with-inflate-should-inflate", - "cooldownPeriod": 300, - "enabled": True, - "lagThreshold": 10000, - "maxReplicas": 4, - "minReplicas": 4, - "offsetResetPolicy": "earliest", - "pollingInterval": 30, - "topics": [ - "resources-pipeline-with-inflate-should-inflate", + 'app': { + 'autoscaling': { + 'consumerGroup': 'filter-resources-pipeline-with-inflate-should-inflate', + 'cooldownPeriod': 300, + 'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 4, + 'minReplicas': 4, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 'topics': [ + 'resources-pipeline-with-inflate-should-inflate' + ] + }, + 'commandLine': { + 'TYPE': 'nothing' + }, + 'image': 'fake-registry/filter', + 'imageTag': '2.4.1', + 'nameOverride': 'resources-pipeline-with-inflate-should-inflate', + 'replicaCount': 4, + 'resources': { + 'requests': { + 'memory': '3G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-pipeline-with-inflate-should-inflate-error', + 'inputTopics': [ + 'resources-pipeline-with-inflate-converter' ], - }, - "commandLine": { - "TYPE": "nothing", - }, - "image": "fake-registry/filter", - "imageTag": "2.4.1", - "nameOverride": "resources-pipeline-with-inflate-should-inflate", - "replicaCount": 4, - "resources": { - "requests": { - "memory": "3G", - }, - }, - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "config": { - "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", - }, - "errorTopic": "resources-pipeline-with-inflate-should-inflate-error", - "inputTopics": [ - "resources-pipeline-with-inflate-converter", - ], - "outputTopic": "resources-pipeline-with-inflate-should-inflate", - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "name": "should-inflate", - "namespace": "example-namespace", - "prefix": "resources-pipeline-with-inflate-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - }, - "topics": { - "resources-pipeline-with-inflate-should-inflate": { - "configs": { - "retention.ms": "-1", - }, - "partitions_count": 50, - "type": "output", - }, - "resources-pipeline-with-inflate-should-inflate-error": 
{ - "configs": { - "cleanup.policy": "compact,delete", - }, - "partitions_count": 1, - "type": "error", - "value_schema": "com.bakdata.kafka.DeadLetter", - }, - }, - }, - "type": "should-inflate", - "version": "2.4.2", + 'outputTopic': 'resources-pipeline-with-inflate-should-inflate', + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'name': 'should-inflate', + 'namespace': 'example-namespace', + 'prefix': 'resources-pipeline-with-inflate-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-pipeline-with-inflate-should-inflate': { + 'configs': { + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' + }, + 'resources-pipeline-with-inflate-should-inflate-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'should-inflate', + 'version': '2.4.2' }, { - "app": { - "batch.size": "2000", - "behavior.on.malformed.documents": "warn", - "behavior.on.null.values": "delete", - "connection.compression": "true", - "connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector", - "key.ignore": "false", - "linger.ms": "5000", - "max.buffered.records": "20000", - "name": "resources-pipeline-with-inflate-should-inflate-inflated-sink-connector", - "read.timeout.ms": "120000", - "tasks.max": "1", - "topics": "resources-pipeline-with-inflate-should-inflate", - "transforms.changeTopic.replacement": "resources-pipeline-with-inflate-should-inflate-index-v1", - }, - "name": "should-inflate-inflated-sink-connector", - "namespace": "example-namespace", - "prefix": "resources-pipeline-with-inflate-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-kafka-connect-resetter", - "url": "https://bakdata.github.io/kafka-connect-resetter/", - }, - "resetter_values": { - }, - "to": { - "models": { - }, - "topics": { - "kafka-sink-connector": { - "configs": { - }, - "type": "output", - }, - "should-inflate-inflated-sink-connector": { - "configs": { - }, - "role": "test", - }, - }, - }, - "type": "kafka-sink-connector", - "version": "1.0.4", + 'app': { + 'batch.size': '2000', + 'behavior.on.malformed.documents': 'warn', + 'behavior.on.null.values': 'delete', + 'connection.compression': 'true', + 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector', + 'key.ignore': 'false', + 'linger.ms': '5000', + 'max.buffered.records': '20000', + 'name': 'resources-pipeline-with-inflate-should-inflate-inflated-sink-connector', + 'read.timeout.ms': '120000', + 'tasks.max': '1', + 'topics': 'resources-pipeline-with-inflate-should-inflate', + 'transforms.changeTopic.replacement': 'resources-pipeline-with-inflate-should-inflate-index-v1' + }, + 'name': 'should-inflate-inflated-sink-connector', + 'namespace': 'example-namespace', + 'prefix': 'resources-pipeline-with-inflate-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-kafka-connect-resetter', + 'url': 'https://bakdata.github.io/kafka-connect-resetter/' + }, + 'resetter_values': { + }, + 'to': { + 'models': { + }, + 'topics': { + 'kafka-sink-connector': { + 'configs': { + }, + 'type': 'output' + }, + 'should-inflate-inflated-sink-connector': { + 'configs': { + }, 
+ 'role': 'test' + } + } + }, + 'type': 'kafka-sink-connector', + 'version': '1.0.4' }, { - "app": { - "nameOverride": "resources-pipeline-with-inflate-should-inflate-inflated-streams-app", - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "config": { - "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", - }, - "errorTopic": "resources-pipeline-with-inflate-should-inflate-inflated-streams-app-error", - "inputTopics": [ - "kafka-sink-connector", + 'app': { + 'nameOverride': 'resources-pipeline-with-inflate-should-inflate-inflated-streams-app', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-pipeline-with-inflate-should-inflate-inflated-streams-app-error', + 'inputTopics': [ + 'kafka-sink-connector' ], - "outputTopic": "resources-pipeline-with-inflate-should-inflate-should-inflate-inflated-streams-app", - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "name": "should-inflate-inflated-streams-app", - "namespace": "example-namespace", - "prefix": "resources-pipeline-with-inflate-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - }, - "topics": { - "resources-pipeline-with-inflate-should-inflate-inflated-streams-app-error": { - "configs": { - "cleanup.policy": "compact,delete", - }, - "partitions_count": 1, - "type": "error", - "value_schema": "com.bakdata.kafka.DeadLetter", - }, - "resources-pipeline-with-inflate-should-inflate-should-inflate-inflated-streams-app": { - "configs": { - }, - "type": "output", - }, - }, - }, - "type": "streams-app", - "version": "2.4.2", - }, - ], + 'outputTopic': 'resources-pipeline-with-inflate-should-inflate-should-inflate-inflated-streams-app', + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'name': 'should-inflate-inflated-streams-app', + 'namespace': 'example-namespace', + 'prefix': 'resources-pipeline-with-inflate-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-pipeline-with-inflate-should-inflate-inflated-streams-app-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + }, + 'resources-pipeline-with-inflate-should-inflate-should-inflate-inflated-streams-app': { + 'configs': { + }, + 'type': 'output' + } + } + }, + 'type': 'streams-app', + 'version': '2.4.2' + } + ] } -snapshots["TestPipeline.test_kafka_connect_sink_weave_from_topics test-pipeline"] = { - "components": [ +snapshots['TestPipeline.test_kafka_connect_sink_weave_from_topics test-pipeline'] = { + 'components': [ { - "app": { - "image": "fake-image", - "nameOverride": "resources-kafka-connect-sink-streams-app", - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "config": { - "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", - }, - "errorTopic": "resources-kafka-connect-sink-streams-app-error", - "inputTopics": [ - "example-topic", + 'app': { + 'image': 'fake-image', + 'nameOverride': 
'resources-kafka-connect-sink-streams-app', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-kafka-connect-sink-streams-app-error', + 'inputTopics': [ + 'example-topic' ], - "outputTopic": "example-output", - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "from": { - "components": { - }, - "topics": { - "example-topic": { - "type": "input", - }, - }, - }, - "name": "streams-app", - "namespace": "example-namespace", - "prefix": "resources-kafka-connect-sink-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - }, - "topics": { - "example-output": { - "configs": { - }, - "type": "output", - }, - "resources-kafka-connect-sink-streams-app-error": { - "configs": { - "cleanup.policy": "compact,delete", - }, - "partitions_count": 1, - "type": "error", - "value_schema": "com.bakdata.kafka.DeadLetter", - }, - }, - }, - "type": "streams-app", - "version": "2.4.2", + 'outputTopic': 'example-output', + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'from': { + 'components': { + }, + 'topics': { + 'example-topic': { + 'type': 'input' + } + } + }, + 'name': 'streams-app', + 'namespace': 'example-namespace', + 'prefix': 'resources-kafka-connect-sink-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'example-output': { + 'configs': { + }, + 'type': 'output' + }, + 'resources-kafka-connect-sink-streams-app-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'streams-app', + 'version': '2.4.2' }, { - "app": { - "batch.size": "2000", - "behavior.on.malformed.documents": "warn", - "behavior.on.null.values": "delete", - "connection.compression": "true", - "connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector", - "key.ignore": "false", - "linger.ms": "5000", - "max.buffered.records": "20000", - "name": "resources-kafka-connect-sink-es-sink-connector", - "read.timeout.ms": "120000", - "tasks.max": "1", - "topics": "example-output", - }, - "name": "es-sink-connector", - "namespace": "example-namespace", - "prefix": "resources-kafka-connect-sink-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-kafka-connect-resetter", - "url": "https://bakdata.github.io/kafka-connect-resetter/", - }, - "resetter_values": { - }, - "type": "kafka-sink-connector", - "version": "1.0.4", - }, - ], + 'app': { + 'batch.size': '2000', + 'behavior.on.malformed.documents': 'warn', + 'behavior.on.null.values': 'delete', + 'connection.compression': 'true', + 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector', + 'key.ignore': 'false', + 'linger.ms': '5000', + 'max.buffered.records': '20000', + 'name': 'resources-kafka-connect-sink-es-sink-connector', + 'read.timeout.ms': '120000', + 'tasks.max': '1', + 'topics': 'example-output' + }, + 'name': 'es-sink-connector', + 'namespace': 'example-namespace', + 'prefix': 'resources-kafka-connect-sink-', + 
'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-kafka-connect-resetter', + 'url': 'https://bakdata.github.io/kafka-connect-resetter/' + }, + 'resetter_values': { + }, + 'type': 'kafka-sink-connector', + 'version': '1.0.4' + } + ] } -snapshots["TestPipeline.test_load_pipeline test-pipeline"] = { - "components": [ +snapshots['TestPipeline.test_load_pipeline test-pipeline'] = { + 'components': [ { - "app": { - "commandLine": { - "FAKE_ARG": "fake-arg-value", - }, - "image": "example-registry/fake-image", - "imageTag": "0.0.1", - "nameOverride": "resources-first-pipeline-scheduled-producer", - "schedule": "30 3/8 * * *", - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "extraOutputTopics": { - }, - "outputTopic": "resources-first-pipeline-scheduled-producer", - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "name": "scheduled-producer", - "namespace": "example-namespace", - "prefix": "resources-first-pipeline-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - "com/bakdata/kafka/fake": "1.0.0", - }, - "topics": { - "resources-first-pipeline-scheduled-producer": { - "configs": { - "cleanup.policy": "compact,delete", - }, - "partitions_count": 12, - "type": "output", - "value_schema": "com.bakdata.fake.Produced", - }, - }, - }, - "type": "scheduled-producer", - "version": "2.4.2", + 'app': { + 'commandLine': { + 'FAKE_ARG': 'fake-arg-value' + }, + 'image': 'example-registry/fake-image', + 'imageTag': '0.0.1', + 'nameOverride': 'resources-first-pipeline-scheduled-producer', + 'schedule': '30 3/8 * * *', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { + }, + 'outputTopic': 'resources-first-pipeline-scheduled-producer', + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'name': 'scheduled-producer', + 'namespace': 'example-namespace', + 'prefix': 'resources-first-pipeline-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + 'com/bakdata/kafka/fake': '1.0.0' + }, + 'topics': { + 'resources-first-pipeline-scheduled-producer': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 12, + 'type': 'output', + 'value_schema': 'com.bakdata.fake.Produced' + } + } + }, + 'type': 'scheduled-producer', + 'version': '2.4.2' }, { - "app": { - "autoscaling": { - "consumerGroup": "converter-resources-first-pipeline-converter", - "cooldownPeriod": 300, - "enabled": True, - "lagThreshold": 10000, - "maxReplicas": 1, - "minReplicas": 0, - "offsetResetPolicy": "earliest", - "pollingInterval": 30, - "topics": [ + 'app': { + 'autoscaling': { + 'consumerGroup': 'converter-resources-first-pipeline-converter', + 'cooldownPeriod': 300, + 'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 1, + 'minReplicas': 0, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 'topics': [ + ] + }, + 'commandLine': { + 'CONVERT_XML': True + }, + 'nameOverride': 'resources-first-pipeline-converter', + 'resources': { + 'limits': { + 'memory': '2G' + }, + 'requests': { + 'memory': '2G' + } + }, + 'streams': { + 'brokers': 
'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-first-pipeline-converter-error', + 'inputTopics': [ + 'resources-first-pipeline-scheduled-producer' ], - }, - "commandLine": { - "CONVERT_XML": True, - }, - "nameOverride": "resources-first-pipeline-converter", - "resources": { - "limits": { - "memory": "2G", - }, - "requests": { - "memory": "2G", - }, - }, - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "config": { - "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", - }, - "errorTopic": "resources-first-pipeline-converter-error", - "inputTopics": [ - "resources-first-pipeline-scheduled-producer", - ], - "outputTopic": "resources-first-pipeline-converter", - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "name": "converter", - "namespace": "example-namespace", - "prefix": "resources-first-pipeline-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - }, - "topics": { - "resources-first-pipeline-converter": { - "configs": { - "cleanup.policy": "compact,delete", - "retention.ms": "-1", - }, - "partitions_count": 50, - "type": "output", - }, - "resources-first-pipeline-converter-error": { - "configs": { - "cleanup.policy": "compact,delete", - }, - "partitions_count": 10, - "type": "error", - "value_schema": "com.bakdata.kafka.DeadLetter", - }, - }, - }, - "type": "converter", - "version": "2.4.2", + 'outputTopic': 'resources-first-pipeline-converter', + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'name': 'converter', + 'namespace': 'example-namespace', + 'prefix': 'resources-first-pipeline-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-first-pipeline-converter': { + 'configs': { + 'cleanup.policy': 'compact,delete', + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' + }, + 'resources-first-pipeline-converter-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 10, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'converter', + 'version': '2.4.2' }, { - "app": { - "autoscaling": { - "consumerGroup": "filter-resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name", - "cooldownPeriod": 300, - "enabled": True, - "lagThreshold": 10000, - "maxReplicas": 4, - "minReplicas": 4, - "offsetResetPolicy": "earliest", - "pollingInterval": 30, - "topics": [ - "resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name", - ], - }, - "commandLine": { - "TYPE": "nothing", - }, - "image": "fake-registry/filter", - "imageTag": "2.4.1", - "nameOverride": "resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name", - "replicaCount": 4, - "resources": { - "requests": { - "memory": "3G", - }, - }, 
- "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "config": { - "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", - }, - "errorTopic": "resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-error", - "inputTopics": [ - "resources-first-pipeline-converter", + 'app': { + 'autoscaling': { + 'consumerGroup': 'filter-resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name', + 'cooldownPeriod': 300, + 'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 4, + 'minReplicas': 4, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 'topics': [ + 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name' + ] + }, + 'commandLine': { + 'TYPE': 'nothing' + }, + 'image': 'fake-registry/filter', + 'imageTag': '2.4.1', + 'nameOverride': 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name', + 'replicaCount': 4, + 'resources': { + 'requests': { + 'memory': '3G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-error', + 'inputTopics': [ + 'resources-first-pipeline-converter' ], - "outputTopic": "resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name", - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "name": "a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name", - "namespace": "example-namespace", - "prefix": "resources-first-pipeline-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - }, - "topics": { - "resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name": { - "configs": { - "retention.ms": "-1", - }, - "partitions_count": 50, - "type": "output", - }, - "resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-error": { - "configs": { - "cleanup.policy": "compact,delete", - }, - "partitions_count": 1, - "type": "error", - "value_schema": "com.bakdata.kafka.DeadLetter", - }, - }, - }, - "type": "filter", - "version": "2.4.2", - }, - ], + 'outputTopic': 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name', + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'name': 
'a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name', + 'namespace': 'example-namespace', + 'prefix': 'resources-first-pipeline-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name': { + 'configs': { + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' + }, + 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'filter', + 'version': '2.4.2' + } + ] } -snapshots["TestPipeline.test_model_serialization test-pipeline"] = { - "components": [ +snapshots['TestPipeline.test_model_serialization test-pipeline'] = { + 'components': [ { - "app": { - "nameOverride": "resources-pipeline-with-paths-account-producer", - "streams": { - "brokers": "test", - "extraOutputTopics": { - }, - "outputTopic": "out", - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "name": "account-producer", - "namespace": "test", - "prefix": "resources-pipeline-with-paths-", - "repo_config": { - "repo_auth_flags": { - "ca_file": "my-cert.cert", - "insecure_skip_tls_verify": False, - "password": "$CI_JOB_TOKEN", - "username": "masked", - }, - "repository_name": "masked", - "url": "masked", - }, - "type": "producer-app", - "version": "2.4.2", - }, - ], + 'app': { + 'nameOverride': 'resources-pipeline-with-paths-account-producer', + 'streams': { + 'brokers': 'test', + 'extraOutputTopics': { + }, + 'outputTopic': 'out', + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'name': 'account-producer', + 'namespace': 'test', + 'prefix': 'resources-pipeline-with-paths-', + 'repo_config': { + 'repo_auth_flags': { + 'ca_file': 'my-cert.cert', + 'insecure_skip_tls_verify': False, + 'password': '$CI_JOB_TOKEN', + 'username': 'masked' + }, + 'repository_name': 'masked', + 'url': 'masked' + }, + 'type': 'producer-app', + 'version': '2.4.2' + } + ] } -snapshots["TestPipeline.test_no_input_topic test-pipeline"] = { - "components": [ +snapshots['TestPipeline.test_no_input_topic test-pipeline'] = { + 'components': [ { - "app": { - "commandLine": { - "CONVERT_XML": True, - }, - "nameOverride": "resources-no-input-topic-pipeline-app1", - "resources": { - "limits": { - "memory": "2G", - }, - "requests": { - "memory": "2G", - }, - }, - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "config": { - "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", - }, - "errorTopic": "resources-no-input-topic-pipeline-app1-error", - "inputPattern": ".*", - "outputTopic": "example-output", - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "from": { - "components": { - }, - "topics": { - ".*": { - "type": "pattern", - }, - }, - }, - "name": "app1", - "namespace": "example-namespace", - "prefix": "resources-no-input-topic-pipeline-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - 
"repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - }, - "topics": { - "example-output": { - "configs": { - }, - "type": "output", - }, - "resources-no-input-topic-pipeline-app1-error": { - "configs": { - "cleanup.policy": "compact,delete", - }, - "partitions_count": 1, - "type": "error", - "value_schema": "com.bakdata.kafka.DeadLetter", - }, - }, - }, - "type": "streams-app", - "version": "2.4.2", + 'app': { + 'commandLine': { + 'CONVERT_XML': True + }, + 'nameOverride': 'resources-no-input-topic-pipeline-app1', + 'resources': { + 'limits': { + 'memory': '2G' + }, + 'requests': { + 'memory': '2G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-no-input-topic-pipeline-app1-error', + 'inputPattern': '.*', + 'outputTopic': 'example-output', + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'from': { + 'components': { + }, + 'topics': { + '.*': { + 'type': 'pattern' + } + } + }, + 'name': 'app1', + 'namespace': 'example-namespace', + 'prefix': 'resources-no-input-topic-pipeline-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'example-output': { + 'configs': { + }, + 'type': 'output' + }, + 'resources-no-input-topic-pipeline-app1-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'streams-app', + 'version': '2.4.2' }, { - "app": { - "nameOverride": "resources-no-input-topic-pipeline-app2", - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "config": { - "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", - }, - "errorTopic": "resources-no-input-topic-pipeline-app2-error", - "extraOutputTopics": { - "extra": "example-output-extra", - "test-output": "test-output-extra", - }, - "inputTopics": [ - "example-output", + 'app': { + 'nameOverride': 'resources-no-input-topic-pipeline-app2', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-no-input-topic-pipeline-app2-error', + 'extraOutputTopics': { + 'extra': 'example-output-extra', + 'test-output': 'test-output-extra' + }, + 'inputTopics': [ + 'example-output' ], - "schemaRegistryUrl": "http://localhost:8081", - }, + 'schemaRegistryUrl': 'http://localhost:8081' + } }, - "name": "app2", - "namespace": "example-namespace", - "prefix": "resources-no-input-topic-pipeline-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, + 'name': 'app2', + 'namespace': 'example-namespace', + 'prefix': 'resources-no-input-topic-pipeline-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' }, - "to": { - "models": { + 'to': { + 'models': { }, - "topics": { - "example-output-extra": { - "configs": { 
+ 'topics': { + 'example-output-extra': { + 'configs': { }, - "role": "extra", + 'role': 'extra' }, - "resources-no-input-topic-pipeline-app2-error": { - "configs": { - "cleanup.policy": "compact,delete", + 'resources-no-input-topic-pipeline-app2-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' }, - "partitions_count": 1, - "type": "error", - "value_schema": "com.bakdata.kafka.DeadLetter", + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' }, - "test-output-extra": { - "configs": { + 'test-output-extra': { + 'configs': { }, - "role": "test-output", - }, - }, + 'role': 'test-output' + } + } }, - "type": "streams-app", - "version": "2.4.2", - }, - ], + 'type': 'streams-app', + 'version': '2.4.2' + } + ] } -snapshots["TestPipeline.test_no_user_defined_components test-pipeline"] = { - "components": [ +snapshots['TestPipeline.test_no_user_defined_components test-pipeline'] = { + 'components': [ { - "app": { - "image": "fake-image", - "nameOverride": "resources-no-user-defined-components-streams-app", - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "config": { - "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", - }, - "errorTopic": "resources-no-user-defined-components-streams-app-error", - "inputTopics": [ - "example-topic", + 'app': { + 'image': 'fake-image', + 'nameOverride': 'resources-no-user-defined-components-streams-app', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-no-user-defined-components-streams-app-error', + 'inputTopics': [ + 'example-topic' ], - "outputTopic": "example-output", - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "from": { - "components": { - }, - "topics": { - "example-topic": { - "type": "input", - }, - }, - }, - "name": "streams-app", - "namespace": "example-namespace", - "prefix": "resources-no-user-defined-components-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - }, - "topics": { - "example-output": { - "configs": { - }, - "type": "output", - }, - "resources-no-user-defined-components-streams-app-error": { - "configs": { - "cleanup.policy": "compact,delete", - }, - "partitions_count": 1, - "type": "error", - "value_schema": "com.bakdata.kafka.DeadLetter", - }, - }, - }, - "type": "streams-app", - "version": "2.4.2", - }, - ], + 'outputTopic': 'example-output', + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'from': { + 'components': { + }, + 'topics': { + 'example-topic': { + 'type': 'input' + } + } + }, + 'name': 'streams-app', + 'namespace': 'example-namespace', + 'prefix': 'resources-no-user-defined-components-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'example-output': { + 'configs': { + }, + 'type': 'output' + }, + 'resources-no-user-defined-components-streams-app-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'streams-app', + 'version': '2.4.2' 
+ } + ] } -snapshots["TestPipeline.test_pipelines_with_env_values test-pipeline"] = { - "components": [ +snapshots['TestPipeline.test_pipelines_with_env_values test-pipeline'] = { + 'components': [ { - "app": { - "commandLine": { - "FAKE_ARG": "override-arg", - }, - "image": "example-registry/fake-image", - "imageTag": "0.0.1", - "nameOverride": "resources-pipeline-with-envs-input-producer", - "schedule": "20 3/8 * * *", - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "extraOutputTopics": { - }, - "outputTopic": "resources-pipeline-with-envs-input-producer", - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "name": "input-producer", - "namespace": "example-namespace", - "prefix": "resources-pipeline-with-envs-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - "com/bakdata/kafka/fake": "1.0.0", - }, - "topics": { - "resources-pipeline-with-envs-input-producer": { - "configs": { - "cleanup.policy": "compact,delete", - }, - "partitions_count": 12, - "type": "output", - "value_schema": "com.bakdata.fake.Produced", - }, - }, - }, - "type": "scheduled-producer", - "version": "2.4.2", + 'app': { + 'commandLine': { + 'FAKE_ARG': 'override-arg' + }, + 'image': 'example-registry/fake-image', + 'imageTag': '0.0.1', + 'nameOverride': 'resources-pipeline-with-envs-input-producer', + 'schedule': '20 3/8 * * *', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { + }, + 'outputTopic': 'resources-pipeline-with-envs-input-producer', + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'name': 'input-producer', + 'namespace': 'example-namespace', + 'prefix': 'resources-pipeline-with-envs-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + 'com/bakdata/kafka/fake': '1.0.0' + }, + 'topics': { + 'resources-pipeline-with-envs-input-producer': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 12, + 'type': 'output', + 'value_schema': 'com.bakdata.fake.Produced' + } + } + }, + 'type': 'scheduled-producer', + 'version': '2.4.2' }, { - "app": { - "autoscaling": { - "consumerGroup": "converter-resources-pipeline-with-envs-converter", - "cooldownPeriod": 300, - "enabled": True, - "lagThreshold": 10000, - "maxReplicas": 1, - "minReplicas": 0, - "offsetResetPolicy": "earliest", - "pollingInterval": 30, - "topics": [ + 'app': { + 'autoscaling': { + 'consumerGroup': 'converter-resources-pipeline-with-envs-converter', + 'cooldownPeriod': 300, + 'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 1, + 'minReplicas': 0, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 'topics': [ + ] + }, + 'commandLine': { + 'CONVERT_XML': True + }, + 'nameOverride': 'resources-pipeline-with-envs-converter', + 'resources': { + 'limits': { + 'memory': '2G' + }, + 'requests': { + 'memory': '2G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-pipeline-with-envs-converter-error', + 'inputTopics': [ + 'resources-pipeline-with-envs-input-producer' ], - }, - 
"commandLine": { - "CONVERT_XML": True, - }, - "nameOverride": "resources-pipeline-with-envs-converter", - "resources": { - "limits": { - "memory": "2G", - }, - "requests": { - "memory": "2G", - }, - }, - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "config": { - "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", - }, - "errorTopic": "resources-pipeline-with-envs-converter-error", - "inputTopics": [ - "resources-pipeline-with-envs-input-producer", - ], - "outputTopic": "resources-pipeline-with-envs-converter", - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "name": "converter", - "namespace": "example-namespace", - "prefix": "resources-pipeline-with-envs-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - }, - "topics": { - "resources-pipeline-with-envs-converter": { - "configs": { - "cleanup.policy": "compact,delete", - "retention.ms": "-1", - }, - "partitions_count": 50, - "type": "output", - }, - "resources-pipeline-with-envs-converter-error": { - "configs": { - "cleanup.policy": "compact,delete", - }, - "partitions_count": 10, - "type": "error", - "value_schema": "com.bakdata.kafka.DeadLetter", - }, - }, - }, - "type": "converter", - "version": "2.4.2", + 'outputTopic': 'resources-pipeline-with-envs-converter', + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'name': 'converter', + 'namespace': 'example-namespace', + 'prefix': 'resources-pipeline-with-envs-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-pipeline-with-envs-converter': { + 'configs': { + 'cleanup.policy': 'compact,delete', + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' + }, + 'resources-pipeline-with-envs-converter-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 10, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'converter', + 'version': '2.4.2' }, { - "app": { - "autoscaling": { - "consumerGroup": "filter-resources-pipeline-with-envs-filter", - "cooldownPeriod": 300, - "enabled": True, - "lagThreshold": 10000, - "maxReplicas": 4, - "minReplicas": 4, - "offsetResetPolicy": "earliest", - "pollingInterval": 30, - "topics": [ - "resources-pipeline-with-envs-filter", + 'app': { + 'autoscaling': { + 'consumerGroup': 'filter-resources-pipeline-with-envs-filter', + 'cooldownPeriod': 300, + 'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 4, + 'minReplicas': 4, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 'topics': [ + 'resources-pipeline-with-envs-filter' + ] + }, + 'commandLine': { + 'TYPE': 'nothing' + }, + 'image': 'fake-registry/filter', + 'imageTag': '2.4.1', + 'nameOverride': 'resources-pipeline-with-envs-filter', + 'replicaCount': 4, + 'resources': { + 'requests': { + 'memory': '3G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-pipeline-with-envs-filter-error', + 'inputTopics': [ + 'resources-pipeline-with-envs-converter' ], - }, - "commandLine": { - 
"TYPE": "nothing", - }, - "image": "fake-registry/filter", - "imageTag": "2.4.1", - "nameOverride": "resources-pipeline-with-envs-filter", - "replicaCount": 4, - "resources": { - "requests": { - "memory": "3G", - }, - }, - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "config": { - "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", - }, - "errorTopic": "resources-pipeline-with-envs-filter-error", - "inputTopics": [ - "resources-pipeline-with-envs-converter", - ], - "outputTopic": "resources-pipeline-with-envs-filter", - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "name": "filter", - "namespace": "example-namespace", - "prefix": "resources-pipeline-with-envs-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - }, - "topics": { - "resources-pipeline-with-envs-filter": { - "configs": { - "retention.ms": "-1", - }, - "partitions_count": 50, - "type": "output", - }, - "resources-pipeline-with-envs-filter-error": { - "configs": { - "cleanup.policy": "compact,delete", - }, - "partitions_count": 1, - "type": "error", - "value_schema": "com.bakdata.kafka.DeadLetter", - }, - }, - }, - "type": "filter", - "version": "2.4.2", - }, - ], + 'outputTopic': 'resources-pipeline-with-envs-filter', + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'name': 'filter', + 'namespace': 'example-namespace', + 'prefix': 'resources-pipeline-with-envs-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-pipeline-with-envs-filter': { + 'configs': { + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' + }, + 'resources-pipeline-with-envs-filter-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'filter', + 'version': '2.4.2' + } + ] } -snapshots["TestPipeline.test_prefix_pipeline_component test-pipeline"] = { - "components": [ +snapshots['TestPipeline.test_prefix_pipeline_component test-pipeline'] = { + 'components': [ { - "app": { - "debug": True, - "image": "${DOCKER_REGISTRY}/atm-demo-accountproducer", - "imageTag": "1.0.0", - "nameOverride": "from-pipeline-component-account-producer", - "prometheus": { - "jmx": { - "enabled": False, - }, - }, - "replicaCount": 1, - "schedule": "0 12 * * *", - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "extraOutputTopics": { - }, - "schemaRegistryUrl": "http://localhost:8081", - }, - "suspend": True, - }, - "name": "account-producer", - "namespace": "${NAMESPACE}", - "prefix": "from-pipeline-component-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "type": "producer-app", - "version": "2.9.0", - }, - ], + 'app': { + 'debug': True, + 'image': '${DOCKER_REGISTRY}/atm-demo-accountproducer', + 'imageTag': '1.0.0', + 'nameOverride': 'from-pipeline-component-account-producer', + 'prometheus': { + 'jmx': { + 'enabled': False + } + }, + 'replicaCount': 1, + 'schedule': '0 12 * * *', + 
'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { + }, + 'schemaRegistryUrl': 'http://localhost:8081' + }, + 'suspend': True + }, + 'name': 'account-producer', + 'namespace': '${NAMESPACE}', + 'prefix': 'from-pipeline-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'type': 'producer-app', + 'version': '2.9.0' + } + ] } -snapshots["TestPipeline.test_read_from_component test-pipeline"] = { - "components": [ +snapshots['TestPipeline.test_read_from_component test-pipeline'] = { + 'components': [ { - "app": { - "nameOverride": "resources-read-from-component-producer1", - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "extraOutputTopics": { - }, - "outputTopic": "resources-read-from-component-producer1", - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "name": "producer1", - "namespace": "example-namespace", - "prefix": "resources-read-from-component-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - }, - "topics": { - "resources-read-from-component-producer1": { - "configs": { - }, - "type": "output", - }, - }, - }, - "type": "producer-app", - "version": "2.4.2", + 'app': { + 'nameOverride': 'resources-read-from-component-producer1', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { + }, + 'outputTopic': 'resources-read-from-component-producer1', + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'name': 'producer1', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-read-from-component-producer1': { + 'configs': { + }, + 'type': 'output' + } + } + }, + 'type': 'producer-app', + 'version': '2.4.2' }, { - "app": { - "nameOverride": "producer2", - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "extraOutputTopics": { - }, - "outputTopic": "resources-read-from-component-producer2", - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "name": "producer2", - "namespace": "example-namespace", - "prefix": "", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - }, - "topics": { - "resources-read-from-component-producer2": { - "configs": { - }, - "type": "output", - }, - }, - }, - "type": "producer-app", - "version": "2.4.2", + 'app': { + 'nameOverride': 'producer2', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { + }, + 'outputTopic': 'resources-read-from-component-producer2', + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'name': 'producer2', + 'namespace': 'example-namespace', + 'prefix': '', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 
'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-read-from-component-producer2': { + 'configs': { + }, + 'type': 'output' + } + } + }, + 'type': 'producer-app', + 'version': '2.4.2' }, { - "app": { - "autoscaling": { - "consumerGroup": "filter-resources-read-from-component-inflate-step", - "cooldownPeriod": 300, - "enabled": True, - "lagThreshold": 10000, - "maxReplicas": 1, - "minReplicas": 0, - "offsetResetPolicy": "earliest", - "pollingInterval": 30, - "topics": [ - "resources-read-from-component-inflate-step", + 'app': { + 'autoscaling': { + 'consumerGroup': 'filter-resources-read-from-component-inflate-step', + 'cooldownPeriod': 300, + 'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 1, + 'minReplicas': 0, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 'topics': [ + 'resources-read-from-component-inflate-step' + ] + }, + 'image': 'fake-registry/filter', + 'imageTag': '2.4.1', + 'nameOverride': 'resources-read-from-component-inflate-step', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-read-from-component-inflate-step-error', + 'inputTopics': [ + 'resources-read-from-component-producer2' ], - }, - "image": "fake-registry/filter", - "imageTag": "2.4.1", - "nameOverride": "resources-read-from-component-inflate-step", - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "config": { - "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", - }, - "errorTopic": "resources-read-from-component-inflate-step-error", - "inputTopics": [ - "resources-read-from-component-producer2", - ], - "outputTopic": "resources-read-from-component-inflate-step", - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "name": "inflate-step", - "namespace": "example-namespace", - "prefix": "resources-read-from-component-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - }, - "topics": { - "resources-read-from-component-inflate-step": { - "configs": { - "retention.ms": "-1", - }, - "partitions_count": 50, - "type": "output", - }, - "resources-read-from-component-inflate-step-error": { - "configs": { - "cleanup.policy": "compact,delete", - }, - "partitions_count": 1, - "type": "error", - "value_schema": "com.bakdata.kafka.DeadLetter", - }, - }, - }, - "type": "should-inflate", - "version": "2.4.2", + 'outputTopic': 'resources-read-from-component-inflate-step', + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'name': 'inflate-step', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-read-from-component-inflate-step': { + 'configs': { + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' + }, + 'resources-read-from-component-inflate-step-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } 
+ } + }, + 'type': 'should-inflate', + 'version': '2.4.2' }, { - "app": { - "batch.size": "2000", - "behavior.on.malformed.documents": "warn", - "behavior.on.null.values": "delete", - "connection.compression": "true", - "connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector", - "key.ignore": "false", - "linger.ms": "5000", - "max.buffered.records": "20000", - "name": "resources-read-from-component-inflate-step-inflated-sink-connector", - "read.timeout.ms": "120000", - "tasks.max": "1", - "topics": "resources-read-from-component-inflate-step", - "transforms.changeTopic.replacement": "resources-read-from-component-inflate-step-index-v1", - }, - "name": "inflate-step-inflated-sink-connector", - "namespace": "example-namespace", - "prefix": "resources-read-from-component-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-kafka-connect-resetter", - "url": "https://bakdata.github.io/kafka-connect-resetter/", - }, - "resetter_values": { - }, - "to": { - "models": { - }, - "topics": { - "inflate-step-inflated-sink-connector": { - "configs": { - }, - "role": "test", - }, - "kafka-sink-connector": { - "configs": { - }, - "type": "output", - }, - }, - }, - "type": "kafka-sink-connector", - "version": "1.0.4", + 'app': { + 'batch.size': '2000', + 'behavior.on.malformed.documents': 'warn', + 'behavior.on.null.values': 'delete', + 'connection.compression': 'true', + 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector', + 'key.ignore': 'false', + 'linger.ms': '5000', + 'max.buffered.records': '20000', + 'name': 'resources-read-from-component-inflate-step-inflated-sink-connector', + 'read.timeout.ms': '120000', + 'tasks.max': '1', + 'topics': 'resources-read-from-component-inflate-step', + 'transforms.changeTopic.replacement': 'resources-read-from-component-inflate-step-index-v1' + }, + 'name': 'inflate-step-inflated-sink-connector', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-kafka-connect-resetter', + 'url': 'https://bakdata.github.io/kafka-connect-resetter/' + }, + 'resetter_values': { + }, + 'to': { + 'models': { + }, + 'topics': { + 'inflate-step-inflated-sink-connector': { + 'configs': { + }, + 'role': 'test' + }, + 'kafka-sink-connector': { + 'configs': { + }, + 'type': 'output' + } + } + }, + 'type': 'kafka-sink-connector', + 'version': '1.0.4' }, { - "app": { - "nameOverride": "resources-read-from-component-inflate-step-inflated-streams-app", - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "config": { - "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", - }, - "errorTopic": "resources-read-from-component-inflate-step-inflated-streams-app-error", - "inputTopics": [ - "kafka-sink-connector", + 'app': { + 'nameOverride': 'resources-read-from-component-inflate-step-inflated-streams-app', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-read-from-component-inflate-step-inflated-streams-app-error', + 'inputTopics': [ + 'kafka-sink-connector' ], - "outputTopic": "resources-read-from-component-inflate-step-inflate-step-inflated-streams-app", - "schemaRegistryUrl": "http://localhost:8081", - }, + 
'outputTopic': 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app', + 'schemaRegistryUrl': 'http://localhost:8081' + } }, - "name": "inflate-step-inflated-streams-app", - "namespace": "example-namespace", - "prefix": "resources-read-from-component-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, + 'name': 'inflate-step-inflated-streams-app', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' }, - "to": { - "models": { + 'to': { + 'models': { }, - "topics": { - "resources-read-from-component-inflate-step-inflate-step-inflated-streams-app": { - "configs": { + 'topics': { + 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app': { + 'configs': { }, - "type": "output", + 'type': 'output' }, - "resources-read-from-component-inflate-step-inflated-streams-app-error": { - "configs": { - "cleanup.policy": "compact,delete", + 'resources-read-from-component-inflate-step-inflated-streams-app-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' }, - "partitions_count": 1, - "type": "error", - "value_schema": "com.bakdata.kafka.DeadLetter", - }, - }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } }, - "type": "streams-app", - "version": "2.4.2", + 'type': 'streams-app', + 'version': '2.4.2' }, { - "app": { - "autoscaling": { - "consumerGroup": "filter-resources-read-from-component-inflate-step-without-prefix", - "cooldownPeriod": 300, - "enabled": True, - "lagThreshold": 10000, - "maxReplicas": 1, - "minReplicas": 0, - "offsetResetPolicy": "earliest", - "pollingInterval": 30, - "topics": [ - "resources-read-from-component-inflate-step-without-prefix", - ], - }, - "image": "fake-registry/filter", - "imageTag": "2.4.1", - "nameOverride": "inflate-step-without-prefix", - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "config": { - "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", - }, - "errorTopic": "resources-read-from-component-inflate-step-without-prefix-error", - "inputTopics": [ - "resources-read-from-component-inflate-step-inflate-step-inflated-streams-app", + 'app': { + 'autoscaling': { + 'consumerGroup': 'filter-resources-read-from-component-inflate-step-without-prefix', + 'cooldownPeriod': 300, + 'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 1, + 'minReplicas': 0, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 'topics': [ + 'resources-read-from-component-inflate-step-without-prefix' + ] + }, + 'image': 'fake-registry/filter', + 'imageTag': '2.4.1', + 'nameOverride': 'inflate-step-without-prefix', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-read-from-component-inflate-step-without-prefix-error', + 'inputTopics': [ + 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app' ], - "outputTopic": "resources-read-from-component-inflate-step-without-prefix", - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "name": "inflate-step-without-prefix", - 
"namespace": "example-namespace", - "prefix": "", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - }, - "topics": { - "resources-read-from-component-inflate-step-without-prefix": { - "configs": { - "retention.ms": "-1", - }, - "partitions_count": 50, - "type": "output", - }, - "resources-read-from-component-inflate-step-without-prefix-error": { - "configs": { - "cleanup.policy": "compact,delete", - }, - "partitions_count": 1, - "type": "error", - "value_schema": "com.bakdata.kafka.DeadLetter", - }, - }, - }, - "type": "should-inflate", - "version": "2.4.2", + 'outputTopic': 'resources-read-from-component-inflate-step-without-prefix', + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'name': 'inflate-step-without-prefix', + 'namespace': 'example-namespace', + 'prefix': '', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-read-from-component-inflate-step-without-prefix': { + 'configs': { + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' + }, + 'resources-read-from-component-inflate-step-without-prefix-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'should-inflate', + 'version': '2.4.2' }, { - "app": { - "batch.size": "2000", - "behavior.on.malformed.documents": "warn", - "behavior.on.null.values": "delete", - "connection.compression": "true", - "connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector", - "key.ignore": "false", - "linger.ms": "5000", - "max.buffered.records": "20000", - "name": "resources-read-from-component-inflate-step-without-prefix-inflated-sink-connector", - "read.timeout.ms": "120000", - "tasks.max": "1", - "topics": "resources-read-from-component-inflate-step-without-prefix", - "transforms.changeTopic.replacement": "resources-read-from-component-inflate-step-without-prefix-index-v1", - }, - "name": "inflate-step-without-prefix-inflated-sink-connector", - "namespace": "example-namespace", - "prefix": "resources-read-from-component-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-kafka-connect-resetter", - "url": "https://bakdata.github.io/kafka-connect-resetter/", - }, - "resetter_values": { - }, - "to": { - "models": { - }, - "topics": { - "inflate-step-without-prefix-inflated-sink-connector": { - "configs": { - }, - "role": "test", - }, - "kafka-sink-connector": { - "configs": { - }, - "type": "output", - }, - }, - }, - "type": "kafka-sink-connector", - "version": "1.0.4", + 'app': { + 'batch.size': '2000', + 'behavior.on.malformed.documents': 'warn', + 'behavior.on.null.values': 'delete', + 'connection.compression': 'true', + 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector', + 'key.ignore': 'false', + 'linger.ms': '5000', + 'max.buffered.records': '20000', + 'name': 'resources-read-from-component-inflate-step-without-prefix-inflated-sink-connector', + 'read.timeout.ms': '120000', + 'tasks.max': '1', + 'topics': 'resources-read-from-component-inflate-step-without-prefix', + 'transforms.changeTopic.replacement': 
'resources-read-from-component-inflate-step-without-prefix-index-v1' + }, + 'name': 'inflate-step-without-prefix-inflated-sink-connector', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-kafka-connect-resetter', + 'url': 'https://bakdata.github.io/kafka-connect-resetter/' + }, + 'resetter_values': { + }, + 'to': { + 'models': { + }, + 'topics': { + 'inflate-step-without-prefix-inflated-sink-connector': { + 'configs': { + }, + 'role': 'test' + }, + 'kafka-sink-connector': { + 'configs': { + }, + 'type': 'output' + } + } + }, + 'type': 'kafka-sink-connector', + 'version': '1.0.4' }, { - "app": { - "nameOverride": "resources-read-from-component-inflate-step-without-prefix-inflated-streams-app", - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "config": { - "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", - }, - "errorTopic": "resources-read-from-component-inflate-step-without-prefix-inflated-streams-app-error", - "inputTopics": [ - "kafka-sink-connector", + 'app': { + 'nameOverride': 'resources-read-from-component-inflate-step-without-prefix-inflated-streams-app', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-read-from-component-inflate-step-without-prefix-inflated-streams-app-error', + 'inputTopics': [ + 'kafka-sink-connector' ], - "outputTopic": "inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app", - "schemaRegistryUrl": "http://localhost:8081", - }, + 'outputTopic': 'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app', + 'schemaRegistryUrl': 'http://localhost:8081' + } }, - "name": "inflate-step-without-prefix-inflated-streams-app", - "namespace": "example-namespace", - "prefix": "resources-read-from-component-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, + 'name': 'inflate-step-without-prefix-inflated-streams-app', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' }, - "to": { - "models": { + 'to': { + 'models': { }, - "topics": { - "inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app": { - "configs": { + 'topics': { + 'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app': { + 'configs': { }, - "type": "output", + 'type': 'output' }, - "resources-read-from-component-inflate-step-without-prefix-inflated-streams-app-error": { - "configs": { - "cleanup.policy": "compact,delete", + 'resources-read-from-component-inflate-step-without-prefix-inflated-streams-app-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' }, - "partitions_count": 1, - "type": "error", - "value_schema": "com.bakdata.kafka.DeadLetter", - }, - }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } }, - "type": "streams-app", - "version": "2.4.2", + 'type': 'streams-app', + 'version': '2.4.2' }, { - "app": { - "nameOverride": 
"resources-read-from-component-consumer1", - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "config": { - "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", - }, - "errorTopic": "resources-read-from-component-consumer1-error", - "inputTopics": [ - "resources-read-from-component-producer1", + 'app': { + 'nameOverride': 'resources-read-from-component-consumer1', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-read-from-component-consumer1-error', + 'inputTopics': [ + 'resources-read-from-component-producer1' ], - "outputTopic": "resources-read-from-component-consumer1", - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "from": { - "components": { - "producer1": { - "type": "input", - }, - }, - "topics": { - }, - }, - "name": "consumer1", - "namespace": "example-namespace", - "prefix": "resources-read-from-component-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - }, - "topics": { - "resources-read-from-component-consumer1": { - "configs": { - }, - "type": "output", - }, - "resources-read-from-component-consumer1-error": { - "configs": { - "cleanup.policy": "compact,delete", - }, - "partitions_count": 1, - "type": "error", - "value_schema": "com.bakdata.kafka.DeadLetter", - }, - }, - }, - "type": "streams-app", - "version": "2.4.2", + 'outputTopic': 'resources-read-from-component-consumer1', + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'from': { + 'components': { + 'producer1': { + 'type': 'input' + } + }, + 'topics': { + } + }, + 'name': 'consumer1', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-read-from-component-consumer1': { + 'configs': { + }, + 'type': 'output' + }, + 'resources-read-from-component-consumer1-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'streams-app', + 'version': '2.4.2' }, { - "app": { - "nameOverride": "resources-read-from-component-consumer2", - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "config": { - "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", - }, - "errorTopic": "resources-read-from-component-consumer2-error", - "inputTopics": [ - "resources-read-from-component-producer1", - "resources-read-from-component-consumer1", + 'app': { + 'nameOverride': 'resources-read-from-component-consumer2', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-read-from-component-consumer2-error', + 'inputTopics': [ + 'resources-read-from-component-producer1', + 'resources-read-from-component-consumer1' ], - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "from": { - "components": { - "consumer1": { 
- "type": "input", - }, - "producer1": { - "type": "input", - }, - }, - "topics": { - }, - }, - "name": "consumer2", - "namespace": "example-namespace", - "prefix": "resources-read-from-component-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - }, - "topics": { - "resources-read-from-component-consumer2-error": { - "configs": { - "cleanup.policy": "compact,delete", - }, - "partitions_count": 1, - "type": "error", - "value_schema": "com.bakdata.kafka.DeadLetter", - }, - }, - }, - "type": "streams-app", - "version": "2.4.2", + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'from': { + 'components': { + 'consumer1': { + 'type': 'input' + }, + 'producer1': { + 'type': 'input' + } + }, + 'topics': { + } + }, + 'name': 'consumer2', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-read-from-component-consumer2-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'streams-app', + 'version': '2.4.2' }, { - "app": { - "nameOverride": "resources-read-from-component-consumer3", - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "config": { - "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", - }, - "errorTopic": "resources-read-from-component-consumer3-error", - "inputTopics": [ - "resources-read-from-component-producer1", - "resources-read-from-component-producer2", + 'app': { + 'nameOverride': 'resources-read-from-component-consumer3', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-read-from-component-consumer3-error', + 'inputTopics': [ + 'resources-read-from-component-producer1', + 'resources-read-from-component-producer2' ], - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "from": { - "components": { - "producer2": { - "type": "input", - }, - }, - "topics": { - "resources-read-from-component-producer1": { - "type": "input", - }, - }, - }, - "name": "consumer3", - "namespace": "example-namespace", - "prefix": "resources-read-from-component-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - }, - "topics": { - "resources-read-from-component-consumer3-error": { - "configs": { - "cleanup.policy": "compact,delete", - }, - "partitions_count": 1, - "type": "error", - "value_schema": "com.bakdata.kafka.DeadLetter", - }, - }, - }, - "type": "streams-app", - "version": "2.4.2", + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'from': { + 'components': { + 'producer2': { + 'type': 'input' + } + }, + 'topics': { + 'resources-read-from-component-producer1': { + 'type': 'input' + } + } + }, + 'name': 'consumer3', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 
'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-read-from-component-consumer3-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'streams-app', + 'version': '2.4.2' }, { - "app": { - "nameOverride": "resources-read-from-component-consumer4", - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "config": { - "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", - }, - "errorTopic": "resources-read-from-component-consumer4-error", - "inputTopics": [ - "resources-read-from-component-inflate-step-inflate-step-inflated-streams-app", + 'app': { + 'nameOverride': 'resources-read-from-component-consumer4', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-read-from-component-consumer4-error', + 'inputTopics': [ + 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app' ], - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "from": { - "components": { - "inflate-step": { - "type": "input", - }, - }, - "topics": { - }, - }, - "name": "consumer4", - "namespace": "example-namespace", - "prefix": "resources-read-from-component-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - }, - "topics": { - "resources-read-from-component-consumer4-error": { - "configs": { - "cleanup.policy": "compact,delete", - }, - "partitions_count": 1, - "type": "error", - "value_schema": "com.bakdata.kafka.DeadLetter", - }, - }, - }, - "type": "streams-app", - "version": "2.4.2", + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'from': { + 'components': { + 'inflate-step': { + 'type': 'input' + } + }, + 'topics': { + } + }, + 'name': 'consumer4', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-read-from-component-consumer4-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'streams-app', + 'version': '2.4.2' }, { - "app": { - "nameOverride": "resources-read-from-component-consumer5", - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "config": { - "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", - }, - "errorTopic": "resources-read-from-component-consumer5-error", - "inputTopics": [ - "inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app", + 'app': { + 'nameOverride': 'resources-read-from-component-consumer5', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 
'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-read-from-component-consumer5-error', + 'inputTopics': [ + 'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app' ], - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "from": { - "components": { - "inflate-step-without-prefix": { - "type": "input", - }, - }, - "topics": { - }, - }, - "name": "consumer5", - "namespace": "example-namespace", - "prefix": "resources-read-from-component-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - }, - "topics": { - "resources-read-from-component-consumer5-error": { - "configs": { - "cleanup.policy": "compact,delete", - }, - "partitions_count": 1, - "type": "error", - "value_schema": "com.bakdata.kafka.DeadLetter", - }, - }, - }, - "type": "streams-app", - "version": "2.4.2", - }, - ], + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'from': { + 'components': { + 'inflate-step-without-prefix': { + 'type': 'input' + } + }, + 'topics': { + } + }, + 'name': 'consumer5', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-read-from-component-consumer5-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'streams-app', + 'version': '2.4.2' + } + ] } -snapshots["TestPipeline.test_substitute_in_component test-pipeline"] = { - "components": [ +snapshots['TestPipeline.test_substitute_in_component test-pipeline'] = { + 'components': [ { - "app": { - "commandLine": { - "FAKE_ARG": "fake-arg-value", - }, - "image": "example-registry/fake-image", - "imageTag": "0.0.1", - "labels": { - "app_name": "scheduled-producer", - "app_schedule": "30 3/8 * * *", - "app_type": "scheduled-producer", - }, - "nameOverride": "resources-component-type-substitution-scheduled-producer", - "schedule": "30 3/8 * * *", - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "extraOutputTopics": { - }, - "outputTopic": "resources-component-type-substitution-scheduled-producer", - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "name": "scheduled-producer", - "namespace": "example-namespace", - "prefix": "resources-component-type-substitution-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - "com/bakdata/kafka/fake": "1.0.0", - }, - "topics": { - "resources-component-type-substitution-scheduled-producer": { - "configs": { - "cleanup.policy": "compact,delete", - }, - "partitions_count": 12, - "type": "output", - "value_schema": "com.bakdata.fake.Produced", - }, - }, - }, - "type": "scheduled-producer", - "version": "2.4.2", + 'app': { + 'commandLine': { + 'FAKE_ARG': 'fake-arg-value' + }, + 'image': 'example-registry/fake-image', + 'imageTag': '0.0.1', + 'labels': { + 'app_name': 'scheduled-producer', + 'app_schedule': '30 3/8 * * *', + 'app_type': 'scheduled-producer' + }, + 
'nameOverride': 'resources-component-type-substitution-scheduled-producer', + 'schedule': '30 3/8 * * *', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { + }, + 'outputTopic': 'resources-component-type-substitution-scheduled-producer', + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'name': 'scheduled-producer', + 'namespace': 'example-namespace', + 'prefix': 'resources-component-type-substitution-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + 'com/bakdata/kafka/fake': '1.0.0' + }, + 'topics': { + 'resources-component-type-substitution-scheduled-producer': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 12, + 'type': 'output', + 'value_schema': 'com.bakdata.fake.Produced' + } + } + }, + 'type': 'scheduled-producer', + 'version': '2.4.2' }, { - "app": { - "autoscaling": { - "consumerGroup": "converter-resources-component-type-substitution-converter", - "cooldownPeriod": 300, - "enabled": True, - "lagThreshold": 10000, - "maxReplicas": 1, - "minReplicas": 0, - "offsetResetPolicy": "earliest", - "pollingInterval": 30, - "topics": [ + 'app': { + 'autoscaling': { + 'consumerGroup': 'converter-resources-component-type-substitution-converter', + 'cooldownPeriod': 300, + 'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 1, + 'minReplicas': 0, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 'topics': [ + ] + }, + 'commandLine': { + 'CONVERT_XML': True + }, + 'nameOverride': 'resources-component-type-substitution-converter', + 'resources': { + 'limits': { + 'memory': '2G' + }, + 'requests': { + 'memory': '2G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-component-type-substitution-converter-error', + 'inputTopics': [ + 'resources-component-type-substitution-scheduled-producer' ], - }, - "commandLine": { - "CONVERT_XML": True, - }, - "nameOverride": "resources-component-type-substitution-converter", - "resources": { - "limits": { - "memory": "2G", - }, - "requests": { - "memory": "2G", - }, - }, - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "config": { - "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", - }, - "errorTopic": "resources-component-type-substitution-converter-error", - "inputTopics": [ - "resources-component-type-substitution-scheduled-producer", - ], - "outputTopic": "resources-component-type-substitution-converter", - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "name": "converter", - "namespace": "example-namespace", - "prefix": "resources-component-type-substitution-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - }, - "topics": { - "resources-component-type-substitution-converter": { - "configs": { - "cleanup.policy": "compact,delete", - "retention.ms": "-1", - }, - "partitions_count": 50, - "type": "output", - }, - "resources-component-type-substitution-converter-error": { - "configs": { - "cleanup.policy": "compact,delete", - }, - 
"partitions_count": 10, - "type": "error", - "value_schema": "com.bakdata.kafka.DeadLetter", - }, - }, - }, - "type": "converter", - "version": "2.4.2", + 'outputTopic': 'resources-component-type-substitution-converter', + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'name': 'converter', + 'namespace': 'example-namespace', + 'prefix': 'resources-component-type-substitution-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-component-type-substitution-converter': { + 'configs': { + 'cleanup.policy': 'compact,delete', + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' + }, + 'resources-component-type-substitution-converter-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 10, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'converter', + 'version': '2.4.2' }, { - "app": { - "autoscaling": { - "consumerGroup": "filter-resources-component-type-substitution-filter-app", - "cooldownPeriod": 300, - "enabled": True, - "lagThreshold": 10000, - "maxReplicas": 4, - "minReplicas": 4, - "offsetResetPolicy": "earliest", - "pollingInterval": 30, - "topics": [ - "resources-component-type-substitution-filter-app", - ], - }, - "commandLine": { - "TYPE": "nothing", - }, - "image": "fake-registry/filter", - "imageTag": "2.4.1", - "labels": { - "app_name": "filter-app", - "app_resources_requests_memory": "3G", - "app_type": "filter", - "filter": "filter-app-filter", - "test_placeholder_in_placeholder": "filter-app-filter", - }, - "nameOverride": "resources-component-type-substitution-filter-app", - "replicaCount": 4, - "resources": { - "requests": { - "memory": "3G", - }, - }, - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "config": { - "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", - }, - "errorTopic": "resources-component-type-substitution-filter-app-error", - "inputTopics": [ - "resources-component-type-substitution-converter", + 'app': { + 'autoscaling': { + 'consumerGroup': 'filter-resources-component-type-substitution-filter-app', + 'cooldownPeriod': 300, + 'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 4, + 'minReplicas': 4, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 'topics': [ + 'resources-component-type-substitution-filter-app' + ] + }, + 'commandLine': { + 'TYPE': 'nothing' + }, + 'image': 'fake-registry/filter', + 'imageTag': '2.4.1', + 'labels': { + 'app_name': 'filter-app', + 'app_resources_requests_memory': '3G', + 'app_type': 'filter', + 'filter': 'filter-app-filter', + 'test_placeholder_in_placeholder': 'filter-app-filter' + }, + 'nameOverride': 'resources-component-type-substitution-filter-app', + 'replicaCount': 4, + 'resources': { + 'requests': { + 'memory': '3G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-component-type-substitution-filter-app-error', + 'inputTopics': [ + 'resources-component-type-substitution-converter' ], - "outputTopic": "resources-component-type-substitution-filter-app", - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "name": "filter-app", - "namespace": 
"example-namespace", - "prefix": "resources-component-type-substitution-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - }, - "topics": { - "resources-component-type-substitution-filter-app": { - "configs": { - "retention.ms": "-1", - }, - "partitions_count": 50, - "type": "output", - }, - "resources-component-type-substitution-filter-app-error": { - "configs": { - "cleanup.policy": "compact,delete", - }, - "partitions_count": 1, - "type": "error", - "value_schema": "com.bakdata.kafka.DeadLetter", - }, - }, - }, - "type": "filter", - "version": "2.4.2", - }, - ], + 'outputTopic': 'resources-component-type-substitution-filter-app', + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'name': 'filter-app', + 'namespace': 'example-namespace', + 'prefix': 'resources-component-type-substitution-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-component-type-substitution-filter-app': { + 'configs': { + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' + }, + 'resources-component-type-substitution-filter-app-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'filter', + 'version': '2.4.2' + } + ] } -snapshots["TestPipeline.test_with_custom_config_with_absolute_defaults_path test-pipeline"] = { - "components": [ +snapshots['TestPipeline.test_with_custom_config_with_absolute_defaults_path test-pipeline'] = { + 'components': [ { - "app": { - "nameOverride": "resources-custom-config-app1", - "resources": { - "limits": { - "memory": "2G", - }, - "requests": { - "memory": "2G", - }, - }, - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "extraOutputTopics": { - }, - "outputTopic": "app1-test-topic", - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "name": "app1", - "namespace": "development-namespace", - "prefix": "resources-custom-config-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - }, - "topics": { - "app1-test-topic": { - "configs": { - }, - "partitions_count": 3, - "type": "output", - }, - }, - }, - "type": "producer-app", - "version": "2.9.0", + 'app': { + 'nameOverride': 'resources-custom-config-app1', + 'resources': { + 'limits': { + 'memory': '2G' + }, + 'requests': { + 'memory': '2G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { + }, + 'outputTopic': 'app1-test-topic', + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'name': 'app1', + 'namespace': 'development-namespace', + 'prefix': 'resources-custom-config-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'app1-test-topic': { + 'configs': { + }, + 'partitions_count': 3, + 'type': 'output' + } + } + }, + 
'type': 'producer-app', + 'version': '2.9.0' }, { - "app": { - "image": "some-image", - "labels": { - "pipeline": "resources-custom-config", - }, - "nameOverride": "resources-custom-config-app2", - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "errorTopic": "app2-dead-letter-topic", - "inputTopics": [ - "app1-test-topic", + 'app': { + 'image': 'some-image', + 'labels': { + 'pipeline': 'resources-custom-config' + }, + 'nameOverride': 'resources-custom-config-app2', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'errorTopic': 'app2-dead-letter-topic', + 'inputTopics': [ + 'app1-test-topic' ], - "outputTopic": "app2-test-topic", - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "name": "app2", - "namespace": "development-namespace", - "prefix": "resources-custom-config-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - }, - "topics": { - "app2-dead-letter-topic": { - "configs": { - }, - "partitions_count": 1, - "type": "error", - }, - "app2-test-topic": { - "configs": { - }, - "partitions_count": 3, - "type": "output", - }, - }, - }, - "type": "streams-app", - "version": "2.9.0", - }, - ], + 'outputTopic': 'app2-test-topic', + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'name': 'app2', + 'namespace': 'development-namespace', + 'prefix': 'resources-custom-config-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'app2-dead-letter-topic': { + 'configs': { + }, + 'partitions_count': 1, + 'type': 'error' + }, + 'app2-test-topic': { + 'configs': { + }, + 'partitions_count': 3, + 'type': 'output' + } + } + }, + 'type': 'streams-app', + 'version': '2.9.0' + } + ] } -snapshots["TestPipeline.test_with_custom_config_with_relative_defaults_path test-pipeline"] = { - "components": [ +snapshots['TestPipeline.test_with_custom_config_with_relative_defaults_path test-pipeline'] = { + 'components': [ { - "app": { - "nameOverride": "resources-custom-config-app1", - "resources": { - "limits": { - "memory": "2G", - }, - "requests": { - "memory": "2G", - }, - }, - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "extraOutputTopics": { - }, - "outputTopic": "app1-test-topic", - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "name": "app1", - "namespace": "development-namespace", - "prefix": "resources-custom-config-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - }, - "topics": { - "app1-test-topic": { - "configs": { - }, - "partitions_count": 3, - "type": "output", - }, - }, - }, - "type": "producer-app", - "version": "2.9.0", + 'app': { + 'nameOverride': 'resources-custom-config-app1', + 'resources': { + 'limits': { + 'memory': '2G' + }, + 'requests': { + 'memory': '2G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { + }, + 'outputTopic': 'app1-test-topic', + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'name': 'app1', + 
'namespace': 'development-namespace', + 'prefix': 'resources-custom-config-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'app1-test-topic': { + 'configs': { + }, + 'partitions_count': 3, + 'type': 'output' + } + } + }, + 'type': 'producer-app', + 'version': '2.9.0' }, { - "app": { - "image": "some-image", - "labels": { - "pipeline": "resources-custom-config", - }, - "nameOverride": "resources-custom-config-app2", - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "errorTopic": "app2-dead-letter-topic", - "inputTopics": [ - "app1-test-topic", + 'app': { + 'image': 'some-image', + 'labels': { + 'pipeline': 'resources-custom-config' + }, + 'nameOverride': 'resources-custom-config-app2', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'errorTopic': 'app2-dead-letter-topic', + 'inputTopics': [ + 'app1-test-topic' ], - "outputTopic": "app2-test-topic", - "schemaRegistryUrl": "http://localhost:8081", - }, - }, - "name": "app2", - "namespace": "development-namespace", - "prefix": "resources-custom-config-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - }, - "topics": { - "app2-dead-letter-topic": { - "configs": { - }, - "partitions_count": 1, - "type": "error", - }, - "app2-test-topic": { - "configs": { - }, - "partitions_count": 3, - "type": "output", - }, - }, - }, - "type": "streams-app", - "version": "2.9.0", - }, - ], + 'outputTopic': 'app2-test-topic', + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'name': 'app2', + 'namespace': 'development-namespace', + 'prefix': 'resources-custom-config-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'app2-dead-letter-topic': { + 'configs': { + }, + 'partitions_count': 1, + 'type': 'error' + }, + 'app2-test-topic': { + 'configs': { + }, + 'partitions_count': 3, + 'type': 'output' + } + } + }, + 'type': 'streams-app', + 'version': '2.9.0' + } + ] } -snapshots["TestPipeline.test_with_env_defaults test-pipeline"] = { - "components": [ +snapshots['TestPipeline.test_with_env_defaults test-pipeline'] = { + 'components': [ { - "app": { - "image": "fake-image", - "nameOverride": "resources-kafka-connect-sink-streams-app-development", - "streams": { - "brokers": "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092", - "config": { - "large.message.id.generator": "com.bakdata.kafka.MurmurHashIdGenerator", - }, - "errorTopic": "resources-kafka-connect-sink-streams-app-development-error", - "inputTopics": [ - "example-topic", + 'app': { + 'image': 'fake-image', + 'nameOverride': 'resources-kafka-connect-sink-streams-app-development', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-kafka-connect-sink-streams-app-development-error', + 'inputTopics': [ + 'example-topic' ], - "outputTopic": "example-output", - "schemaRegistryUrl": 
"http://localhost:8081", - }, - }, - "from": { - "components": { - }, - "topics": { - "example-topic": { - "type": "input", - }, - }, - }, - "name": "streams-app-development", - "namespace": "development-namespace", - "prefix": "resources-kafka-connect-sink-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/", - }, - "to": { - "models": { - }, - "topics": { - "example-output": { - "configs": { - }, - "type": "output", - }, - "resources-kafka-connect-sink-streams-app-development-error": { - "configs": { - "cleanup.policy": "compact,delete", - }, - "partitions_count": 1, - "type": "error", - "value_schema": "com.bakdata.kafka.DeadLetter", - }, - }, - }, - "type": "streams-app", - "version": "2.9.0", + 'outputTopic': 'example-output', + 'schemaRegistryUrl': 'http://localhost:8081' + } + }, + 'from': { + 'components': { + }, + 'topics': { + 'example-topic': { + 'type': 'input' + } + } + }, + 'name': 'streams-app-development', + 'namespace': 'development-namespace', + 'prefix': 'resources-kafka-connect-sink-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'example-output': { + 'configs': { + }, + 'type': 'output' + }, + 'resources-kafka-connect-sink-streams-app-development-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'streams-app', + 'version': '2.9.0' }, { - "app": { - "batch.size": "2000", - "behavior.on.malformed.documents": "warn", - "behavior.on.null.values": "delete", - "connection.compression": "true", - "connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector", - "key.ignore": "false", - "linger.ms": "5000", - "max.buffered.records": "20000", - "name": "resources-kafka-connect-sink-es-sink-connector", - "read.timeout.ms": "120000", - "tasks.max": "1", - "topics": "example-output", - }, - "name": "es-sink-connector", - "namespace": "example-namespace", - "prefix": "resources-kafka-connect-sink-", - "repo_config": { - "repo_auth_flags": { - "insecure_skip_tls_verify": False, - }, - "repository_name": "bakdata-kafka-connect-resetter", - "url": "https://bakdata.github.io/kafka-connect-resetter/", - }, - "resetter_values": { - }, - "type": "kafka-sink-connector", - "version": "1.0.4", - }, - ], + 'app': { + 'batch.size': '2000', + 'behavior.on.malformed.documents': 'warn', + 'behavior.on.null.values': 'delete', + 'connection.compression': 'true', + 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector', + 'key.ignore': 'false', + 'linger.ms': '5000', + 'max.buffered.records': '20000', + 'name': 'resources-kafka-connect-sink-es-sink-connector', + 'read.timeout.ms': '120000', + 'tasks.max': '1', + 'topics': 'example-output' + }, + 'name': 'es-sink-connector', + 'namespace': 'example-namespace', + 'prefix': 'resources-kafka-connect-sink-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-kafka-connect-resetter', + 'url': 'https://bakdata.github.io/kafka-connect-resetter/' + }, + 'resetter_values': { + }, + 'type': 'kafka-sink-connector', + 'version': '1.0.4' + } + ] } From 2b6ac1ba1914262c16d380faac5397139d3a401e Mon 
Sep 17 00:00:00 2001
From: Ivan Yordanov
Date: Mon, 2 Oct 2023 19:18:00 +0300
Subject: [PATCH 71/76] ci: better access to python version

---
 .github/workflows/ci.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index beaf56544..f6cc5979c 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -38,7 +38,7 @@ jobs:
       - name: Lint (ruff)
         shell: bash
         run: |
-          if [[ "$RUNNER_OS" == "Linux" && $(python -V) == *"3.10"* ]];
+          if [[ "$RUNNER_OS" == "Linux" && "${{ matrix.python-version }}" == "3.10" ]];
           then
             echo "::add-matcher::.github/ruff-matcher.json";
             poetry run pre-commit run --hook-stage manual ruff-ci --all-files --show-diff-on-failure;

From d1aeac5d810f110026b230d62c9e7013af7e023b Mon Sep 17 00:00:00 2001
From: Ivan Yordanov
Date: Mon, 2 Oct 2023 19:25:21 +0300
Subject: [PATCH 72/76] ci: remove ruff-ci pre-commit hook

---
 .github/workflows/ci.yaml | 9 ++++-----
 .pre-commit-config.yaml   | 9 ---------
 2 files changed, 4 insertions(+), 14 deletions(-)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index f6cc5979c..fe87e8436 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -38,14 +38,13 @@ jobs:
       - name: Lint (ruff)
         shell: bash
         run: |
-          if [[ "$RUNNER_OS" == "Linux" && "${{ matrix.python-version }}" == "3.10" ]];
+          if [[ "$RUNNER_OS" == "Linux" && "${{ matrix.python-version }}" == "3.10" ]]
           then
-            echo "::add-matcher::.github/ruff-matcher.json";
-            poetry run pre-commit run --hook-stage manual ruff-ci --all-files --show-diff-on-failure;
+            echo "::add-matcher::.github/ruff-matcher.json"
+            poetry run ruff check . --config pyproject.toml --output-format text --no-fix
           else
-            poetry run pre-commit run ruff --all-files --show-diff-on-failure;
+            poetry run pre-commit run ruff --all-files --show-diff-on-failure
           fi;
-
       - name: Formatting (black)
         run: poetry run pre-commit run black --all-files --show-diff-on-failure
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index b132953fd..11bb390bb 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -9,15 +9,6 @@ repos:
         types_or: [python]
         require_serial: true # run once for all files
         pass_filenames: false
-      - id: ruff-ci
-        name: ruff-ci
-        entry: ruff
-        args: [ check, ".", --config, pyproject.toml, --output-format, text, --no-fix]
-        language: system
-        types_or: [python]
-        require_serial: true # run once for all files
-        pass_filenames: false
-        stages: [manual]
   - repo: local
     hooks:
       - id: black

From c56ef96698e992396708f6da6216daf63e5e6548 Mon Sep 17 00:00:00 2001
From: Ivan Yordanov
Date: Mon, 2 Oct 2023 21:04:23 +0300
Subject: [PATCH 73/76] ci: remove repeating repo definitions pre-commit

---
 .pre-commit-config.yaml | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 11bb390bb..8c709b20a 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -9,8 +9,6 @@ repos:
         types_or: [python]
         require_serial: true # run once for all files
         pass_filenames: false
-  - repo: local
-    hooks:
       - id: black
         name: black
         entry: black
         language: system
         types_or: [python, pyi]
         require_serial: true # run once for all files
         exclude: ^tests/.*snapshots/
-  - repo: local
-    hooks:
       - id: pyright
         name: pyright
         entry: pyright
         language: system
         types: [python]
         require_serial: true # run once for all files
         exclude: ^tests/.*snapshots/
-  - repo: local
-    hooks:
       - id: gen-schema
         name: gen-schema
         entry: python hooks/gen_schema.py
From
a2c5a7f1da43584117fc07b7dd88d41b36c86c86 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Mon, 2 Oct 2023 21:25:44 +0300 Subject: [PATCH 74/76] chore: typing --- tests/component_handlers/schema_handler/test_schema_handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/component_handlers/schema_handler/test_schema_handler.py b/tests/component_handlers/schema_handler/test_schema_handler.py index 8f5b0f29e..faf54ba09 100644 --- a/tests/component_handlers/schema_handler/test_schema_handler.py +++ b/tests/component_handlers/schema_handler/test_schema_handler.py @@ -141,7 +141,7 @@ def test_should_raise_value_error_if_schema_provider_class_not_found(): ], ) def test_should_raise_value_error_when_schema_provider_is_called_and_components_module_is_empty( - components_module, + components_module: str, ): config_enable = PipelineConfig( defaults_path=Path("fake"), From 1d7a04b30a71e4114b329b4d6db394dbe871768b Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Mon, 2 Oct 2023 21:42:21 +0300 Subject: [PATCH 75/76] ci: update ruff --- poetry.lock | 38 +++++++++++++++++++------------------- pyproject.toml | 2 +- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/poetry.lock b/poetry.lock index 7a5328922..9a50b9ae1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1385,28 +1385,28 @@ jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] [[package]] name = "ruff" -version = "0.0.291" +version = "0.0.292" description = "An extremely fast Python linter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.0.291-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:b97d0d7c136a85badbc7fd8397fdbb336e9409b01c07027622f28dcd7db366f2"}, - {file = "ruff-0.0.291-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:6ab44ea607967171e18aa5c80335237be12f3a1523375fa0cede83c5cf77feb4"}, - {file = "ruff-0.0.291-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a04b384f2d36f00d5fb55313d52a7d66236531195ef08157a09c4728090f2ef0"}, - {file = "ruff-0.0.291-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b727c219b43f903875b7503a76c86237a00d1a39579bb3e21ce027eec9534051"}, - {file = "ruff-0.0.291-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87671e33175ae949702774071b35ed4937da06f11851af75cd087e1b5a488ac4"}, - {file = "ruff-0.0.291-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:b75f5801547f79b7541d72a211949754c21dc0705c70eddf7f21c88a64de8b97"}, - {file = "ruff-0.0.291-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b09b94efdcd162fe32b472b2dd5bf1c969fcc15b8ff52f478b048f41d4590e09"}, - {file = "ruff-0.0.291-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d5b56bc3a2f83a7a1d7f4447c54d8d3db52021f726fdd55d549ca87bca5d747"}, - {file = "ruff-0.0.291-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13f0d88e5f367b2dc8c7d90a8afdcfff9dd7d174e324fd3ed8e0b5cb5dc9b7f6"}, - {file = "ruff-0.0.291-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b3eeee1b1a45a247758ecdc3ab26c307336d157aafc61edb98b825cadb153df3"}, - {file = "ruff-0.0.291-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:6c06006350c3bb689765d71f810128c9cdf4a1121fd01afc655c87bab4fb4f83"}, - {file = "ruff-0.0.291-py3-none-musllinux_1_2_i686.whl", hash = "sha256:fd17220611047de247b635596e3174f3d7f2becf63bd56301fc758778df9b629"}, - {file = "ruff-0.0.291-py3-none-musllinux_1_2_x86_64.whl", hash = 
"sha256:5383ba67ad360caf6060d09012f1fb2ab8bd605ab766d10ca4427a28ab106e0b"}, - {file = "ruff-0.0.291-py3-none-win32.whl", hash = "sha256:1d5f0616ae4cdc7a938b493b6a1a71c8a47d0300c0d65f6e41c281c2f7490ad3"}, - {file = "ruff-0.0.291-py3-none-win_amd64.whl", hash = "sha256:8a69bfbde72db8ca1c43ee3570f59daad155196c3fbe357047cd9b77de65f15b"}, - {file = "ruff-0.0.291-py3-none-win_arm64.whl", hash = "sha256:d867384a4615b7f30b223a849b52104214442b5ba79b473d7edd18da3cde22d6"}, - {file = "ruff-0.0.291.tar.gz", hash = "sha256:c61109661dde9db73469d14a82b42a88c7164f731e6a3b0042e71394c1c7ceed"}, + {file = "ruff-0.0.292-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:02f29db018c9d474270c704e6c6b13b18ed0ecac82761e4fcf0faa3728430c96"}, + {file = "ruff-0.0.292-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:69654e564342f507edfa09ee6897883ca76e331d4bbc3676d8a8403838e9fade"}, + {file = "ruff-0.0.292-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c3c91859a9b845c33778f11902e7b26440d64b9d5110edd4e4fa1726c41e0a4"}, + {file = "ruff-0.0.292-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f4476f1243af2d8c29da5f235c13dca52177117935e1f9393f9d90f9833f69e4"}, + {file = "ruff-0.0.292-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be8eb50eaf8648070b8e58ece8e69c9322d34afe367eec4210fdee9a555e4ca7"}, + {file = "ruff-0.0.292-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:9889bac18a0c07018aac75ef6c1e6511d8411724d67cb879103b01758e110a81"}, + {file = "ruff-0.0.292-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6bdfabd4334684a4418b99b3118793f2c13bb67bf1540a769d7816410402a205"}, + {file = "ruff-0.0.292-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7c77c53bfcd75dbcd4d1f42d6cabf2485d2e1ee0678da850f08e1ab13081a8"}, + {file = "ruff-0.0.292-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e087b24d0d849c5c81516ec740bf4fd48bf363cfb104545464e0fca749b6af9"}, + {file = "ruff-0.0.292-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f160b5ec26be32362d0774964e218f3fcf0a7da299f7e220ef45ae9e3e67101a"}, + {file = "ruff-0.0.292-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ac153eee6dd4444501c4bb92bff866491d4bfb01ce26dd2fff7ca472c8df9ad0"}, + {file = "ruff-0.0.292-py3-none-musllinux_1_2_i686.whl", hash = "sha256:87616771e72820800b8faea82edd858324b29bb99a920d6aa3d3949dd3f88fb0"}, + {file = "ruff-0.0.292-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b76deb3bdbea2ef97db286cf953488745dd6424c122d275f05836c53f62d4016"}, + {file = "ruff-0.0.292-py3-none-win32.whl", hash = "sha256:e854b05408f7a8033a027e4b1c7f9889563dd2aca545d13d06711e5c39c3d003"}, + {file = "ruff-0.0.292-py3-none-win_amd64.whl", hash = "sha256:f27282bedfd04d4c3492e5c3398360c9d86a295be00eccc63914438b4ac8a83c"}, + {file = "ruff-0.0.292-py3-none-win_arm64.whl", hash = "sha256:7f67a69c8f12fbc8daf6ae6d36705037bde315abf8b82b6e1f4c9e74eb750f68"}, + {file = "ruff-0.0.292.tar.gz", hash = "sha256:1093449e37dd1e9b813798f6ad70932b57cf614e5c2b5c51005bf67d55db33ac"}, ] [[package]] @@ -1769,4 +1769,4 @@ watchmedo = ["PyYAML (>=3.10)"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "3b046d340a48c32ea5c9d7fa6e6687a15173dd97da4c0aa982cc1d48b778012e" +content-hash = "056b014fc985bda3ac9d33518eae39b228f776e84307ee4ddd28bd330a1c36e6" diff --git a/pyproject.toml b/pyproject.toml index 9e404234d..f29e7ae65 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ 
-45,7 +45,7 @@ pytest-mock = "^3.10.0"
 pytest-timeout = "^2.1.0"
 snapshottest = "^0.6.0"
 pre-commit = "^2.19.0"
-ruff = "^0.0.291"
+ruff = "^0.0.292"
 black = "^23.7.0"
 typer-cli = "^0.0.13"
 pyright = "^1.1.314"

From 5aa59364e483acb7d3bedf39c4fbbcae0f528055 Mon Sep 17 00:00:00 2001
From: Ivan Yordanov
Date: Fri, 6 Oct 2023 16:14:16 +0300
Subject: [PATCH 76/76] ci: remove future import that breaks ruff

---
 kpops/components/streams_bootstrap/producer/producer_app.py | 2 +-
 kpops/components/streams_bootstrap/streams/streams_app.py   | 2 --
 pyproject.toml                                               | 2 +-
 3 files changed, 2 insertions(+), 4 deletions(-)

diff --git a/kpops/components/streams_bootstrap/producer/producer_app.py b/kpops/components/streams_bootstrap/producer/producer_app.py
index 08e621019..6091cdd77 100644
--- a/kpops/components/streams_bootstrap/producer/producer_app.py
+++ b/kpops/components/streams_bootstrap/producer/producer_app.py
@@ -1,4 +1,4 @@
-from __future__ import annotations
+# from __future__ import annotations
 
 from pydantic import Field
 from typing_extensions import override
diff --git a/kpops/components/streams_bootstrap/streams/streams_app.py b/kpops/components/streams_bootstrap/streams/streams_app.py
index fb65d9ab8..a466b4eba 100644
--- a/kpops/components/streams_bootstrap/streams/streams_app.py
+++ b/kpops/components/streams_bootstrap/streams/streams_app.py
@@ -1,5 +1,3 @@
-from __future__ import annotations
-
 from pydantic import Field
 from typing_extensions import override
 
diff --git a/pyproject.toml b/pyproject.toml
index f29e7ae65..7e0427eda 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -159,7 +159,7 @@ split-on-trailing-comma = false
 extend-immutable-calls = ["typer.Argument"]
 
 [tool.ruff.flake8-type-checking]
-runtime-evaluated-base-classes = ["pydantic.BaseModel", "kpops.components.base_components.kafka_app.KafkaApp"]
+runtime-evaluated-base-classes = ["pydantic.BaseModel"]
 
 [build-system]
 requires = ["poetry-core>=1.0.0"]
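
A note on the last patch, since its subject line is terse: `from __future__ import annotations` (PEP 563) turns every annotation in a module into a plain string at runtime, while pydantic v1 (the major version pinned in this lock file) builds its validators from those annotations when the class is defined, so the strings must still resolve to real objects. Ruff's flake8-type-checking (TCH) rules, by contrast, want to move imports that only appear in annotations behind `if TYPE_CHECKING:`, which makes exactly that resolution fail. Dropping the future import from the pydantic-based components and narrowing `runtime-evaluated-base-classes` to `pydantic.BaseModel` is the usual way to stop the two tools from fighting. The sketch below illustrates the failure mode under those assumptions; the `Topic` model and its `pathlib.Path` field are made up for illustration and are not part of the kpops code base:

    # Hypothetical module, not from kpops -- shows why PEP 563 + TCH breaks pydantic v1.
    from __future__ import annotations  # all annotations below become strings

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Ruff's TCH fixes would relocate an "annotation-only" import here.
        from pathlib import Path

    from pydantic import BaseModel


    class Topic(BaseModel):
        # pydantic v1 tries to resolve the string "Path" while building this class.
        # At runtime the name only exists under TYPE_CHECKING, so the field is left
        # as an unresolved forward reference.
        config_path: Path


    # Instantiation then fails with a ConfigError asking for Topic.update_forward_refs()
    # instead of validating the value.
    Topic(config_path="pipeline.yaml")

With the future import removed, the annotation is evaluated eagerly again, and the `runtime-evaluated-base-classes` setting tells ruff not to relocate imports that `BaseModel` subclasses need at runtime in the first place.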