diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 4cccf55028..157bc7df5f 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -49,7 +49,7 @@ repos:
 ## version = re.search('ruff==([0-9\.]*)', open("constraints.txt").read())[1]
 ## print(f"rev: v{version}")
 ##]]]
- rev: v0.2.0
+ rev: v0.3.5
 ##[[[end]]]
 hooks:
 # Run the linter.
@@ -66,9 +66,9 @@ repos:
 ## version = re.search('mypy==([0-9\.]*)', open("constraints.txt").read())[1]
 ## print(f"#========= FROM constraints.txt: v{version} =========")
 ##]]]
- #========= FROM constraints.txt: v1.8.0 =========
+ #========= FROM constraints.txt: v1.9.0 =========
 ##[[[end]]]
- rev: v1.8.0 # MUST match version ^^^^ in constraints.txt (if the mirror is up-to-date)
+ rev: v1.9.0 # MUST match version ^^^^ in constraints.txt (if the mirror is up-to-date)
 hooks:
 - id: mypy
 additional_dependencies: # versions from constraints.txt
@@ -86,27 +86,27 @@ repos:
 ##]]]
 - astunparse==1.6.3
 - attrs==23.2.0
- - black==24.2.0
- - boltons==23.1.1
+ - black==24.3.0
+ - boltons==24.0.0
 - cached-property==1.5.2
 - click==8.1.7
- - cmake==3.28.3
+ - cmake==3.29.0.1
 - cytoolz==0.12.3
 - deepdiff==6.7.1
 - devtools==0.12.2
 - factory-boy==3.3.0
- - frozendict==2.4.0
+ - frozendict==2.4.1
 - gridtools-cpp==2.3.2
- - importlib-resources==6.1.2
+ - importlib-resources==6.4.0
 - jinja2==3.1.3
 - lark==1.1.9
 - mako==1.3.2
 - nanobind==1.9.2
 - ninja==1.11.1.1
 - numpy==1.24.4
- - packaging==23.2
- - pybind11==2.11.1
- - setuptools==69.1.1
+ - packaging==24.0
+ - pybind11==2.12.0
+ - setuptools==69.2.0
 - tabulate==0.9.0
 - typing-extensions==4.5.0
 - xxhash==3.0.0
diff --git a/constraints.txt b/constraints.txt
index 5ec67bb810..15a8310e6a 100644
--- a/constraints.txt
+++ b/constraints.txt
@@ -6,15 +6,17 @@
 #
 aenum==3.1.15 # via dace
 alabaster==0.7.13 # via sphinx
+annotated-types==0.6.0 # via pydantic
 asttokens==2.4.1 # via devtools, stack-data
 astunparse==1.6.3 ; python_version < "3.9" # via dace, gt4py (pyproject.toml)
 attrs==23.2.0 # via flake8-bugbear, flake8-eradicate, gt4py (pyproject.toml), hypothesis, jsonschema, referencing
 babel==2.14.0 # via sphinx
 backcall==0.2.0 # via ipython
-black==24.2.0 # via gt4py (pyproject.toml)
+black==24.3.0 # via gt4py (pyproject.toml)
 blinker==1.7.0 # via flask
-boltons==23.1.1 # via gt4py (pyproject.toml)
-build==1.0.3 # via pip-tools
+boltons==24.0.0 # via gt4py (pyproject.toml)
+build==1.2.1 # via pip-tools
+bump-my-version==0.12.0 # via -r requirements-dev.in
 cached-property==1.5.2 # via gt4py (pyproject.toml)
 cachetools==5.3.3 # via tox
 certifi==2024.2.2 # via requests
@@ -22,14 +24,14 @@ cffi==1.16.0 # via cryptography
 cfgv==3.4.0 # via pre-commit
 chardet==5.2.0 # via tox
 charset-normalizer==3.3.2 # via requests
-clang-format==17.0.6 # via -r requirements-dev.in, gt4py (pyproject.toml)
-click==8.1.7 # via black, flask, gt4py (pyproject.toml), pip-tools
-cmake==3.28.3 # via gt4py (pyproject.toml)
-cogapp==3.3.0 # via -r requirements-dev.in
+clang-format==18.1.2 # via -r requirements-dev.in, gt4py (pyproject.toml)
+click==8.1.7 # via black, bump-my-version, flask, gt4py (pyproject.toml), pip-tools, rich-click
+cmake==3.29.0.1 # via gt4py (pyproject.toml)
+cogapp==3.4.1 # via -r requirements-dev.in
 colorama==0.4.6 # via tox
-comm==0.2.1 # via ipykernel
+comm==0.2.2 # via ipykernel
 contourpy==1.1.1 # via matplotlib
-coverage==7.4.3 # via -r requirements-dev.in, pytest-cov
+coverage==7.4.4 # via -r requirements-dev.in, pytest-cov
 cryptography==42.0.5 # via types-paramiko, types-pyopenssl, types-redis
 cycler==0.12.1 #
via matplotlib cytoolz==0.12.3 # via gt4py (pyproject.toml) @@ -47,12 +49,12 @@ exceptiongroup==1.2.0 # via hypothesis, pytest execnet==2.0.2 # via pytest-cache, pytest-xdist executing==2.0.1 # via devtools, stack-data factory-boy==3.3.0 # via gt4py (pyproject.toml), pytest-factoryboy -faker==23.2.1 # via factory-boy +faker==24.4.0 # via factory-boy fastjsonschema==2.19.1 # via nbformat -filelock==3.13.1 # via tox, virtualenv +filelock==3.13.3 # via tox, virtualenv flake8==7.0.0 # via -r requirements-dev.in, flake8-bugbear, flake8-builtins, flake8-debugger, flake8-docstrings, flake8-eradicate, flake8-mutable, flake8-pyproject, flake8-rst-docstrings flake8-bugbear==24.2.6 # via -r requirements-dev.in -flake8-builtins==2.2.0 # via -r requirements-dev.in +flake8-builtins==2.4.0 # via -r requirements-dev.in flake8-debugger==4.1.2 # via -r requirements-dev.in flake8-docstrings==1.7.0 # via -r requirements-dev.in flake8-eradicate==1.5.0 # via -r requirements-dev.in @@ -60,19 +62,19 @@ flake8-mutable==1.2.0 # via -r requirements-dev.in flake8-pyproject==1.2.3 # via -r requirements-dev.in flake8-rst-docstrings==0.3.0 # via -r requirements-dev.in flask==3.0.2 # via dace -fonttools==4.49.0 # via matplotlib +fonttools==4.50.0 # via matplotlib fparser==0.1.4 # via dace -frozendict==2.4.0 # via gt4py (pyproject.toml) +frozendict==2.4.1 # via gt4py (pyproject.toml) gridtools-cpp==2.3.2 # via gt4py (pyproject.toml) -hypothesis==6.98.13 # via -r requirements-dev.in, gt4py (pyproject.toml) +hypothesis==6.100.0 # via -r requirements-dev.in, gt4py (pyproject.toml) identify==2.5.35 # via pre-commit idna==3.6 # via requests imagesize==1.4.1 # via sphinx -importlib-metadata==7.0.1 # via build, flask, jax, jupyter-client, sphinx -importlib-resources==6.1.2 ; python_version < "3.9" # via gt4py (pyproject.toml), jsonschema, jsonschema-specifications, matplotlib +importlib-metadata==7.1.0 # via build, flask, jax, jupyter-client, sphinx +importlib-resources==6.4.0 ; python_version < "3.9" # via gt4py (pyproject.toml), jsonschema, jsonschema-specifications, matplotlib inflection==0.5.1 # via pytest-factoryboy iniconfig==2.0.0 # via pytest -ipykernel==6.29.3 # via nbmake +ipykernel==6.29.4 # via nbmake ipython==8.12.3 # via ipykernel isort==5.13.2 # via -r requirements-dev.in itsdangerous==2.1.2 # via flask @@ -82,13 +84,13 @@ jedi==0.19.1 # via ipython jinja2==3.1.3 # via flask, gt4py (pyproject.toml), sphinx jsonschema==4.21.1 # via nbformat jsonschema-specifications==2023.12.1 # via jsonschema -jupyter-client==8.6.0 # via ipykernel, nbclient -jupyter-core==5.7.1 # via ipykernel, jupyter-client, nbformat +jupyter-client==8.6.1 # via ipykernel, nbclient +jupyter-core==5.7.2 # via ipykernel, jupyter-client, nbformat jupytext==1.16.1 # via -r requirements-dev.in kiwisolver==1.4.5 # via matplotlib lark==1.1.9 # via gt4py (pyproject.toml) mako==1.3.2 # via gt4py (pyproject.toml) -markdown-it-py==3.0.0 # via jupytext, mdit-py-plugins +markdown-it-py==3.0.0 # via jupytext, mdit-py-plugins, rich markupsafe==2.1.5 # via jinja2, mako, werkzeug matplotlib==3.7.5 # via -r requirements-dev.in matplotlib-inline==0.1.6 # via ipykernel, ipython @@ -97,12 +99,12 @@ mdit-py-plugins==0.4.0 # via jupytext mdurl==0.1.2 # via markdown-it-py ml-dtypes==0.2.0 # via jax, jaxlib mpmath==1.3.0 # via sympy -mypy==1.8.0 # via -r requirements-dev.in +mypy==1.9.0 # via -r requirements-dev.in mypy-extensions==1.0.0 # via black, mypy nanobind==1.9.2 # via gt4py (pyproject.toml) nbclient==0.6.8 # via nbmake -nbformat==5.9.2 # via jupytext, nbclient, 
nbmake -nbmake==1.5.0 # via -r requirements-dev.in +nbformat==5.10.3 # via jupytext, nbclient, nbmake +nbmake==1.5.3 # via -r requirements-dev.in nest-asyncio==1.6.0 # via ipykernel, nbclient networkx==3.1 # via dace ninja==1.11.1.1 # via gt4py (pyproject.toml) @@ -110,14 +112,14 @@ nodeenv==1.8.0 # via pre-commit numpy==1.24.4 # via contourpy, dace, gt4py (pyproject.toml), jax, jaxlib, matplotlib, ml-dtypes, opt-einsum, scipy, types-jack-client opt-einsum==3.3.0 # via jax ordered-set==4.1.0 # via deepdiff -packaging==23.2 # via black, build, gt4py (pyproject.toml), ipykernel, jupytext, matplotlib, pyproject-api, pytest, setuptools-scm, sphinx, tox +packaging==24.0 # via black, build, gt4py (pyproject.toml), ipykernel, jupytext, matplotlib, pyproject-api, pytest, pytest-factoryboy, setuptools-scm, sphinx, tox parso==0.8.3 # via jedi pathspec==0.12.1 # via black pexpect==4.9.0 # via ipython pickleshare==0.7.5 # via ipython -pillow==10.2.0 # via matplotlib -pip-tools==7.4.0 # via -r requirements-dev.in -pipdeptree==2.15.1 # via -r requirements-dev.in +pillow==10.3.0 # via matplotlib +pip-tools==7.4.1 # via -r requirements-dev.in +pipdeptree==2.16.2 # via -r requirements-dev.in pkgutil-resolve-name==1.3.10 # via jsonschema platformdirs==4.2.0 # via black, jupyter-core, tox, virtualenv pluggy==1.4.0 # via pytest, tox @@ -127,29 +129,35 @@ prompt-toolkit==3.0.43 # via ipython psutil==5.9.8 # via -r requirements-dev.in, ipykernel, pytest-xdist ptyprocess==0.7.0 # via pexpect pure-eval==0.2.2 # via stack-data -pybind11==2.11.1 # via gt4py (pyproject.toml) +pybind11==2.12.0 # via gt4py (pyproject.toml) pycodestyle==2.11.1 # via flake8, flake8-debugger -pycparser==2.21 # via cffi +pycparser==2.22 # via cffi +pydantic==2.0a3 # via bump-my-version, pydantic-settings +pydantic-core==0.25.0 # via pydantic +pydantic-settings==1.99 # via bump-my-version pydocstyle==6.3.0 # via flake8-docstrings pyflakes==3.2.0 # via flake8 -pygments==2.17.2 # via -r requirements-dev.in, devtools, flake8-rst-docstrings, ipython, nbmake, sphinx -pyparsing==3.1.1 # via matplotlib +pygments==2.17.2 # via -r requirements-dev.in, devtools, flake8-rst-docstrings, ipython, nbmake, rich, sphinx +pyparsing==3.1.2 # via matplotlib pyproject-api==1.6.1 # via tox pyproject-hooks==1.0.0 # via build, pip-tools -pytest==8.0.2 # via -r requirements-dev.in, gt4py (pyproject.toml), nbmake, pytest-cache, pytest-cov, pytest-factoryboy, pytest-xdist +pytest==8.1.1 # via -r requirements-dev.in, gt4py (pyproject.toml), nbmake, pytest-cache, pytest-cov, pytest-factoryboy, pytest-xdist pytest-cache==1.0 # via -r requirements-dev.in -pytest-cov==4.1.0 # via -r requirements-dev.in -pytest-factoryboy==2.6.0 # via -r requirements-dev.in +pytest-cov==5.0.0 # via -r requirements-dev.in +pytest-factoryboy==2.7.0 # via -r requirements-dev.in pytest-xdist==3.5.0 # via -r requirements-dev.in -python-dateutil==2.8.2 # via faker, jupyter-client, matplotlib +python-dateutil==2.9.0.post0 # via faker, jupyter-client, matplotlib +python-dotenv==1.0.1 # via pydantic-settings pytz==2024.1 # via babel pyyaml==6.0.1 # via dace, jupytext, pre-commit pyzmq==25.1.2 # via ipykernel, jupyter-client -referencing==0.33.0 # via jsonschema, jsonschema-specifications +referencing==0.34.0 # via jsonschema, jsonschema-specifications requests==2.31.0 # via dace, sphinx restructuredtext-lint==1.4.0 # via flake8-rst-docstrings +rich==13.7.1 # via bump-my-version, rich-click +rich-click==1.7.4 # via bump-my-version rpds-py==0.18.0 # via jsonschema, referencing -ruff==0.2.2 # via -r 
requirements-dev.in +ruff==0.3.5 # via -r requirements-dev.in scipy==1.10.1 # via gt4py (pyproject.toml), jax, jaxlib setuptools-scm==8.0.4 # via fparser six==1.16.0 # via asttokens, astunparse, python-dateutil @@ -169,36 +177,37 @@ sympy==1.9 # via dace, gt4py (pyproject.toml) tabulate==0.9.0 # via gt4py (pyproject.toml) toml==0.10.2 # via jupytext tomli==2.0.1 ; python_version < "3.11" # via -r requirements-dev.in, black, build, coverage, flake8-pyproject, mypy, pip-tools, pyproject-api, pyproject-hooks, pytest, setuptools-scm, tox +tomlkit==0.12.4 # via bump-my-version toolz==0.12.1 # via cytoolz tornado==6.4 # via ipykernel, jupyter-client -tox==4.13.0 # via -r requirements-dev.in -traitlets==5.14.1 # via comm, ipykernel, ipython, jupyter-client, jupyter-core, matplotlib-inline, nbclient, nbformat -types-aiofiles==23.2.0.20240106 # via types-all +tox==4.14.2 # via -r requirements-dev.in +traitlets==5.14.2 # via comm, ipykernel, ipython, jupyter-client, jupyter-core, matplotlib-inline, nbclient, nbformat +types-aiofiles==23.2.0.20240331 # via types-all types-all==1.0.0 # via -r requirements-dev.in types-annoy==1.17.8.4 # via types-all types-atomicwrites==1.4.5.1 # via types-all types-backports==0.1.3 # via types-all types-backports-abc==0.5.2 # via types-all -types-bleach==6.1.0.20240222 # via types-all +types-bleach==6.1.0.20240331 # via types-all types-boto==2.49.18.20240205 # via types-all types-cachetools==5.3.0.7 # via types-all types-certifi==2021.10.8.3 # via types-all -types-cffi==1.16.0.20240106 # via types-jack-client +types-cffi==1.16.0.20240331 # via types-jack-client types-characteristic==14.3.7 # via types-all types-chardet==5.0.4.6 # via types-all types-click==7.1.8 # via types-all, types-flask -types-click-spinner==0.1.13.20240106 # via types-all -types-colorama==0.4.15.20240205 # via types-all +types-click-spinner==0.1.13.20240311 # via types-all +types-colorama==0.4.15.20240311 # via types-all types-contextvars==2.4.7.3 # via types-all -types-croniter==2.0.0.20240106 # via types-all +types-croniter==2.0.0.20240321 # via types-all types-cryptography==3.3.23.2 # via types-all, types-openssl-python, types-pyjwt types-dataclasses==0.6.6 # via types-all -types-dateparser==1.1.4.20240106 # via types-all +types-dateparser==1.1.4.20240331 # via types-all types-datetimerange==2.0.0.6 # via types-all -types-decorator==5.1.8.20240106 # via types-all -types-deprecated==1.2.9.20240106 # via types-all +types-decorator==5.1.8.20240310 # via types-all +types-deprecated==1.2.9.20240311 # via types-all types-docopt==0.6.11.4 # via types-all -types-docutils==0.20.0.20240227 # via types-all +types-docutils==0.20.0.20240331 # via types-all types-emoji==2.1.0.3 # via types-all types-enum34==1.1.8 # via types-all types-fb303==1.0.0 # via types-all, types-scribe @@ -209,53 +218,53 @@ types-freezegun==1.1.10 # via types-all types-frozendict==2.0.9 # via types-all types-futures==3.3.8 # via types-all types-geoip2==3.0.0 # via types-all -types-html5lib==1.1.11.20240222 # via types-bleach +types-html5lib==1.1.11.20240228 # via types-bleach types-ipaddress==1.0.8 # via types-all, types-maxminddb types-itsdangerous==1.1.6 # via types-all types-jack-client==0.5.10.20240106 # via types-all types-jinja2==2.11.9 # via types-all, types-flask types-kazoo==0.1.3 # via types-all -types-markdown==3.5.0.20240129 # via types-all +types-markdown==3.6.0.20240316 # via types-all types-markupsafe==1.1.10 # via types-all, types-jinja2 types-maxminddb==1.5.0 # via types-all, types-geoip2 -types-mock==5.1.0.20240106 
# via types-all -types-mypy-extensions==1.0.0.5 # via types-all +types-mock==5.1.0.20240311 # via types-all +types-mypy-extensions==1.0.0.20240311 # via types-all types-nmap==0.1.6 # via types-all types-openssl-python==0.1.3 # via types-all types-orjson==3.6.2 # via types-all -types-paramiko==3.4.0.20240205 # via types-all, types-pysftp +types-paramiko==3.4.0.20240311 # via types-all, types-pysftp types-pathlib2==2.3.0 # via types-all -types-pillow==10.2.0.20240213 # via types-all +types-pillow==10.2.0.20240331 # via types-all types-pkg-resources==0.1.3 # via types-all -types-polib==1.2.0.20240115 # via types-all -types-protobuf==4.24.0.20240129 # via types-all +types-polib==1.2.0.20240327 # via types-all +types-protobuf==4.24.0.20240311 # via types-all types-pyaudio==0.2.16.20240106 # via types-all -types-pycurl==7.45.2.20240106 # via types-all -types-pyfarmhash==0.3.1.2 # via types-all +types-pycurl==7.45.2.20240311 # via types-all +types-pyfarmhash==0.3.1.20240311 # via types-all types-pyjwt==1.7.1 # via types-all types-pymssql==2.1.0 # via types-all types-pymysql==1.1.0.1 # via types-all -types-pyopenssl==24.0.0.20240130 # via types-redis +types-pyopenssl==24.0.0.20240311 # via types-redis types-pyrfc3339==1.1.1.5 # via types-all types-pysftp==0.2.17.20240106 # via types-all -types-python-dateutil==2.8.19.20240106 # via types-all, types-datetimerange +types-python-dateutil==2.9.0.20240316 # via types-all, types-datetimerange types-python-gflags==3.1.7.3 # via types-all -types-python-slugify==8.0.2.20240127 # via types-all +types-python-slugify==8.0.2.20240310 # via types-all types-pytz==2024.1.0.20240203 # via types-all, types-tzlocal types-pyvmomi==8.0.0.6 # via types-all -types-pyyaml==6.0.12.12 # via types-all -types-redis==4.6.0.20240218 # via types-all -types-requests==2.31.0.20240218 # via types-all +types-pyyaml==6.0.12.20240311 # via types-all +types-redis==4.6.0.20240311 # via types-all +types-requests==2.31.0.20240402 # via types-all types-retry==0.9.9.4 # via types-all types-routes==2.5.0 # via types-all types-scribe==2.0.0 # via types-all -types-setuptools==69.1.0.20240223 # via types-cffi -types-simplejson==3.19.0.20240218 # via types-all +types-setuptools==69.2.0.20240317 # via types-cffi +types-simplejson==3.19.0.20240310 # via types-all types-singledispatch==4.1.0.0 # via types-all -types-six==1.16.21.20240106 # via types-all +types-six==1.16.21.20240311 # via types-all types-tabulate==0.9.0.20240106 # via types-all types-termcolor==1.1.6.2 # via types-all -types-toml==0.10.8.7 # via types-all +types-toml==0.10.8.20240310 # via types-all types-tornado==5.1.1 # via types-all types-typed-ast==1.5.8.7 # via types-all types-tzlocal==5.1.0.1 # via types-all @@ -263,16 +272,16 @@ types-ujson==5.9.0.0 # via types-all types-waitress==2.1.4.20240106 # via types-all types-werkzeug==1.0.9 # via types-all, types-flask types-xxhash==3.0.5.2 # via types-all -typing-extensions==4.5.0 # via black, faker, gt4py (pyproject.toml), ipython, mypy, pytest-factoryboy, setuptools-scm +typing-extensions==4.5.0 # via annotated-types, black, faker, gt4py (pyproject.toml), ipython, mypy, pydantic, pydantic-core, pytest-factoryboy, rich, rich-click, setuptools-scm urllib3==2.2.1 # via requests, types-requests virtualenv==20.25.1 # via pre-commit, tox wcwidth==0.2.13 # via prompt-toolkit websockets==12.0 # via dace -werkzeug==3.0.1 # via flask -wheel==0.42.0 # via astunparse, pip-tools +werkzeug==3.0.2 # via flask +wheel==0.43.0 # via astunparse, pip-tools xxhash==3.0.0 # via gt4py (pyproject.toml) 
-zipp==3.17.0 # via importlib-metadata, importlib-resources +zipp==3.18.1 # via importlib-metadata, importlib-resources # The following packages are considered to be unsafe in a requirements file: -pip==24.0 # via pip-tools -setuptools==69.1.1 # via gt4py (pyproject.toml), nodeenv, pip-tools, setuptools-scm +pip==24.0 # via pip-tools, pipdeptree +setuptools==69.2.0 # via gt4py (pyproject.toml), nodeenv, pip-tools, setuptools-scm diff --git a/docs/user/next/workshop/exercises/helpers.py b/docs/user/next/workshop/exercises/helpers.py index 7b74711977..ee6926b42f 100644 --- a/docs/user/next/workshop/exercises/helpers.py +++ b/docs/user/next/workshop/exercises/helpers.py @@ -97,272 +97,294 @@ def ripple_field(domain: gtx.Domain, *, allocator=None) -> MutableLocatedField: n_levels = 10 -e2c2v_table = np.asarray([ - [0, 1, 4, 6], # 0 - [0, 4, 1, 3], # 1 - [0, 3, 4, 2], # 2 - [1, 2, 5, 7], # 3 - [1, 5, 2, 4], # 4 - [1, 4, 5, 0], # 5 - [2, 0, 3, 8], # 6 - [2, 3, 5, 0], # 7 - [2, 5, 1, 3], # 8 - [3, 4, 0, 7], # 9 - [3, 7, 4, 6], # 10 - [3, 6, 7, 5], # 11 - [4, 5, 8, 1], # 12 - [4, 8, 7, 5], # 13 - [4, 7, 3, 8], # 14 - [5, 3, 6, 2], # 15 - [6, 5, 3, 8], # 16 - [8, 5, 6, 4], # 17 - [6, 7, 3, 1], # 18 - [6, 1, 7, 0], # 19 - [6, 0, 1, 8], # 20 - [7, 8, 2, 4], # 21 - [7, 2, 8, 1], # 22 - [7, 1, 2, 6], # 23 - [8, 6, 0, 5], # 24 - [8, 0, 6, 2], # 25 - [8, 2, 0, 6], # 26 -]) +e2c2v_table = np.asarray( + [ + [0, 1, 4, 6], # 0 + [0, 4, 1, 3], # 1 + [0, 3, 4, 2], # 2 + [1, 2, 5, 7], # 3 + [1, 5, 2, 4], # 4 + [1, 4, 5, 0], # 5 + [2, 0, 3, 8], # 6 + [2, 3, 5, 0], # 7 + [2, 5, 1, 3], # 8 + [3, 4, 0, 7], # 9 + [3, 7, 4, 6], # 10 + [3, 6, 7, 5], # 11 + [4, 5, 8, 1], # 12 + [4, 8, 7, 5], # 13 + [4, 7, 3, 8], # 14 + [5, 3, 6, 2], # 15 + [6, 5, 3, 8], # 16 + [8, 5, 6, 4], # 17 + [6, 7, 3, 1], # 18 + [6, 1, 7, 0], # 19 + [6, 0, 1, 8], # 20 + [7, 8, 2, 4], # 21 + [7, 2, 8, 1], # 22 + [7, 1, 2, 6], # 23 + [8, 6, 0, 5], # 24 + [8, 0, 6, 2], # 25 + [8, 2, 0, 6], # 26 + ] +) -e2c_table = np.asarray([ - [0, 15], - [0, 3], - [3, 2], - [1, 16], - [1, 4], - [0, 4], - [2, 17], - [2, 5], - [1, 5], - [3, 6], - [6, 9], - [9, 8], - [4, 7], - [7, 10], - [6, 10], - [5, 8], - [8, 11], - [7, 11], - [9, 12], - [12, 15], - [15, 14], - [10, 13], - [13, 16], - [12, 16], - [11, 14], - [14, 17], - [13, 17], -]) +e2c_table = np.asarray( + [ + [0, 15], + [0, 3], + [3, 2], + [1, 16], + [1, 4], + [0, 4], + [2, 17], + [2, 5], + [1, 5], + [3, 6], + [6, 9], + [9, 8], + [4, 7], + [7, 10], + [6, 10], + [5, 8], + [8, 11], + [7, 11], + [9, 12], + [12, 15], + [15, 14], + [10, 13], + [13, 16], + [12, 16], + [11, 14], + [14, 17], + [13, 17], + ] +) -e2v_table = np.asarray([ - [0, 1], - [0, 4], - [0, 3], - [1, 2], - [1, 5], - [1, 4], - [2, 0], - [2, 3], - [2, 5], - [3, 4], - [3, 7], - [3, 6], - [4, 5], - [4, 8], - [4, 7], - [5, 3], - [5, 6], - [5, 8], - [6, 7], - [6, 1], - [6, 0], - [7, 8], - [7, 2], - [7, 1], - [8, 6], - [8, 0], - [8, 2], -]) +e2v_table = np.asarray( + [ + [0, 1], + [0, 4], + [0, 3], + [1, 2], + [1, 5], + [1, 4], + [2, 0], + [2, 3], + [2, 5], + [3, 4], + [3, 7], + [3, 6], + [4, 5], + [4, 8], + [4, 7], + [5, 3], + [5, 6], + [5, 8], + [6, 7], + [6, 1], + [6, 0], + [7, 8], + [7, 2], + [7, 1], + [8, 6], + [8, 0], + [8, 2], + ] +) -e2c2e_table = np.asarray([ - [1, 5, 19, 20], - [0, 5, 2, 9], - [1, 9, 6, 7], - [4, 8, 22, 23], - [3, 8, 5, 12], - [0, 1, 4, 12], - [7, 2, 25, 26], - [6, 2, 8, 15], - [3, 4, 7, 15], - [1, 2, 10, 14], - [9, 14, 11, 18], - [10, 18, 15, 16], - [4, 5, 13, 17], - [12, 17, 14, 21], - [9, 10, 13, 21], - [7, 8, 16, 11], - 
[15, 11, 17, 24], - [12, 13, 16, 24], - [10, 11, 19, 23], - [18, 23, 20, 0], - [19, 0, 24, 25], - [13, 14, 22, 26], - [21, 26, 23, 3], - [18, 19, 22, 3], - [16, 17, 25, 20], - [24, 20, 26, 6], - [25, 6, 21, 22], -]) +e2c2e_table = np.asarray( + [ + [1, 5, 19, 20], + [0, 5, 2, 9], + [1, 9, 6, 7], + [4, 8, 22, 23], + [3, 8, 5, 12], + [0, 1, 4, 12], + [7, 2, 25, 26], + [6, 2, 8, 15], + [3, 4, 7, 15], + [1, 2, 10, 14], + [9, 14, 11, 18], + [10, 18, 15, 16], + [4, 5, 13, 17], + [12, 17, 14, 21], + [9, 10, 13, 21], + [7, 8, 16, 11], + [15, 11, 17, 24], + [12, 13, 16, 24], + [10, 11, 19, 23], + [18, 23, 20, 0], + [19, 0, 24, 25], + [13, 14, 22, 26], + [21, 26, 23, 3], + [18, 19, 22, 3], + [16, 17, 25, 20], + [24, 20, 26, 6], + [25, 6, 21, 22], + ] +) -e2c2eO_table = np.asarray([ - [0, 1, 5, 19, 20], - [0, 1, 5, 2, 9], - [1, 2, 9, 6, 7], - [3, 4, 8, 22, 23], - [3, 4, 8, 5, 12], - [0, 1, 5, 4, 12], - [6, 7, 2, 25, 26], - [6, 7, 2, 8, 15], - [3, 4, 8, 7, 15], - [1, 2, 9, 10, 14], - [9, 10, 14, 11, 18], - [10, 11, 18, 15, 16], - [4, 5, 12, 13, 17], - [12, 13, 17, 14, 21], - [9, 10, 14, 13, 21], - [7, 8, 15, 16, 11], - [15, 16, 11, 17, 24], - [12, 13, 17, 16, 24], - [10, 11, 18, 19, 23], - [18, 19, 23, 20, 0], - [19, 20, 0, 24, 25], - [13, 14, 21, 22, 26], - [21, 22, 26, 23, 3], - [18, 19, 23, 22, 3], - [16, 17, 24, 25, 20], - [24, 25, 20, 26, 6], - [25, 26, 6, 21, 22], -]) +e2c2eO_table = np.asarray( + [ + [0, 1, 5, 19, 20], + [0, 1, 5, 2, 9], + [1, 2, 9, 6, 7], + [3, 4, 8, 22, 23], + [3, 4, 8, 5, 12], + [0, 1, 5, 4, 12], + [6, 7, 2, 25, 26], + [6, 7, 2, 8, 15], + [3, 4, 8, 7, 15], + [1, 2, 9, 10, 14], + [9, 10, 14, 11, 18], + [10, 11, 18, 15, 16], + [4, 5, 12, 13, 17], + [12, 13, 17, 14, 21], + [9, 10, 14, 13, 21], + [7, 8, 15, 16, 11], + [15, 16, 11, 17, 24], + [12, 13, 17, 16, 24], + [10, 11, 18, 19, 23], + [18, 19, 23, 20, 0], + [19, 20, 0, 24, 25], + [13, 14, 21, 22, 26], + [21, 22, 26, 23, 3], + [18, 19, 23, 22, 3], + [16, 17, 24, 25, 20], + [24, 25, 20, 26, 6], + [25, 26, 6, 21, 22], + ] +) -c2e_table = np.asarray([ - [0, 1, 5], # cell 0 - [3, 4, 8], # cell 1 - [6, 7, 2], # cell 2 - [1, 2, 9], # cell 3 - [4, 5, 12], # cell 4 - [7, 8, 15], # cell 5 - [9, 10, 14], # cell 6 - [12, 13, 17], # cell 7 - [15, 16, 11], # cell 8 - [10, 11, 18], # cell 9 - [13, 14, 21], # cell 10 - [16, 17, 24], # cell 11 - [18, 19, 23], # cell 12 - [21, 22, 26], # cell 13 - [24, 25, 20], # cell 14 - [19, 20, 0], # cell 15 - [22, 23, 3], # cell 16 - [25, 26, 6], # cell 17 -]) +c2e_table = np.asarray( + [ + [0, 1, 5], # cell 0 + [3, 4, 8], # cell 1 + [6, 7, 2], # cell 2 + [1, 2, 9], # cell 3 + [4, 5, 12], # cell 4 + [7, 8, 15], # cell 5 + [9, 10, 14], # cell 6 + [12, 13, 17], # cell 7 + [15, 16, 11], # cell 8 + [10, 11, 18], # cell 9 + [13, 14, 21], # cell 10 + [16, 17, 24], # cell 11 + [18, 19, 23], # cell 12 + [21, 22, 26], # cell 13 + [24, 25, 20], # cell 14 + [19, 20, 0], # cell 15 + [22, 23, 3], # cell 16 + [25, 26, 6], # cell 17 + ] +) -v2c_table = np.asarray([ - [17, 14, 3, 0, 2, 15], - [0, 4, 1, 12, 16, 15], - [1, 5, 2, 16, 13, 17], - [3, 6, 9, 5, 8, 2], - [6, 10, 7, 4, 0, 3], - [7, 11, 8, 5, 1, 4], - [9, 12, 15, 8, 11, 14], - [12, 16, 13, 10, 6, 9], - [13, 17, 14, 11, 7, 10], -]) +v2c_table = np.asarray( + [ + [17, 14, 3, 0, 2, 15], + [0, 4, 1, 12, 16, 15], + [1, 5, 2, 16, 13, 17], + [3, 6, 9, 5, 8, 2], + [6, 10, 7, 4, 0, 3], + [7, 11, 8, 5, 1, 4], + [9, 12, 15, 8, 11, 14], + [12, 16, 13, 10, 6, 9], + [13, 17, 14, 11, 7, 10], + ] +) -v2e_table = np.asarray([ - [0, 1, 2, 6, 25, 20], - [3, 4, 5, 0, 23, 19], - [6, 
7, 8, 3, 22, 26], - [9, 10, 11, 15, 7, 2], - [12, 13, 14, 9, 1, 5], - [15, 16, 17, 12, 4, 8], - [18, 19, 20, 24, 16, 11], - [21, 22, 23, 18, 10, 14], - [24, 25, 26, 21, 13, 17], -]) +v2e_table = np.asarray( + [ + [0, 1, 2, 6, 25, 20], + [3, 4, 5, 0, 23, 19], + [6, 7, 8, 3, 22, 26], + [9, 10, 11, 15, 7, 2], + [12, 13, 14, 9, 1, 5], + [15, 16, 17, 12, 4, 8], + [18, 19, 20, 24, 16, 11], + [21, 22, 23, 18, 10, 14], + [24, 25, 26, 21, 13, 17], + ] +) -diamond_table = np.asarray([ - [0, 1, 4, 6], # 0 - [0, 4, 1, 3], - [0, 3, 4, 2], - [1, 2, 5, 7], # 3 - [1, 5, 2, 4], - [1, 4, 5, 0], - [2, 0, 3, 8], # 6 - [2, 3, 0, 5], - [2, 5, 1, 3], - [3, 4, 0, 7], # 9 - [3, 7, 4, 6], - [3, 6, 5, 7], - [4, 5, 1, 8], # 12 - [4, 8, 5, 7], - [4, 7, 3, 8], - [5, 3, 2, 6], # 15 - [5, 6, 3, 8], - [5, 8, 4, 6], - [6, 7, 3, 1], # 18 - [6, 1, 7, 0], - [6, 0, 1, 8], - [7, 8, 4, 2], # 21 - [7, 2, 8, 1], - [7, 1, 6, 2], - [8, 6, 5, 0], # 24 - [8, 0, 6, 2], - [8, 2, 7, 0], -]) +diamond_table = np.asarray( + [ + [0, 1, 4, 6], # 0 + [0, 4, 1, 3], + [0, 3, 4, 2], + [1, 2, 5, 7], # 3 + [1, 5, 2, 4], + [1, 4, 5, 0], + [2, 0, 3, 8], # 6 + [2, 3, 0, 5], + [2, 5, 1, 3], + [3, 4, 0, 7], # 9 + [3, 7, 4, 6], + [3, 6, 5, 7], + [4, 5, 1, 8], # 12 + [4, 8, 5, 7], + [4, 7, 3, 8], + [5, 3, 2, 6], # 15 + [5, 6, 3, 8], + [5, 8, 4, 6], + [6, 7, 3, 1], # 18 + [6, 1, 7, 0], + [6, 0, 1, 8], + [7, 8, 4, 2], # 21 + [7, 2, 8, 1], + [7, 1, 6, 2], + [8, 6, 5, 0], # 24 + [8, 0, 6, 2], + [8, 2, 7, 0], + ] +) -c2e2cO_table = np.asarray([ - [15, 4, 3, 0], - [16, 5, 4, 1], - [17, 3, 5, 2], - [0, 6, 2, 3], - [1, 7, 0, 4], - [2, 8, 1, 5], - [3, 10, 9, 6], - [4, 11, 10, 7], - [5, 9, 11, 8], - [6, 12, 8, 9], - [7, 13, 6, 10], - [8, 14, 7, 11], - [9, 16, 15, 12], - [10, 17, 16, 13], - [11, 15, 17, 14], - [12, 0, 14, 15], - [13, 1, 12, 16], - [14, 2, 13, 17], -]) +c2e2cO_table = np.asarray( + [ + [15, 4, 3, 0], + [16, 5, 4, 1], + [17, 3, 5, 2], + [0, 6, 2, 3], + [1, 7, 0, 4], + [2, 8, 1, 5], + [3, 10, 9, 6], + [4, 11, 10, 7], + [5, 9, 11, 8], + [6, 12, 8, 9], + [7, 13, 6, 10], + [8, 14, 7, 11], + [9, 16, 15, 12], + [10, 17, 16, 13], + [11, 15, 17, 14], + [12, 0, 14, 15], + [13, 1, 12, 16], + [14, 2, 13, 17], + ] +) -c2e2c_table = np.asarray([ - [15, 4, 3], - [16, 5, 4], - [17, 3, 5], - [0, 6, 2], - [1, 7, 0], - [2, 8, 1], - [3, 10, 9], - [4, 11, 10], - [5, 9, 11], - [6, 12, 8], - [7, 13, 6], - [8, 14, 7], - [9, 16, 15], - [10, 17, 16], - [11, 15, 17], - [12, 0, 14], - [13, 1, 12], - [14, 2, 13], -]) +c2e2c_table = np.asarray( + [ + [15, 4, 3], + [16, 5, 4], + [17, 3, 5], + [0, 6, 2], + [1, 7, 0], + [2, 8, 1], + [3, 10, 9], + [4, 11, 10], + [5, 9, 11], + [6, 12, 8], + [7, 13, 6], + [8, 14, 7], + [9, 16, 15], + [10, 17, 16], + [11, 15, 17], + [12, 0, 14], + [13, 1, 12], + [14, 2, 13], + ] +) C = Dimension("C") diff --git a/min-extra-requirements-test.txt b/min-extra-requirements-test.txt index 37d48499c5..8c0bc98962 100644 --- a/min-extra-requirements-test.txt +++ b/min-extra-requirements-test.txt @@ -53,6 +53,7 @@ astunparse==1.6.3; python_version < "3.9" attrs==21.3 black==22.3 boltons==20.1 +bump-my-version==0.12.0 cached-property==1.5.1 clang-format==9.0 click==8.0.0 @@ -112,6 +113,6 @@ tabulate==0.8.10 tomli==2.0.1; python_version < "3.11" tox==3.2.0 types-all==1.0.0 -typing-extensions==4.2 +typing-extensions==4.3.0 xxhash==1.4.4 ##[[[end]]] diff --git a/min-requirements-test.txt b/min-requirements-test.txt index c67125df6e..dc4c2df4f7 100644 --- a/min-requirements-test.txt +++ b/min-requirements-test.txt @@ -50,6 +50,7 @@ astunparse==1.6.3; python_version < 
"3.9" attrs==21.3 black==22.3 boltons==20.1 +bump-my-version==0.12.0 cached-property==1.5.1 clang-format==9.0 click==8.0.0 @@ -105,6 +106,6 @@ tabulate==0.8.10 tomli==2.0.1; python_version < "3.11" tox==3.2.0 types-all==1.0.0 -typing-extensions==4.2 +typing-extensions==4.3.0 xxhash==1.4.4 ##[[[end]]] diff --git a/pyproject.toml b/pyproject.toml index 5da8843354..72d071c346 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -278,9 +278,6 @@ target-version = 'py38' [tool.ruff.format] docstring-code-format = true -# Reevaluate once ruff adds support for single-line ellipsis in the stable style. -# Track: https://github.com/astral-sh/ruff/issues/8678) -preview = true [tool.ruff.lint] # # Rules sets: diff --git a/requirements-dev.txt b/requirements-dev.txt index 2d9ad44ba0..e0dadab67a 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -6,15 +6,17 @@ # aenum==3.1.15 # via -c constraints.txt, dace alabaster==0.7.13 # via -c constraints.txt, sphinx +annotated-types==0.6.0 # via -c constraints.txt, pydantic asttokens==2.4.1 # via -c constraints.txt, devtools, stack-data astunparse==1.6.3 ; python_version < "3.9" # via -c constraints.txt, dace, gt4py (pyproject.toml) attrs==23.2.0 # via -c constraints.txt, flake8-bugbear, flake8-eradicate, gt4py (pyproject.toml), hypothesis, jsonschema, referencing babel==2.14.0 # via -c constraints.txt, sphinx backcall==0.2.0 # via -c constraints.txt, ipython -black==24.2.0 # via -c constraints.txt, gt4py (pyproject.toml) +black==24.3.0 # via -c constraints.txt, gt4py (pyproject.toml) blinker==1.7.0 # via -c constraints.txt, flask -boltons==23.1.1 # via -c constraints.txt, gt4py (pyproject.toml) -build==1.0.3 # via -c constraints.txt, pip-tools +boltons==24.0.0 # via -c constraints.txt, gt4py (pyproject.toml) +build==1.2.1 # via -c constraints.txt, pip-tools +bump-my-version==0.12.0 # via -c constraints.txt, -r requirements-dev.in cached-property==1.5.2 # via -c constraints.txt, gt4py (pyproject.toml) cachetools==5.3.3 # via -c constraints.txt, tox certifi==2024.2.2 # via -c constraints.txt, requests @@ -22,14 +24,14 @@ cffi==1.16.0 # via -c constraints.txt, cryptography cfgv==3.4.0 # via -c constraints.txt, pre-commit chardet==5.2.0 # via -c constraints.txt, tox charset-normalizer==3.3.2 # via -c constraints.txt, requests -clang-format==17.0.6 # via -c constraints.txt, -r requirements-dev.in, gt4py (pyproject.toml) -click==8.1.7 # via -c constraints.txt, black, flask, gt4py (pyproject.toml), pip-tools -cmake==3.28.3 # via -c constraints.txt, gt4py (pyproject.toml) -cogapp==3.3.0 # via -c constraints.txt, -r requirements-dev.in +clang-format==18.1.2 # via -c constraints.txt, -r requirements-dev.in, gt4py (pyproject.toml) +click==8.1.7 # via -c constraints.txt, black, bump-my-version, flask, gt4py (pyproject.toml), pip-tools, rich-click +cmake==3.29.0.1 # via -c constraints.txt, gt4py (pyproject.toml) +cogapp==3.4.1 # via -c constraints.txt, -r requirements-dev.in colorama==0.4.6 # via -c constraints.txt, tox -comm==0.2.1 # via -c constraints.txt, ipykernel +comm==0.2.2 # via -c constraints.txt, ipykernel contourpy==1.1.1 # via -c constraints.txt, matplotlib -coverage[toml]==7.4.3 # via -c constraints.txt, -r requirements-dev.in, pytest-cov +coverage[toml]==7.4.4 # via -c constraints.txt, -r requirements-dev.in, pytest-cov cryptography==42.0.5 # via -c constraints.txt, types-paramiko, types-pyopenssl, types-redis cycler==0.12.1 # via -c constraints.txt, matplotlib cytoolz==0.12.3 # via -c constraints.txt, gt4py (pyproject.toml) @@ -47,12 +49,12 @@ 
exceptiongroup==1.2.0 # via -c constraints.txt, hypothesis, pytest execnet==2.0.2 # via -c constraints.txt, pytest-cache, pytest-xdist executing==2.0.1 # via -c constraints.txt, devtools, stack-data factory-boy==3.3.0 # via -c constraints.txt, gt4py (pyproject.toml), pytest-factoryboy -faker==23.2.1 # via -c constraints.txt, factory-boy +faker==24.4.0 # via -c constraints.txt, factory-boy fastjsonschema==2.19.1 # via -c constraints.txt, nbformat -filelock==3.13.1 # via -c constraints.txt, tox, virtualenv +filelock==3.13.3 # via -c constraints.txt, tox, virtualenv flake8==7.0.0 # via -c constraints.txt, -r requirements-dev.in, flake8-bugbear, flake8-builtins, flake8-debugger, flake8-docstrings, flake8-eradicate, flake8-mutable, flake8-pyproject, flake8-rst-docstrings flake8-bugbear==24.2.6 # via -c constraints.txt, -r requirements-dev.in -flake8-builtins==2.2.0 # via -c constraints.txt, -r requirements-dev.in +flake8-builtins==2.4.0 # via -c constraints.txt, -r requirements-dev.in flake8-debugger==4.1.2 # via -c constraints.txt, -r requirements-dev.in flake8-docstrings==1.7.0 # via -c constraints.txt, -r requirements-dev.in flake8-eradicate==1.5.0 # via -c constraints.txt, -r requirements-dev.in @@ -60,19 +62,19 @@ flake8-mutable==1.2.0 # via -c constraints.txt, -r requirements-dev.in flake8-pyproject==1.2.3 # via -c constraints.txt, -r requirements-dev.in flake8-rst-docstrings==0.3.0 # via -c constraints.txt, -r requirements-dev.in flask==3.0.2 # via -c constraints.txt, dace -fonttools==4.49.0 # via -c constraints.txt, matplotlib +fonttools==4.50.0 # via -c constraints.txt, matplotlib fparser==0.1.4 # via -c constraints.txt, dace -frozendict==2.4.0 # via -c constraints.txt, gt4py (pyproject.toml) +frozendict==2.4.1 # via -c constraints.txt, gt4py (pyproject.toml) gridtools-cpp==2.3.2 # via -c constraints.txt, gt4py (pyproject.toml) -hypothesis==6.98.13 # via -c constraints.txt, -r requirements-dev.in, gt4py (pyproject.toml) +hypothesis==6.100.0 # via -c constraints.txt, -r requirements-dev.in, gt4py (pyproject.toml) identify==2.5.35 # via -c constraints.txt, pre-commit idna==3.6 # via -c constraints.txt, requests imagesize==1.4.1 # via -c constraints.txt, sphinx -importlib-metadata==7.0.1 # via -c constraints.txt, build, flask, jax, jupyter-client, sphinx -importlib-resources==6.1.2 ; python_version < "3.9" # via -c constraints.txt, gt4py (pyproject.toml), jsonschema, jsonschema-specifications, matplotlib +importlib-metadata==7.1.0 # via -c constraints.txt, build, flask, jax, jupyter-client, sphinx +importlib-resources==6.4.0 ; python_version < "3.9" # via -c constraints.txt, gt4py (pyproject.toml), jsonschema, jsonschema-specifications, matplotlib inflection==0.5.1 # via -c constraints.txt, pytest-factoryboy iniconfig==2.0.0 # via -c constraints.txt, pytest -ipykernel==6.29.3 # via -c constraints.txt, nbmake +ipykernel==6.29.4 # via -c constraints.txt, nbmake ipython==8.12.3 # via -c constraints.txt, ipykernel isort==5.13.2 # via -c constraints.txt, -r requirements-dev.in itsdangerous==2.1.2 # via -c constraints.txt, flask @@ -82,13 +84,13 @@ jedi==0.19.1 # via -c constraints.txt, ipython jinja2==3.1.3 # via -c constraints.txt, flask, gt4py (pyproject.toml), sphinx jsonschema==4.21.1 # via -c constraints.txt, nbformat jsonschema-specifications==2023.12.1 # via -c constraints.txt, jsonschema -jupyter-client==8.6.0 # via -c constraints.txt, ipykernel, nbclient -jupyter-core==5.7.1 # via -c constraints.txt, ipykernel, jupyter-client, nbformat +jupyter-client==8.6.1 # via -c constraints.txt, 
ipykernel, nbclient +jupyter-core==5.7.2 # via -c constraints.txt, ipykernel, jupyter-client, nbformat jupytext==1.16.1 # via -c constraints.txt, -r requirements-dev.in kiwisolver==1.4.5 # via -c constraints.txt, matplotlib lark==1.1.9 # via -c constraints.txt, gt4py (pyproject.toml) mako==1.3.2 # via -c constraints.txt, gt4py (pyproject.toml) -markdown-it-py==3.0.0 # via -c constraints.txt, jupytext, mdit-py-plugins +markdown-it-py==3.0.0 # via -c constraints.txt, jupytext, mdit-py-plugins, rich markupsafe==2.1.5 # via -c constraints.txt, jinja2, mako, werkzeug matplotlib==3.7.5 # via -c constraints.txt, -r requirements-dev.in matplotlib-inline==0.1.6 # via -c constraints.txt, ipykernel, ipython @@ -97,12 +99,12 @@ mdit-py-plugins==0.4.0 # via -c constraints.txt, jupytext mdurl==0.1.2 # via -c constraints.txt, markdown-it-py ml-dtypes==0.2.0 # via -c constraints.txt, jax, jaxlib mpmath==1.3.0 # via -c constraints.txt, sympy -mypy==1.8.0 # via -c constraints.txt, -r requirements-dev.in +mypy==1.9.0 # via -c constraints.txt, -r requirements-dev.in mypy-extensions==1.0.0 # via -c constraints.txt, black, mypy nanobind==1.9.2 # via -c constraints.txt, gt4py (pyproject.toml) nbclient==0.6.8 # via -c constraints.txt, nbmake -nbformat==5.9.2 # via -c constraints.txt, jupytext, nbclient, nbmake -nbmake==1.5.0 # via -c constraints.txt, -r requirements-dev.in +nbformat==5.10.3 # via -c constraints.txt, jupytext, nbclient, nbmake +nbmake==1.5.3 # via -c constraints.txt, -r requirements-dev.in nest-asyncio==1.6.0 # via -c constraints.txt, ipykernel, nbclient networkx==3.1 # via -c constraints.txt, dace ninja==1.11.1.1 # via -c constraints.txt, gt4py (pyproject.toml) @@ -110,14 +112,14 @@ nodeenv==1.8.0 # via -c constraints.txt, pre-commit numpy==1.24.4 # via -c constraints.txt, contourpy, dace, gt4py (pyproject.toml), jax, jaxlib, matplotlib, ml-dtypes, opt-einsum, scipy, types-jack-client opt-einsum==3.3.0 # via -c constraints.txt, jax ordered-set==4.1.0 # via -c constraints.txt, deepdiff -packaging==23.2 # via -c constraints.txt, black, build, gt4py (pyproject.toml), ipykernel, jupytext, matplotlib, pyproject-api, pytest, setuptools-scm, sphinx, tox +packaging==24.0 # via -c constraints.txt, black, build, gt4py (pyproject.toml), ipykernel, jupytext, matplotlib, pyproject-api, pytest, pytest-factoryboy, setuptools-scm, sphinx, tox parso==0.8.3 # via -c constraints.txt, jedi pathspec==0.12.1 # via -c constraints.txt, black pexpect==4.9.0 # via -c constraints.txt, ipython pickleshare==0.7.5 # via -c constraints.txt, ipython -pillow==10.2.0 # via -c constraints.txt, matplotlib -pip-tools==7.4.0 # via -c constraints.txt, -r requirements-dev.in -pipdeptree==2.15.1 # via -c constraints.txt, -r requirements-dev.in +pillow==10.3.0 # via -c constraints.txt, matplotlib +pip-tools==7.4.1 # via -c constraints.txt, -r requirements-dev.in +pipdeptree==2.16.2 # via -c constraints.txt, -r requirements-dev.in pkgutil-resolve-name==1.3.10 # via -c constraints.txt, jsonschema platformdirs==4.2.0 # via -c constraints.txt, black, jupyter-core, tox, virtualenv pluggy==1.4.0 # via -c constraints.txt, pytest, tox @@ -127,29 +129,35 @@ prompt-toolkit==3.0.43 # via -c constraints.txt, ipython psutil==5.9.8 # via -c constraints.txt, -r requirements-dev.in, ipykernel, pytest-xdist ptyprocess==0.7.0 # via -c constraints.txt, pexpect pure-eval==0.2.2 # via -c constraints.txt, stack-data -pybind11==2.11.1 # via -c constraints.txt, gt4py (pyproject.toml) +pybind11==2.12.0 # via -c constraints.txt, gt4py (pyproject.toml) 
pycodestyle==2.11.1 # via -c constraints.txt, flake8, flake8-debugger -pycparser==2.21 # via -c constraints.txt, cffi +pycparser==2.22 # via -c constraints.txt, cffi +pydantic==2.0a3 # via -c constraints.txt, bump-my-version, pydantic-settings +pydantic-core==0.25.0 # via -c constraints.txt, pydantic +pydantic-settings==1.99 # via -c constraints.txt, bump-my-version pydocstyle==6.3.0 # via -c constraints.txt, flake8-docstrings pyflakes==3.2.0 # via -c constraints.txt, flake8 -pygments==2.17.2 # via -c constraints.txt, -r requirements-dev.in, devtools, flake8-rst-docstrings, ipython, nbmake, sphinx -pyparsing==3.1.1 # via -c constraints.txt, matplotlib +pygments==2.17.2 # via -c constraints.txt, -r requirements-dev.in, devtools, flake8-rst-docstrings, ipython, nbmake, rich, sphinx +pyparsing==3.1.2 # via -c constraints.txt, matplotlib pyproject-api==1.6.1 # via -c constraints.txt, tox pyproject-hooks==1.0.0 # via -c constraints.txt, build, pip-tools -pytest==8.0.2 # via -c constraints.txt, -r requirements-dev.in, gt4py (pyproject.toml), nbmake, pytest-cache, pytest-cov, pytest-factoryboy, pytest-xdist +pytest==8.1.1 # via -c constraints.txt, -r requirements-dev.in, gt4py (pyproject.toml), nbmake, pytest-cache, pytest-cov, pytest-factoryboy, pytest-xdist pytest-cache==1.0 # via -c constraints.txt, -r requirements-dev.in -pytest-cov==4.1.0 # via -c constraints.txt, -r requirements-dev.in -pytest-factoryboy==2.6.0 # via -c constraints.txt, -r requirements-dev.in +pytest-cov==5.0.0 # via -c constraints.txt, -r requirements-dev.in +pytest-factoryboy==2.7.0 # via -c constraints.txt, -r requirements-dev.in pytest-xdist[psutil]==3.5.0 # via -c constraints.txt, -r requirements-dev.in -python-dateutil==2.8.2 # via -c constraints.txt, faker, jupyter-client, matplotlib +python-dateutil==2.9.0.post0 # via -c constraints.txt, faker, jupyter-client, matplotlib +python-dotenv==1.0.1 # via -c constraints.txt, pydantic-settings pytz==2024.1 # via -c constraints.txt, babel pyyaml==6.0.1 # via -c constraints.txt, dace, jupytext, pre-commit pyzmq==25.1.2 # via -c constraints.txt, ipykernel, jupyter-client -referencing==0.33.0 # via -c constraints.txt, jsonschema, jsonschema-specifications +referencing==0.34.0 # via -c constraints.txt, jsonschema, jsonschema-specifications requests==2.31.0 # via -c constraints.txt, dace, sphinx restructuredtext-lint==1.4.0 # via -c constraints.txt, flake8-rst-docstrings +rich==13.7.1 # via -c constraints.txt, bump-my-version, rich-click +rich-click==1.7.4 # via -c constraints.txt, bump-my-version rpds-py==0.18.0 # via -c constraints.txt, jsonschema, referencing -ruff==0.2.2 # via -c constraints.txt, -r requirements-dev.in +ruff==0.3.5 # via -c constraints.txt, -r requirements-dev.in scipy==1.10.1 # via -c constraints.txt, jax, jaxlib setuptools-scm==8.0.4 # via -c constraints.txt, fparser six==1.16.0 # via -c constraints.txt, asttokens, astunparse, python-dateutil @@ -169,36 +177,37 @@ sympy==1.9 # via -c constraints.txt, dace, gt4py (pyproject.toml) tabulate==0.9.0 # via -c constraints.txt, gt4py (pyproject.toml) toml==0.10.2 # via -c constraints.txt, jupytext tomli==2.0.1 ; python_version < "3.11" # via -c constraints.txt, -r requirements-dev.in, black, build, coverage, flake8-pyproject, mypy, pip-tools, pyproject-api, pyproject-hooks, pytest, setuptools-scm, tox +tomlkit==0.12.4 # via -c constraints.txt, bump-my-version toolz==0.12.1 # via -c constraints.txt, cytoolz tornado==6.4 # via -c constraints.txt, ipykernel, jupyter-client -tox==4.13.0 # via -c constraints.txt, -r 
requirements-dev.in -traitlets==5.14.1 # via -c constraints.txt, comm, ipykernel, ipython, jupyter-client, jupyter-core, matplotlib-inline, nbclient, nbformat -types-aiofiles==23.2.0.20240106 # via -c constraints.txt, types-all +tox==4.14.2 # via -c constraints.txt, -r requirements-dev.in +traitlets==5.14.2 # via -c constraints.txt, comm, ipykernel, ipython, jupyter-client, jupyter-core, matplotlib-inline, nbclient, nbformat +types-aiofiles==23.2.0.20240331 # via -c constraints.txt, types-all types-all==1.0.0 # via -c constraints.txt, -r requirements-dev.in types-annoy==1.17.8.4 # via -c constraints.txt, types-all types-atomicwrites==1.4.5.1 # via -c constraints.txt, types-all types-backports==0.1.3 # via -c constraints.txt, types-all types-backports-abc==0.5.2 # via -c constraints.txt, types-all -types-bleach==6.1.0.20240222 # via -c constraints.txt, types-all +types-bleach==6.1.0.20240331 # via -c constraints.txt, types-all types-boto==2.49.18.20240205 # via -c constraints.txt, types-all types-cachetools==5.3.0.7 # via -c constraints.txt, types-all types-certifi==2021.10.8.3 # via -c constraints.txt, types-all -types-cffi==1.16.0.20240106 # via -c constraints.txt, types-jack-client +types-cffi==1.16.0.20240331 # via -c constraints.txt, types-jack-client types-characteristic==14.3.7 # via -c constraints.txt, types-all types-chardet==5.0.4.6 # via -c constraints.txt, types-all types-click==7.1.8 # via -c constraints.txt, types-all, types-flask -types-click-spinner==0.1.13.20240106 # via -c constraints.txt, types-all -types-colorama==0.4.15.20240205 # via -c constraints.txt, types-all +types-click-spinner==0.1.13.20240311 # via -c constraints.txt, types-all +types-colorama==0.4.15.20240311 # via -c constraints.txt, types-all types-contextvars==2.4.7.3 # via -c constraints.txt, types-all -types-croniter==2.0.0.20240106 # via -c constraints.txt, types-all +types-croniter==2.0.0.20240321 # via -c constraints.txt, types-all types-cryptography==3.3.23.2 # via -c constraints.txt, types-all, types-openssl-python, types-pyjwt types-dataclasses==0.6.6 # via -c constraints.txt, types-all -types-dateparser==1.1.4.20240106 # via -c constraints.txt, types-all +types-dateparser==1.1.4.20240331 # via -c constraints.txt, types-all types-datetimerange==2.0.0.6 # via -c constraints.txt, types-all -types-decorator==5.1.8.20240106 # via -c constraints.txt, types-all -types-deprecated==1.2.9.20240106 # via -c constraints.txt, types-all +types-decorator==5.1.8.20240310 # via -c constraints.txt, types-all +types-deprecated==1.2.9.20240311 # via -c constraints.txt, types-all types-docopt==0.6.11.4 # via -c constraints.txt, types-all -types-docutils==0.20.0.20240227 # via -c constraints.txt, types-all +types-docutils==0.20.0.20240331 # via -c constraints.txt, types-all types-emoji==2.1.0.3 # via -c constraints.txt, types-all types-enum34==1.1.8 # via -c constraints.txt, types-all types-fb303==1.0.0 # via -c constraints.txt, types-all, types-scribe @@ -209,53 +218,53 @@ types-freezegun==1.1.10 # via -c constraints.txt, types-all types-frozendict==2.0.9 # via -c constraints.txt, types-all types-futures==3.3.8 # via -c constraints.txt, types-all types-geoip2==3.0.0 # via -c constraints.txt, types-all -types-html5lib==1.1.11.20240222 # via -c constraints.txt, types-bleach +types-html5lib==1.1.11.20240228 # via -c constraints.txt, types-bleach types-ipaddress==1.0.8 # via -c constraints.txt, types-all, types-maxminddb types-itsdangerous==1.1.6 # via -c constraints.txt, types-all types-jack-client==0.5.10.20240106 # via 
-c constraints.txt, types-all types-jinja2==2.11.9 # via -c constraints.txt, types-all, types-flask types-kazoo==0.1.3 # via -c constraints.txt, types-all -types-markdown==3.5.0.20240129 # via -c constraints.txt, types-all +types-markdown==3.6.0.20240316 # via -c constraints.txt, types-all types-markupsafe==1.1.10 # via -c constraints.txt, types-all, types-jinja2 types-maxminddb==1.5.0 # via -c constraints.txt, types-all, types-geoip2 -types-mock==5.1.0.20240106 # via -c constraints.txt, types-all -types-mypy-extensions==1.0.0.5 # via -c constraints.txt, types-all +types-mock==5.1.0.20240311 # via -c constraints.txt, types-all +types-mypy-extensions==1.0.0.20240311 # via -c constraints.txt, types-all types-nmap==0.1.6 # via -c constraints.txt, types-all types-openssl-python==0.1.3 # via -c constraints.txt, types-all types-orjson==3.6.2 # via -c constraints.txt, types-all -types-paramiko==3.4.0.20240205 # via -c constraints.txt, types-all, types-pysftp +types-paramiko==3.4.0.20240311 # via -c constraints.txt, types-all, types-pysftp types-pathlib2==2.3.0 # via -c constraints.txt, types-all -types-pillow==10.2.0.20240213 # via -c constraints.txt, types-all +types-pillow==10.2.0.20240331 # via -c constraints.txt, types-all types-pkg-resources==0.1.3 # via -c constraints.txt, types-all -types-polib==1.2.0.20240115 # via -c constraints.txt, types-all -types-protobuf==4.24.0.20240129 # via -c constraints.txt, types-all +types-polib==1.2.0.20240327 # via -c constraints.txt, types-all +types-protobuf==4.24.0.20240311 # via -c constraints.txt, types-all types-pyaudio==0.2.16.20240106 # via -c constraints.txt, types-all -types-pycurl==7.45.2.20240106 # via -c constraints.txt, types-all -types-pyfarmhash==0.3.1.2 # via -c constraints.txt, types-all +types-pycurl==7.45.2.20240311 # via -c constraints.txt, types-all +types-pyfarmhash==0.3.1.20240311 # via -c constraints.txt, types-all types-pyjwt==1.7.1 # via -c constraints.txt, types-all types-pymssql==2.1.0 # via -c constraints.txt, types-all types-pymysql==1.1.0.1 # via -c constraints.txt, types-all -types-pyopenssl==24.0.0.20240130 # via -c constraints.txt, types-redis +types-pyopenssl==24.0.0.20240311 # via -c constraints.txt, types-redis types-pyrfc3339==1.1.1.5 # via -c constraints.txt, types-all types-pysftp==0.2.17.20240106 # via -c constraints.txt, types-all -types-python-dateutil==2.8.19.20240106 # via -c constraints.txt, types-all, types-datetimerange +types-python-dateutil==2.9.0.20240316 # via -c constraints.txt, types-all, types-datetimerange types-python-gflags==3.1.7.3 # via -c constraints.txt, types-all -types-python-slugify==8.0.2.20240127 # via -c constraints.txt, types-all +types-python-slugify==8.0.2.20240310 # via -c constraints.txt, types-all types-pytz==2024.1.0.20240203 # via -c constraints.txt, types-all, types-tzlocal types-pyvmomi==8.0.0.6 # via -c constraints.txt, types-all -types-pyyaml==6.0.12.12 # via -c constraints.txt, types-all -types-redis==4.6.0.20240218 # via -c constraints.txt, types-all -types-requests==2.31.0.20240218 # via -c constraints.txt, types-all +types-pyyaml==6.0.12.20240311 # via -c constraints.txt, types-all +types-redis==4.6.0.20240311 # via -c constraints.txt, types-all +types-requests==2.31.0.20240402 # via -c constraints.txt, types-all types-retry==0.9.9.4 # via -c constraints.txt, types-all types-routes==2.5.0 # via -c constraints.txt, types-all types-scribe==2.0.0 # via -c constraints.txt, types-all -types-setuptools==69.1.0.20240223 # via -c constraints.txt, types-cffi 
-types-simplejson==3.19.0.20240218 # via -c constraints.txt, types-all +types-setuptools==69.2.0.20240317 # via -c constraints.txt, types-cffi +types-simplejson==3.19.0.20240310 # via -c constraints.txt, types-all types-singledispatch==4.1.0.0 # via -c constraints.txt, types-all -types-six==1.16.21.20240106 # via -c constraints.txt, types-all +types-six==1.16.21.20240311 # via -c constraints.txt, types-all types-tabulate==0.9.0.20240106 # via -c constraints.txt, types-all types-termcolor==1.1.6.2 # via -c constraints.txt, types-all -types-toml==0.10.8.7 # via -c constraints.txt, types-all +types-toml==0.10.8.20240310 # via -c constraints.txt, types-all types-tornado==5.1.1 # via -c constraints.txt, types-all types-typed-ast==1.5.8.7 # via -c constraints.txt, types-all types-tzlocal==5.1.0.1 # via -c constraints.txt, types-all @@ -263,16 +272,16 @@ types-ujson==5.9.0.0 # via -c constraints.txt, types-all types-waitress==2.1.4.20240106 # via -c constraints.txt, types-all types-werkzeug==1.0.9 # via -c constraints.txt, types-all, types-flask types-xxhash==3.0.5.2 # via -c constraints.txt, types-all -typing-extensions==4.5.0 # via -c constraints.txt, black, faker, gt4py (pyproject.toml), ipython, mypy, pytest-factoryboy, setuptools-scm +typing-extensions==4.5.0 # via -c constraints.txt, annotated-types, black, faker, gt4py (pyproject.toml), ipython, mypy, pydantic, pydantic-core, pytest-factoryboy, rich, rich-click, setuptools-scm urllib3==2.2.1 # via -c constraints.txt, requests, types-requests virtualenv==20.25.1 # via -c constraints.txt, pre-commit, tox wcwidth==0.2.13 # via -c constraints.txt, prompt-toolkit websockets==12.0 # via -c constraints.txt, dace -werkzeug==3.0.1 # via -c constraints.txt, flask -wheel==0.42.0 # via -c constraints.txt, astunparse, pip-tools +werkzeug==3.0.2 # via -c constraints.txt, flask +wheel==0.43.0 # via -c constraints.txt, astunparse, pip-tools xxhash==3.0.0 # via -c constraints.txt, gt4py (pyproject.toml) -zipp==3.17.0 # via -c constraints.txt, importlib-metadata, importlib-resources +zipp==3.18.1 # via -c constraints.txt, importlib-metadata, importlib-resources # The following packages are considered to be unsafe in a requirements file: -pip==24.0 # via -c constraints.txt, pip-tools -setuptools==69.1.1 # via -c constraints.txt, gt4py (pyproject.toml), nodeenv, pip-tools, setuptools-scm +pip==24.0 # via -c constraints.txt, pip-tools, pipdeptree +setuptools==69.2.0 # via -c constraints.txt, gt4py (pyproject.toml), nodeenv, pip-tools, setuptools-scm diff --git a/src/gt4py/cartesian/backend/base.py b/src/gt4py/cartesian/backend/base.py index 61488f1135..5325893a04 100644 --- a/src/gt4py/cartesian/backend/base.py +++ b/src/gt4py/cartesian/backend/base.py @@ -411,9 +411,11 @@ def build_extension_module( assert module_name == qualified_pyext_name - self.builder.with_backend_data({ - "pyext_module_name": module_name, - "pyext_file_path": file_path, - }) + self.builder.with_backend_data( + { + "pyext_module_name": module_name, + "pyext_file_path": file_path, + } + ) return module_name, file_path diff --git a/src/gt4py/cartesian/backend/dace_backend.py b/src/gt4py/cartesian/backend/dace_backend.py index 7ec542e98c..6d60422d5a 100644 --- a/src/gt4py/cartesian/backend/dace_backend.py +++ b/src/gt4py/cartesian/backend/dace_backend.py @@ -86,12 +86,14 @@ def _get_expansion_priority_cpu(node: StencilComputation): expansion_priority = [] if node.has_splittable_regions(): expansion_priority.append(["Sections", "Stages", "I", "J", "K"]) - expansion_priority.extend([ - 
["TileJ", "TileI", "IMap", "JMap", "Sections", "K", "Stages"], - ["TileJ", "TileI", "IMap", "JMap", "Sections", "Stages", "K"], - ["TileJ", "TileI", "Sections", "Stages", "IMap", "JMap", "K"], - ["TileJ", "TileI", "Sections", "K", "Stages", "JMap", "IMap"], - ]) + expansion_priority.extend( + [ + ["TileJ", "TileI", "IMap", "JMap", "Sections", "K", "Stages"], + ["TileJ", "TileI", "IMap", "JMap", "Sections", "Stages", "K"], + ["TileJ", "TileI", "Sections", "Stages", "IMap", "JMap", "K"], + ["TileJ", "TileI", "Sections", "K", "Stages", "JMap", "IMap"], + ] + ) return expansion_priority @@ -487,16 +489,18 @@ def generate_tmp_allocs(self, sdfg): threadlocal_fmt, "}}", ] - res.extend([ - fmt.format( - name=name, - sdfg_id=array_sdfg.sdfg_id, - dtype=array.dtype.ctype, - size=f"omp_max_threads * ({array.total_size})", - local_size=array.total_size, - ) - for fmt in fmts - ]) + res.extend( + [ + fmt.format( + name=name, + sdfg_id=array_sdfg.sdfg_id, + dtype=array.dtype.ctype, + size=f"omp_max_threads * ({array.total_size})", + local_size=array.total_size, + ) + for fmt in fmts + ] + ) return res @staticmethod @@ -613,18 +617,22 @@ def generate_dace_args(self, stencil_ir: gtir.Stencil, sdfg: dace.SDFG) -> List[ # api field strides fmt = "gt::sid::get_stride<{dim}>(gt::sid::get_strides(__{name}_sid))" - symbols.update({ - f"__{name}_{dim}_stride": fmt.format( - dim=f"gt::stencil::dim::{dim.lower()}", name=name - ) - for dim in dims - }) - symbols.update({ - f"__{name}_d{dim}_stride": fmt.format( - dim=f"gt::integral_constant", name=name - ) - for dim in range(data_ndim) - }) + symbols.update( + { + f"__{name}_{dim}_stride": fmt.format( + dim=f"gt::stencil::dim::{dim.lower()}", name=name + ) + for dim in dims + } + ) + symbols.update( + { + f"__{name}_d{dim}_stride": fmt.format( + dim=f"gt::integral_constant", name=name + ) + for dim in range(data_ndim) + } + ) # api field pointers fmt = """gt::sid::multi_shifted( @@ -738,12 +746,14 @@ def apply(cls, stencil_ir: gtir.Stencil, sdfg: dace.SDFG, module_name: str, *, b class DaCePyExtModuleGenerator(PyExtModuleGenerator): def generate_imports(self): - return "\n".join([ - *super().generate_imports().splitlines(), - "import dace", - "import copy", - "from gt4py.cartesian.backend.dace_stencil_object import DaCeStencilObject", - ]) + return "\n".join( + [ + *super().generate_imports().splitlines(), + "import dace", + "import copy", + "from gt4py.cartesian.backend.dace_stencil_object import DaCeStencilObject", + ] + ) def generate_base_class_name(self): return "DaCeStencilObject" diff --git a/src/gt4py/cartesian/backend/numpy_backend.py b/src/gt4py/cartesian/backend/numpy_backend.py index b43e4d979a..6f1aab52cf 100644 --- a/src/gt4py/cartesian/backend/numpy_backend.py +++ b/src/gt4py/cartesian/backend/numpy_backend.py @@ -42,12 +42,14 @@ def generate_imports(self) -> str: comp_pkg = ( self.builder.caching.module_prefix + "computation" + self.builder.caching.module_postfix ) - return "\n".join([ - *super().generate_imports().splitlines(), - "import pathlib", - "from gt4py.cartesian.utils import make_module_from_file", - f'computation = make_module_from_file("{comp_pkg}", pathlib.Path(__file__).parent / "{comp_pkg}.py")', - ]) + return "\n".join( + [ + *super().generate_imports().splitlines(), + "import pathlib", + "from gt4py.cartesian.utils import make_module_from_file", + f'computation = make_module_from_file("{comp_pkg}", pathlib.Path(__file__).parent / "{comp_pkg}.py")', + ] + ) def generate_implementation(self) -> str: params = [f"{p.name}={p.name}" for 
p in self.builder.gtir.params] diff --git a/src/gt4py/cartesian/caching.py b/src/gt4py/cartesian/caching.py index 1b78973b6d..4d716a6c79 100644 --- a/src/gt4py/cartesian/caching.py +++ b/src/gt4py/cartesian/caching.py @@ -259,9 +259,9 @@ def is_cache_info_available_and_consistent( and cache_info_ns.module_shash == module_shash ) if validate_extra: - result &= all([ - cache_info[key] == validate_extra[key] for key in validate_extra - ]) + result &= all( + [cache_info[key] == validate_extra[key] for key in validate_extra] + ) except Exception as err: if not catch_exceptions: raise err diff --git a/src/gt4py/cartesian/frontend/gtscript_frontend.py b/src/gt4py/cartesian/frontend/gtscript_frontend.py index b85f203c01..fc19b8c253 100644 --- a/src/gt4py/cartesian/frontend/gtscript_frontend.py +++ b/src/gt4py/cartesian/frontend/gtscript_frontend.py @@ -1520,9 +1520,9 @@ def visit_With(self, node: ast.With): self.parsing_horizontal_region = True intervals_dicts = self._visit_with_horizontal(node.items[0], loc) - all_stmts = gt_utils.flatten([ - gtc_utils.listify(self.visit(stmt)) for stmt in node.body - ]) + all_stmts = gt_utils.flatten( + [gtc_utils.listify(self.visit(stmt)) for stmt in node.body] + ) self.parsing_horizontal_region = False stmts = list(filter(lambda stmt: isinstance(stmt, nodes.Decl), all_stmts)) body_block = nodes.BlockStmt( @@ -1536,10 +1536,12 @@ def visit_With(self, node: ast.With): "The following variables are" f"written before being referenced with an offset in a horizontal region: {', '.join(written_then_offset)}" ) - stmts.extend([ - nodes.HorizontalIf(intervals=intervals_dict, body=body_block) - for intervals_dict in intervals_dicts - ]) + stmts.extend( + [ + nodes.HorizontalIf(intervals=intervals_dict, body=body_block) + for intervals_dict in intervals_dicts + ] + ) return stmts else: # If we find nested `with` blocks flatten them, i.e. 
transform @@ -1902,12 +1904,14 @@ def resolve_external_symbols( for name, accesses in resolved_imports.items(): if accesses: for attr_name, attr_nodes in accesses.items(): - resolved_values_list.append(( - attr_name, - GTScriptParser.eval_external( - attr_name, context, nodes.Location.from_ast_node(attr_nodes[0]) - ), - )) + resolved_values_list.append( + ( + attr_name, + GTScriptParser.eval_external( + attr_name, context, nodes.Location.from_ast_node(attr_nodes[0]) + ), + ) + ) elif not exhaustive: resolved_values_list.append((name, GTScriptParser.eval_external(name, context))) diff --git a/src/gt4py/cartesian/gtc/dace/expansion/daceir_builder.py b/src/gt4py/cartesian/gtc/dace/expansion/daceir_builder.py index c882c1bb96..9a214441ad 100644 --- a/src/gt4py/cartesian/gtc/dace/expansion/daceir_builder.py +++ b/src/gt4py/cartesian/gtc/dace/expansion/daceir_builder.py @@ -437,9 +437,9 @@ def visit_HorizontalExecution( ) expansion_items = global_ctx.library_node.expansion_specification[stages_idx + 1 :] - iteration_ctx = iteration_ctx.push_axes_extents({ - k: v for k, v in zip(dcir.Axis.dims_horizontal(), extent) - }) + iteration_ctx = iteration_ctx.push_axes_extents( + {k: v for k, v in zip(dcir.Axis.dims_horizontal(), extent)} + ) iteration_ctx = iteration_ctx.push_expansion_items(expansion_items) assert iteration_ctx.grid_subset == dcir.GridSubset.single_gridpoint() diff --git a/src/gt4py/cartesian/gtc/dace/nodes.py b/src/gt4py/cartesian/gtc/dace/nodes.py index 7a0db46db5..bd8c08034c 100644 --- a/src/gt4py/cartesian/gtc/dace/nodes.py +++ b/src/gt4py/cartesian/gtc/dace/nodes.py @@ -144,10 +144,12 @@ def __init__( for decl in declarations.values() if isinstance(decl, oir.ScalarDecl) } - self.symbol_mapping.update({ - axis.domain_symbol(): dace.symbol(axis.domain_symbol(), dtype=dace.int32) - for axis in dcir.Axis.dims_horizontal() - }) + self.symbol_mapping.update( + { + axis.domain_symbol(): dace.symbol(axis.domain_symbol(), dtype=dace.int32) + for axis in dcir.Axis.dims_horizontal() + } + ) self.access_infos = compute_dcir_access_infos( oir_node, oir_decls=declarations, diff --git a/src/gt4py/cartesian/gtc/dace/utils.py b/src/gt4py/cartesian/gtc/dace/utils.py index dac0c8acc5..cfde545f40 100644 --- a/src/gt4py/cartesian/gtc/dace/utils.py +++ b/src/gt4py/cartesian/gtc/dace/utils.py @@ -129,9 +129,12 @@ def visit_VerticalLoopSection( k_grid = dcir.GridSubset.from_interval(grid_subset.intervals[dcir.Axis.K], dcir.Axis.K) inner_infos = {name: info.apply_iteration(k_grid) for name, info in inner_infos.items()} - ctx.access_infos.update({ - name: info.union(ctx.access_infos.get(name, info)) for name, info in inner_infos.items() - }) + ctx.access_infos.update( + { + name: info.union(ctx.access_infos.get(name, info)) + for name, info in inner_infos.items() + } + ) return ctx.access_infos @@ -167,9 +170,12 @@ def visit_HorizontalExecution( inner_infos = {name: info.apply_iteration(ij_grid) for name, info in inner_infos.items()} - ctx.access_infos.update({ - name: info.union(ctx.access_infos.get(name, info)) for name, info in inner_infos.items() - }) + ctx.access_infos.update( + { + name: info.union(ctx.access_infos.get(name, info)) + for name, info in inner_infos.items() + } + ) return ctx.access_infos diff --git a/src/gt4py/cartesian/gtc/definitions.py b/src/gt4py/cartesian/gtc/definitions.py index d760082872..e3bbbe8a45 100644 --- a/src/gt4py/cartesian/gtc/definitions.py +++ b/src/gt4py/cartesian/gtc/definitions.py @@ -436,9 +436,9 @@ def _apply(self, other, left_func, right_func=None): raise 
ValueError("Incompatible instance '{obj}'".format(obj=other)) right_func = right_func or left_func - return type(self)([ - tuple([left_func(a[0], b[0]), right_func(a[1], b[1])]) for a, b in zip(self, other) - ]) + return type(self)( + [tuple([left_func(a[0], b[0]), right_func(a[1], b[1])]) for a, b in zip(self, other)] + ) def _reduce(self, reduce_func, out_type=tuple): return out_type([reduce_func(d[0], d[1]) for d in self]) diff --git a/src/gt4py/cartesian/gtc/gtcpp/gtcpp_codegen.py b/src/gt4py/cartesian/gtc/gtcpp/gtcpp_codegen.py index 7795472b41..4e56b159d9 100644 --- a/src/gt4py/cartesian/gtc/gtcpp/gtcpp_codegen.py +++ b/src/gt4py/cartesian/gtc/gtcpp/gtcpp_codegen.py @@ -104,10 +104,12 @@ def visit_AccessorRef( if accessor_ref.name in temp_decls and accessor_ref.data_index: # Cannot use symtable. See https://github.com/GridTools/gt4py/issues/808 temp = temp_decls[accessor_ref.name] - data_index = "+".join([ - f"{self.visit(index, in_data_index=True, **kwargs)}*{int(np.prod(temp.data_dims[i+1:], initial=1))}" - for i, index in enumerate(accessor_ref.data_index) - ]) + data_index = "+".join( + [ + f"{self.visit(index, in_data_index=True, **kwargs)}*{int(np.prod(temp.data_dims[i+1:], initial=1))}" + for i, index in enumerate(accessor_ref.data_index) + ] + ) return f"eval({accessor_ref.name}({i_offset}, {j_offset}, {k_offset}))[{data_index}]" else: data_index = "".join( diff --git a/src/gt4py/cartesian/gtc/passes/oir_optimizations/temporaries.py b/src/gt4py/cartesian/gtc/passes/oir_optimizations/temporaries.py index a44a500c5a..c97b478f77 100644 --- a/src/gt4py/cartesian/gtc/passes/oir_optimizations/temporaries.py +++ b/src/gt4py/cartesian/gtc/passes/oir_optimizations/temporaries.py @@ -106,9 +106,9 @@ class LocalTemporariesToScalars(TemporariesToScalarsBase): def visit_Stencil(self, node: oir.Stencil, **kwargs: Any) -> oir.Stencil: horizontal_executions = node.walk_values().if_isinstance(oir.HorizontalExecution) - temps_without_data_dims = set([ - decl.name for decl in node.declarations if not decl.data_dims - ]) + temps_without_data_dims = set( + [decl.name for decl in node.declarations if not decl.data_dims] + ) counts: collections.Counter = sum( ( collections.Counter( diff --git a/src/gt4py/cartesian/gtc/passes/oir_optimizations/utils.py b/src/gt4py/cartesian/gtc/passes/oir_optimizations/utils.py index a0b335d70a..ddf4713757 100644 --- a/src/gt4py/cartesian/gtc/passes/oir_optimizations/utils.py +++ b/src/gt4py/cartesian/gtc/passes/oir_optimizations/utils.py @@ -173,16 +173,18 @@ class CartesianAccessCollection(GenericAccessCollection[CartesianAccess, Tuple[i class GeneralAccessCollection(GenericAccessCollection[GeneralAccess, GeneralOffsetTuple]): def cartesian_accesses(self) -> "AccessCollector.CartesianAccessCollection": - return AccessCollector.CartesianAccessCollection([ - CartesianAccess( - field=acc.field, - offset=cast(Tuple[int, int, int], acc.offset), - data_index=acc.data_index, - is_write=acc.is_write, - ) - for acc in self._ordered_accesses - if acc.offset[2] is not None - ]) + return AccessCollector.CartesianAccessCollection( + [ + CartesianAccess( + field=acc.field, + offset=cast(Tuple[int, int, int], acc.offset), + data_index=acc.data_index, + is_write=acc.is_write, + ) + for acc in self._ordered_accesses + if acc.offset[2] is not None + ] + ) def has_variable_access(self) -> bool: return any(acc.offset[2] is None for acc in self._ordered_accesses) diff --git a/src/gt4py/cartesian/testing/input_strategies.py b/src/gt4py/cartesian/testing/input_strategies.py index 
37646f1af5..008b859929 100644 --- a/src/gt4py/cartesian/testing/input_strategies.py +++ b/src/gt4py/cartesian/testing/input_strategies.py @@ -178,9 +178,9 @@ def derived_shape_st(shape_st, extra: Sequence[Optional[int]]): both shape and extra elements are summed together. """ return hyp_st.builds( - lambda shape: tuple([ - d + e for d, e in itertools.zip_longest(shape, extra, fillvalue=0) if e is not None - ]), + lambda shape: tuple( + [d + e for d, e in itertools.zip_longest(shape, extra, fillvalue=0) if e is not None] + ), shape_st, ) diff --git a/src/gt4py/cartesian/testing/suites.py b/src/gt4py/cartesian/testing/suites.py index 735a314b63..d6ae5bb1fa 100644 --- a/src/gt4py/cartesian/testing/suites.py +++ b/src/gt4py/cartesian/testing/suites.py @@ -501,9 +501,9 @@ def _run_test_implementation(cls, parameters_dict, implementation): # too compl referenced_inputs = { name: info for name, info in implementation.field_info.items() if info is not None } - referenced_inputs.update({ - name: info for name, info in implementation.parameter_info.items() if info is not None - }) + referenced_inputs.update( + {name: info for name, info in implementation.parameter_info.items() if info is not None} + ) # set externals for validation method for k, v in implementation.constants.items(): diff --git a/src/gt4py/cartesian/utils/base.py b/src/gt4py/cartesian/utils/base.py index fa4ba7bce9..5dcbea3761 100644 --- a/src/gt4py/cartesian/utils/base.py +++ b/src/gt4py/cartesian/utils/base.py @@ -206,10 +206,12 @@ def classmethod_to_function(class_method, instance=None, owner=None, remove_cls_ def namespace_from_nested_dict(nested_dict): assert isinstance(nested_dict, dict) - return types.SimpleNamespace(**{ - key: namespace_from_nested_dict(value) if isinstance(value, dict) else value - for key, value in nested_dict.items() - }) + return types.SimpleNamespace( + **{ + key: namespace_from_nested_dict(value) if isinstance(value, dict) else value + for key, value in nested_dict.items() + } + ) def make_local_dir(dir_name, base_dir=None, *, mode=0o777, is_package=False, is_cache=False): diff --git a/src/gt4py/cartesian/utils/meta.py b/src/gt4py/cartesian/utils/meta.py index a4d9bbc084..fb265b3a50 100644 --- a/src/gt4py/cartesian/utils/meta.py +++ b/src/gt4py/cartesian/utils/meta.py @@ -96,12 +96,14 @@ def _dump(node: ast.AST, excluded_names): for name, value in sorted(ast.iter_fields(node)) ] - return "".join([ - node.__class__.__name__, - "({content})".format( - content=", ".join("{}={}".format(name, value) for name, value in fields) - ), - ]) + return "".join( + [ + node.__class__.__name__, + "({content})".format( + content=", ".join("{}={}".format(name, value) for name, value in fields) + ), + ] + ) elif isinstance(node, list): lines = ["[", *[_dump(i, excluded_names) + "," for i in node], "]"] diff --git a/src/gt4py/eve/codegen.py b/src/gt4py/eve/codegen.py index 5033fae902..08e85a2f98 100644 --- a/src/gt4py/eve/codegen.py +++ b/src/gt4py/eve/codegen.py @@ -632,11 +632,13 @@ def __init_subclass__(cls, *, inherit_templates: bool = True, **kwargs: Any) -> ): templates.update(templated_gen_class.__templates__) - templates.update({ - key: value - for key, value in cls.__dict__.items() - if isinstance(value, Template) and not key.startswith("_") and not key.endswith("_") - }) + templates.update( + { + key: value + for key, value in cls.__dict__.items() + if isinstance(value, Template) and not key.startswith("_") and not key.endswith("_") + } + ) cls.__templates__ = types.MappingProxyType(templates) diff --git 
a/src/gt4py/eve/datamodels/__init__.py b/src/gt4py/eve/datamodels/__init__.py index d329eda67e..b0978f3ece 100644 --- a/src/gt4py/eve/datamodels/__init__.py +++ b/src/gt4py/eve/datamodels/__init__.py @@ -61,7 +61,7 @@ ... name: str ... amount: int ... - ... @validator('name') + ... @validator("name") ... def _name_validator(self, attribute, value): ... if len(value) < 3: ... raise ValueError( @@ -99,7 +99,7 @@ ... num_instances: ClassVar[int] = 0 ... ... def __init__(self, a: int, b: int) -> None: - ... self.__auto_init__(a/b) + ... self.__auto_init__(a / b) ... ... def __pre_init__(self) -> None: ... self.__class__.num_instances += 1 diff --git a/src/gt4py/eve/datamodels/core.py b/src/gt4py/eve/datamodels/core.py index d98023cb5a..5b6915fc38 100644 --- a/src/gt4py/eve/datamodels/core.py +++ b/src/gt4py/eve/datamodels/core.py @@ -446,9 +446,7 @@ def __init_subclass__( cls, /, *, - repr: ( # noqa: A002 [builtin-argument-shadowing] - bool | None | Literal["inherited"] - ) = "inherited", + repr: (bool | None | Literal["inherited"]) = "inherited", # noqa: A002 [builtin-argument-shadowing] eq: bool | None | Literal["inherited"] = "inherited", order: bool | None | Literal["inherited"] = "inherited", unsafe_hash: bool | None | Literal["inherited"] = "inherited", diff --git a/src/gt4py/eve/pattern_matching.py b/src/gt4py/eve/pattern_matching.py index f1c28b096b..fe11b0b0c7 100644 --- a/src/gt4py/eve/pattern_matching.py +++ b/src/gt4py/eve/pattern_matching.py @@ -52,9 +52,9 @@ def match(self, other: Any, *, raise_exception: bool = False) -> bool: if raise_exception: diffs = [*get_differences(self, other)] if len(diffs) > 0: - diffs_str = "\n ".join([ - f" {self.cls.__name__}{path}: {msg}" for path, msg in diffs - ]) + diffs_str = "\n ".join( + [f" {self.cls.__name__}{path}: {msg}" for path, msg in diffs] + ) raise ValueError(f"Object and pattern don't match:\n {diffs_str}") return True diff --git a/src/gt4py/eve/utils.py b/src/gt4py/eve/utils.py index aab530db43..b779a70d3e 100644 --- a/src/gt4py/eve/utils.py +++ b/src/gt4py/eve/utils.py @@ -987,12 +987,14 @@ def getitem(self, *indices: Union[int, str], default: Any = NOTHING) -> XIterabl >>> list(it.getitem(0)) ['a', 'b', 'c'] - >>> it = xiter([ - ... dict(name="AA", age=20, country="US"), - ... dict(name="BB", age=30, country="UK"), - ... dict(name="CC", age=40, country="EU"), - ... dict(country="CH"), - ... ]) + >>> it = xiter( + ... [ + ... dict(name="AA", age=20, country="US"), + ... dict(name="BB", age=30, country="UK"), + ... dict(name="CC", age=40, country="EU"), + ... dict(country="CH"), + ... ] + ... 
) >>> list(it.getitem("name", "age", default=None)) [('AA', 20), ('BB', 30), ('CC', 40), (None, None)] diff --git a/src/gt4py/next/constructors.py b/src/gt4py/next/constructors.py index 12968fc4f3..7f92d57c1b 100644 --- a/src/gt4py/next/constructors.py +++ b/src/gt4py/next/constructors.py @@ -262,10 +262,12 @@ def as_field( raise ValueError(f"Origin keys {unknown_dims} not in domain {domain}.") else: origin = {} - actual_domain = common.domain([ - (d, (-(start_offset := origin.get(d, 0)), s - start_offset)) - for d, s in zip(domain, data.shape) - ]) + actual_domain = common.domain( + [ + (d, (-(start_offset := origin.get(d, 0)), s - start_offset)) + for d, s in zip(domain, data.shape) + ] + ) else: if origin: raise ValueError(f"Cannot specify origin for domain {domain}") diff --git a/src/gt4py/next/embedded/common.py b/src/gt4py/next/embedded/common.py index cdb0d3a5fd..cdfa439193 100644 --- a/src/gt4py/next/embedded/common.py +++ b/src/gt4py/next/embedded/common.py @@ -136,15 +136,19 @@ def restrict_to_intersection( >>> assert res == (common.domain({I: (1, 3), J: (1, 2)}), common.domain({I: (1, 3), J: (0, 3)})) """ ignore_dims_tuple = ignore_dims if isinstance(ignore_dims, tuple) else (ignore_dims,) - intersection_without_ignore_dims = domain_intersection(*[ - common.Domain(*[nr for nr in domain if nr.dim not in ignore_dims_tuple]) - for domain in domains - ]) + intersection_without_ignore_dims = domain_intersection( + *[ + common.Domain(*[nr for nr in domain if nr.dim not in ignore_dims_tuple]) + for domain in domains + ] + ) return tuple( - common.Domain(*[ - (nr if nr.dim in ignore_dims_tuple else intersection_without_ignore_dims[nr.dim]) - for nr in domain - ]) + common.Domain( + *[ + (nr if nr.dim in ignore_dims_tuple else intersection_without_ignore_dims[nr.dim]) + for nr in domain + ] + ) for domain in domains ) diff --git a/src/gt4py/next/embedded/nd_array_field.py b/src/gt4py/next/embedded/nd_array_field.py index e7f34bb2a2..af3ac0e646 100644 --- a/src/gt4py/next/embedded/nd_array_field.py +++ b/src/gt4py/next/embedded/nd_array_field.py @@ -61,9 +61,9 @@ def _builtin_op(*fields: common.Field | core_defs.Scalar) -> NdArrayField: xp = cls_.array_ns op = getattr(xp, array_builtin_name) - domain_intersection = embedded_common.domain_intersection(*[ - f.domain for f in fields if common.is_field(f) - ]) + domain_intersection = embedded_common.domain_intersection( + *[f.domain for f in fields if common.is_field(f)] + ) transformed: list[core_defs.NDArrayObject | core_defs.Scalar] = [] for f in fields: diff --git a/src/gt4py/next/embedded/operators.py b/src/gt4py/next/embedded/operators.py index b88083e7c2..b88d7e9b3c 100644 --- a/src/gt4py/next/embedded/operators.py +++ b/src/gt4py/next/embedded/operators.py @@ -57,9 +57,9 @@ def __call__( # type: ignore[override] domain_intersection = _intersect_scan_args(*all_args) non_scan_domain = common.Domain(*[nr for nr in domain_intersection if nr.dim != scan_axis]) - out_domain = common.Domain(*[ - scan_range if nr.dim == scan_axis else nr for nr in domain_intersection - ]) + out_domain = common.Domain( + *[scan_range if nr.dim == scan_axis else nr for nr in domain_intersection] + ) if scan_axis not in out_domain.dims: # even if the scan dimension is not in the input, we can scan over it out_domain = common.Domain(*out_domain, (scan_range)) @@ -89,9 +89,9 @@ def scan_loop(hpos: Sequence[common.NamedIndex]) -> None: def _get_out_domain( out: common.MutableField | tuple[common.MutableField | tuple, ...], ) -> common.Domain: - return 
embedded_common.domain_intersection(*[ - f.domain for f in utils.flatten_nested_tuple((out,)) - ]) + return embedded_common.domain_intersection( + *[f.domain for f in utils.flatten_nested_tuple((out,))] + ) def field_operator_call(op: EmbeddedOperator[_R, _P], args: Any, kwargs: Any) -> Optional[_R]: @@ -158,9 +158,9 @@ def impl(target: common.MutableField, source: common.Field) -> None: def _intersect_scan_args( *args: core_defs.Scalar | common.Field | tuple[core_defs.Scalar | common.Field | tuple, ...], ) -> common.Domain: - return embedded_common.domain_intersection(*[ - arg.domain for arg in utils.flatten_nested_tuple(args) if common.is_field(arg) - ]) + return embedded_common.domain_intersection( + *[arg.domain for arg in utils.flatten_nested_tuple(args) if common.is_field(arg)] + ) def _get_array_ns( diff --git a/src/gt4py/next/ffront/decorator.py b/src/gt4py/next/ffront/decorator.py index 36503989f0..4ef4d55e08 100644 --- a/src/gt4py/next/ffront/decorator.py +++ b/src/gt4py/next/ffront/decorator.py @@ -486,10 +486,12 @@ def as_program( # of arg and kwarg types # TODO(tehrengruber): check foast operator has no out argument that clashes # with the out argument of the program we generate here. - hash_ = eve_utils.content_hash(( - tuple(arg_types), - tuple((name, arg) for name, arg in kwarg_types.items()), - )) + hash_ = eve_utils.content_hash( + ( + tuple(arg_types), + tuple((name, arg) for name, arg in kwarg_types.items()), + ) + ) try: return self._program_cache[hash_] except KeyError: diff --git a/src/gt4py/next/ffront/lowering_utils.py b/src/gt4py/next/ffront/lowering_utils.py index 5b7e5f80d3..1adc566497 100644 --- a/src/gt4py/next/ffront/lowering_utils.py +++ b/src/gt4py/next/ffront/lowering_utils.py @@ -139,14 +139,16 @@ def _process_elements_impl( current_el_type: ts.TypeSpec, ) -> itir.Expr: if isinstance(current_el_type, ts.TupleType): - result = im.make_tuple(*[ - _process_elements_impl( - process_func, - [im.tuple_get(i, current_el_expr) for current_el_expr in _current_el_exprs], - current_el_type.types[i], - ) - for i in range(len(current_el_type.types)) - ]) + result = im.make_tuple( + *[ + _process_elements_impl( + process_func, + [im.tuple_get(i, current_el_expr) for current_el_expr in _current_el_exprs], + current_el_type.types[i], + ) + for i in range(len(current_el_type.types)) + ] + ) elif type_info.contains_local_field(current_el_type): raise NotImplementedError("Processing fields with local dimension is not implemented.") else: diff --git a/src/gt4py/next/iterator/transforms/global_tmps.py b/src/gt4py/next/iterator/transforms/global_tmps.py index c344faceb8..ddbc4e0c74 100644 --- a/src/gt4py/next/iterator/transforms/global_tmps.py +++ b/src/gt4py/next/iterator/transforms/global_tmps.py @@ -455,10 +455,12 @@ def from_expr(cls, node: ir.Node): return cls(node.fun.id, ranges) # type: ignore[attr-defined] # ensure by assert above def as_expr(self): - return im.call(self.grid_type)(*[ - im.call("named_range")(ir.AxisLiteral(value=d), r.start, r.stop) - for d, r in self.ranges.items() - ]) + return im.call(self.grid_type)( + *[ + im.call("named_range")(ir.AxisLiteral(value=d), r.start, r.stop) + for d, r in self.ranges.items() + ] + ) def domain_union(domains: list[SymbolicDomain]) -> SymbolicDomain: diff --git a/src/gt4py/next/iterator/type_inference.py b/src/gt4py/next/iterator/type_inference.py index 6c4a87e6c0..046b8418c5 100644 --- a/src/gt4py/next/iterator/type_inference.py +++ b/src/gt4py/next/iterator/type_inference.py @@ -618,23 +618,27 @@ def visit_Sym(self, 
node: ir.Sym, **kwargs) -> Type: result = TypeVar.fresh() if node.kind: kind = {"Iterator": Iterator(), "Value": Value()}[node.kind] - self.constraints.add(( - Val(kind=kind, current_loc=TypeVar.fresh(), defined_loc=TypeVar.fresh()), - result, - )) + self.constraints.add( + ( + Val(kind=kind, current_loc=TypeVar.fresh(), defined_loc=TypeVar.fresh()), + result, + ) + ) if node.dtype: assert node.dtype is not None dtype: Primitive | List = Primitive(name=node.dtype[0]) if node.dtype[1]: dtype = List(dtype=dtype) - self.constraints.add(( - Val( - dtype=dtype, - current_loc=TypeVar.fresh(), - defined_loc=TypeVar.fresh(), - ), - result, - )) + self.constraints.add( + ( + Val( + dtype=dtype, + current_loc=TypeVar.fresh(), + defined_loc=TypeVar.fresh(), + ), + result, + ) + ) return result def visit_SymRef(self, node: ir.SymRef, *, symtable, **kwargs) -> Type: @@ -750,16 +754,18 @@ def _visit_neighbors(self, node: ir.FunCall, **kwargs) -> Type: dtype_ = TypeVar.fresh() size = TypeVar.fresh() it = self.visit(node.args[1], **kwargs) - self.constraints.add(( - it, - Val( - kind=Iterator(), - dtype=dtype_, - size=size, - current_loc=current_loc_in, - defined_loc=current_loc_out, - ), - )) + self.constraints.add( + ( + it, + Val( + kind=Iterator(), + dtype=dtype_, + size=size, + current_loc=current_loc_in, + defined_loc=current_loc_out, + ), + ) + ) lst = List( dtype=dtype_, max_length=max_length, @@ -777,14 +783,16 @@ def _visit_cast_(self, node: ir.FunCall, **kwargs) -> Type: size = TypeVar.fresh() - self.constraints.add(( - val_arg_type, - Val( - kind=Value(), - dtype=TypeVar.fresh(), - size=size, - ), - )) + self.constraints.add( + ( + val_arg_type, + Val( + kind=Value(), + dtype=TypeVar.fresh(), + size=size, + ), + ) + ) return Val( kind=Value(), @@ -827,10 +835,12 @@ def _visit_shift(self, node: ir.FunCall, **kwargs) -> Type: def _visit_domain(self, node: ir.FunCall, **kwargs) -> Type: for arg in node.args: - self.constraints.add(( - Val(kind=Value(), dtype=NAMED_RANGE_DTYPE, size=Scalar()), - self.visit(arg, **kwargs), - )) + self.constraints.add( + ( + Val(kind=Value(), dtype=NAMED_RANGE_DTYPE, size=Scalar()), + self.visit(arg, **kwargs), + ) + ) return Val(kind=Value(), dtype=DOMAIN_DTYPE, size=Scalar()) def _visit_cartesian_domain(self, node: ir.FunCall, **kwargs) -> Type: @@ -884,45 +894,53 @@ def visit_StencilClosure( output = self.visit(node.output, **kwargs) output_dtype = TypeVar.fresh() output_loc = TypeVar.fresh() - self.constraints.add(( - domain, - Val(kind=Value(), dtype=Primitive(name="domain"), size=Scalar()), - )) - self.constraints.add(( - output, - Val( - kind=Iterator(), - dtype=output_dtype, - size=Column(), - defined_loc=output_loc, - ), - )) + self.constraints.add( + ( + domain, + Val(kind=Value(), dtype=Primitive(name="domain"), size=Scalar()), + ) + ) + self.constraints.add( + ( + output, + Val( + kind=Iterator(), + dtype=output_dtype, + size=Column(), + defined_loc=output_loc, + ), + ) + ) inputs: list[Type] = self.visit(node.inputs, **kwargs) stencil_params = [] for input_ in inputs: stencil_param = Val(current_loc=output_loc, defined_loc=TypeVar.fresh()) - self.constraints.add(( - input_, - Val( - kind=stencil_param.kind, - dtype=stencil_param.dtype, - size=stencil_param.size, - # closure input and stencil param differ in `current_loc` - current_loc=ANYWHERE, - # TODO(tehrengruber): Seems to break for scalars. Use `TypeVar.fresh()`? 
- defined_loc=stencil_param.defined_loc, - ), - )) + self.constraints.add( + ( + input_, + Val( + kind=stencil_param.kind, + dtype=stencil_param.dtype, + size=stencil_param.size, + # closure input and stencil param differ in `current_loc` + current_loc=ANYWHERE, + # TODO(tehrengruber): Seems to break for scalars. Use `TypeVar.fresh()`? + defined_loc=stencil_param.defined_loc, + ), + ) + ) stencil_params.append(stencil_param) - self.constraints.add(( - stencil, - FunctionType( - args=Tuple.from_elems(*stencil_params), - ret=Val(kind=Value(), dtype=output_dtype, size=Column()), - ), - )) + self.constraints.add( + ( + stencil, + FunctionType( + args=Tuple.from_elems(*stencil_params), + ret=Val(kind=Value(), dtype=output_dtype, size=Column()), + ), + ) + ) return Closure(output=output, inputs=Tuple.from_elems(*inputs)) def visit_FencilWithTemporaries(self, node: FencilWithTemporaries, **kwargs): @@ -993,10 +1011,12 @@ def infer_all( ) if reindex: - unified_types, unsatisfiable_constraints = reindex_vars(( - unified_types, - unsatisfiable_constraints, - )) + unified_types, unsatisfiable_constraints = reindex_vars( + ( + unified_types, + unsatisfiable_constraints, + ) + ) result = { id_: unified_type diff --git a/src/gt4py/next/otf/compilation/build_systems/cmake_lists.py b/src/gt4py/next/otf/compilation/build_systems/cmake_lists.py index 2fcb7ad0d9..e3f912455a 100644 --- a/src/gt4py/next/otf/compilation/build_systems/cmake_lists.py +++ b/src/gt4py/next/otf/compilation/build_systems/cmake_lists.py @@ -119,11 +119,13 @@ def visit_LinkDependency(self, dep: LinkDependency) -> str: cfg = "" if dep.name == "nanobind": - cfg = "\n".join([ - "nanobind_build_library(nanobind-static)", - f"nanobind_compile_options({dep.target})", - f"nanobind_link_options({dep.target})", - ]) + cfg = "\n".join( + [ + "nanobind_build_library(nanobind-static)", + f"nanobind_compile_options({dep.target})", + f"nanobind_link_options({dep.target})", + ] + ) lnk = f"target_link_libraries({dep.target} PUBLIC {lib_name})" return cfg + "\n" + lnk diff --git a/src/gt4py/next/program_processors/runners/dace_iterator/itir_to_tasklet.py b/src/gt4py/next/program_processors/runners/dace_iterator/itir_to_tasklet.py index 1b8e7aa0aa..9267270653 100644 --- a/src/gt4py/next/program_processors/runners/dace_iterator/itir_to_tasklet.py +++ b/src/gt4py/next/program_processors/runners/dace_iterator/itir_to_tasklet.py @@ -1200,9 +1200,9 @@ def _visit_deref(self, node: itir.FunCall) -> list[ValueExpr]: map_ranges = { index_name: f"0:{offset_provider.max_neighbors}", } - src_subset = ",".join([ - f"_i_{dim}" if dim in iterator.indices else index_name for dim in sorted_dims - ]) + src_subset = ",".join( + [f"_i_{dim}" if dim in iterator.indices else index_name for dim in sorted_dims] + ) self.context.state.add_mapped_tasklet( "deref", map_ranges, diff --git a/src/gt4py/next/program_processors/runners/dace_iterator/utility.py b/src/gt4py/next/program_processors/runners/dace_iterator/utility.py index 970ea3b1ca..fd679f040c 100644 --- a/src/gt4py/next/program_processors/runners/dace_iterator/utility.py +++ b/src/gt4py/next/program_processors/runners/dace_iterator/utility.py @@ -199,7 +199,7 @@ def new_array_symbols(name: str, ndim: int) -> tuple[list[dace.symbol], list[dac def flatten_list(node_list: list[Any]) -> list[Any]: return list( - itertools.chain.from_iterable([ - flatten_list(e) if e.__class__ == list else [e] for e in node_list - ]) + itertools.chain.from_iterable( + [flatten_list(e) if e.__class__ == list else [e] for e in node_list] + ) ) 
diff --git a/src/gt4py/next/program_processors/runners/gtfn.py b/src/gt4py/next/program_processors/runners/gtfn.py index 75944e1dd4..ad24b19dbf 100644 --- a/src/gt4py/next/program_processors/runners/gtfn.py +++ b/src/gt4py/next/program_processors/runners/gtfn.py @@ -104,16 +104,18 @@ def extract_connectivity_args( def compilation_hash(otf_closure: stages.ProgramCall) -> int: """Given closure compute a hash uniquely determining if we need to recompile.""" offset_provider = otf_closure.kwargs["offset_provider"] - return hash(( - otf_closure.program, - # As the frontend types contain lists they are not hashable. As a workaround we just - # use content_hash here. - content_hash(tuple(from_value(arg) for arg in otf_closure.args)), - id(offset_provider) if offset_provider else None, - otf_closure.kwargs.get("column_axis", None), - # TODO(tehrengruber): Remove `lift_mode` from call interface. - otf_closure.kwargs.get("lift_mode", None), - )) + return hash( + ( + otf_closure.program, + # As the frontend types contain lists they are not hashable. As a workaround we just + # use content_hash here. + content_hash(tuple(from_value(arg) for arg in otf_closure.args)), + id(offset_provider) if offset_provider else None, + otf_closure.kwargs.get("column_axis", None), + # TODO(tehrengruber): Remove `lift_mode` from call interface. + otf_closure.kwargs.get("lift_mode", None), + ) + ) class GTFNCompileWorkflowFactory(factory.Factory): diff --git a/src/gt4py/next/type_system/type_info.py b/src/gt4py/next/type_system/type_info.py index a0ec811900..03702517de 100644 --- a/src/gt4py/next/type_system/type_info.py +++ b/src/gt4py/next/type_system/type_info.py @@ -168,16 +168,18 @@ def apply_to_primitive_constituents( tuple[Field[[], int64], Field[[], int64]] """ if isinstance(symbol_type, ts.TupleType): - return tuple_constructor(*[ - apply_to_primitive_constituents( - el, - fun, - _path=(*_path, i), - with_path_arg=with_path_arg, - tuple_constructor=tuple_constructor, - ) - for i, el in enumerate(symbol_type.types) - ]) + return tuple_constructor( + *[ + apply_to_primitive_constituents( + el, + fun, + _path=(*_path, i), + with_path_arg=with_path_arg, + tuple_constructor=tuple_constructor, + ) + for i, el in enumerate(symbol_type.types) + ] + ) if with_path_arg: return fun(symbol_type, _path) # type: ignore[call-arg] # mypy not aware of `with_path_arg` else: diff --git a/tests/cartesian_tests/unit_tests/frontend_tests/test_gtscript_frontend.py b/tests/cartesian_tests/unit_tests/frontend_tests/test_gtscript_frontend.py index 30d1ab3ad1..889fa0d145 100644 --- a/tests/cartesian_tests/unit_tests/frontend_tests/test_gtscript_frontend.py +++ b/tests/cartesian_tests/unit_tests/frontend_tests/test_gtscript_frontend.py @@ -1202,14 +1202,16 @@ def definition_func(inout_field: gtscript.Field[float]): @pytest.mark.parametrize( "id_case,import_line", list( - enumerate([ - "import gt4py", - "from externals import EXTERNAL", - "from gt4py.cartesian import __gtscript__", - "from gt4py.cartesian import __externals__", - "from gt4py.cartesian.gtscript import computation", - "from gt4py.cartesian.externals import EXTERNAL", - ]) + enumerate( + [ + "import gt4py", + "from externals import EXTERNAL", + "from gt4py.cartesian import __gtscript__", + "from gt4py.cartesian import __externals__", + "from gt4py.cartesian.gtscript import computation", + "from gt4py.cartesian.externals import EXTERNAL", + ] + ) ), ) def test_wrong_imports(self, id_case, import_line): @@ -1238,17 +1240,19 @@ class TestDTypes: @pytest.mark.parametrize( 
"id_case,test_dtype", list( - enumerate([ - bool, - np.bool_, - int, - np.int32, - np.int64, - float, - np.float32, - np.float64, - np.dtype((np.float32, (3,))), - ]) + enumerate( + [ + bool, + np.bool_, + int, + np.int32, + np.int64, + float, + np.float32, + np.float64, + np.dtype((np.float32, (3,))), + ] + ) ), ) def test_all_legal_dtypes_instance(self, id_case, test_dtype): diff --git a/tests/eve_tests/unit_tests/test_extended_typing.py b/tests/eve_tests/unit_tests/test_extended_typing.py index ec2d5a8fe8..45f227d790 100644 --- a/tests/eve_tests/unit_tests/test_extended_typing.py +++ b/tests/eve_tests/unit_tests/test_extended_typing.py @@ -326,10 +326,12 @@ def test_is_actual_wrong_type(t): (List[int], type(List[int])), ] if sys.version_info >= (3, 9): - ACTUAL_TYPE_SAMPLES.extend([ - (tuple[int, float], types.GenericAlias), # type: ignore[misc] # ignore false positive bug: https://github.com/python/mypy/issues/11098 - (list[int], types.GenericAlias), - ]) + ACTUAL_TYPE_SAMPLES.extend( + [ + (tuple[int, float], types.GenericAlias), # type: ignore[misc] # ignore false positive bug: https://github.com/python/mypy/issues/11098 + (list[int], types.GenericAlias), + ] + ) @pytest.mark.parametrize(["instance", "expected"], ACTUAL_TYPE_SAMPLES) diff --git a/tests/eve_tests/unit_tests/test_type_validation.py b/tests/eve_tests/unit_tests/test_type_validation.py index c60c134d77..d9977f0d3a 100644 --- a/tests/eve_tests/unit_tests/test_type_validation.py +++ b/tests/eve_tests/unit_tests/test_type_validation.py @@ -153,13 +153,15 @@ class SampleDataClass: class SampleSlottedDataClass: b: float - SAMPLE_TYPE_DEFINITIONS.append(( - SampleSlottedDataClass, - [SampleSlottedDataClass(1.0), SampleSlottedDataClass(1)], - [object(), float(1.2), int(1), "1.2", SampleSlottedDataClass], - None, - None, - )) + SAMPLE_TYPE_DEFINITIONS.append( + ( + SampleSlottedDataClass, + [SampleSlottedDataClass(1.0), SampleSlottedDataClass(1)], + [object(), float(1.2), int(1), "1.2", SampleSlottedDataClass], + None, + None, + ) + ) @pytest.mark.parametrize("validator", VALIDATORS) diff --git a/tests/next_tests/integration_tests/feature_tests/ffront_tests/test_execution.py b/tests/next_tests/integration_tests/feature_tests/ffront_tests/test_execution.py index a2c203b163..8b6d414f0f 100644 --- a/tests/next_tests/integration_tests/feature_tests/ffront_tests/test_execution.py +++ b/tests/next_tests/integration_tests/feature_tests/ffront_tests/test_execution.py @@ -814,10 +814,12 @@ def test_scan_nested_tuple_input(cartesian_case): def prev_levels_iterator(i): return range(i + 1) - expected = np.asarray([ - reduce(lambda prev, i: prev + inp1_np[i] + inp2_np[i], prev_levels_iterator(i), init) - for i in range(k_size) - ]) + expected = np.asarray( + [ + reduce(lambda prev, i: prev + inp1_np[i] + inp2_np[i], prev_levels_iterator(i), init) + for i in range(k_size) + ] + ) @gtx.scan_operator(axis=KDim, forward=True, init=init) def simple_scan_operator(carry: float, a: tuple[float, float]) -> float: @@ -832,10 +834,12 @@ def test_scan_different_domain_in_tuple(cartesian_case): i_size = cartesian_case.default_sizes[IDim] k_size = cartesian_case.default_sizes[KDim] - inp1_np = np.ones(( - i_size + 1, - k_size, - )) # i_size bigger than in the other argument + inp1_np = np.ones( + ( + i_size + 1, + k_size, + ) + ) # i_size bigger than in the other argument inp2_np = np.fromfunction(lambda i, k: k, shape=(i_size, k_size), dtype=float) inp1 = cartesian_case.as_field([IDim, KDim], inp1_np) inp2 = cartesian_case.as_field([IDim, KDim], inp2_np) 
@@ -844,14 +848,16 @@ def test_scan_different_domain_in_tuple(cartesian_case): def prev_levels_iterator(i): return range(i + 1) - expected = np.asarray([ - reduce( - lambda prev, k: prev + inp1_np[:-1, k] + inp2_np[:, k], - prev_levels_iterator(k), - init, - ) - for k in range(k_size) - ]).transpose() + expected = np.asarray( + [ + reduce( + lambda prev, k: prev + inp1_np[:-1, k] + inp2_np[:, k], + prev_levels_iterator(k), + init, + ) + for k in range(k_size) + ] + ).transpose() @gtx.scan_operator(axis=KDim, forward=True, init=init) def scan_op(carry: float, a: tuple[float, float]) -> float: @@ -879,14 +885,16 @@ def test_scan_tuple_field_scalar_mixed(cartesian_case): def prev_levels_iterator(i): return range(i + 1) - expected = np.asarray([ - reduce( - lambda prev, k: prev + 1.0 + inp2_np[:, k], - prev_levels_iterator(k), - init, - ) - for k in range(k_size) - ]).transpose() + expected = np.asarray( + [ + reduce( + lambda prev, k: prev + 1.0 + inp2_np[:, k], + prev_levels_iterator(k), + init, + ) + for k in range(k_size) + ] + ).transpose() @gtx.scan_operator(axis=KDim, forward=True, init=init) def scan_op(carry: float, a: tuple[float, float]) -> float: diff --git a/tests/next_tests/integration_tests/feature_tests/ffront_tests/test_program.py b/tests/next_tests/integration_tests/feature_tests/ffront_tests/test_program.py index 19cbbb4ba2..df0009d0d4 100644 --- a/tests/next_tests/integration_tests/feature_tests/ffront_tests/test_program.py +++ b/tests/next_tests/integration_tests/feature_tests/ffront_tests/test_program.py @@ -72,9 +72,9 @@ def shift_by_one(in_field: cases.IFloatField) -> cases.IFloatField: def shift_by_one_program(in_field: cases.IFloatField, out_field: cases.IFloatField): shift_by_one(in_field, out=out_field[:-1]) - in_field = cases.allocate(cartesian_case, shift_by_one_program, "in_field").extend({ - IDim: (0, 1) - })() + in_field = cases.allocate(cartesian_case, shift_by_one_program, "in_field").extend( + {IDim: (0, 1)} + )() out_field = cases.allocate(cartesian_case, shift_by_one_program, "out_field")() cases.verify( @@ -107,10 +107,12 @@ def test_copy_restricted_execution(cartesian_case, copy_restrict_program_def): cases.verify_with_default_data( cartesian_case, copy_restrict_program, - ref=lambda in_field: np.array([ - in_field[i] if i in range(1, 2) else 0 - for i in range(0, cartesian_case.default_sizes[IDim]) - ]), + ref=lambda in_field: np.array( + [ + in_field[i] if i in range(1, 2) else 0 + for i in range(0, cartesian_case.default_sizes[IDim]) + ] + ), ) diff --git a/tests/next_tests/integration_tests/multi_feature_tests/fvm_nabla_setup.py b/tests/next_tests/integration_tests/multi_feature_tests/fvm_nabla_setup.py index 998b351255..03e1af27dd 100644 --- a/tests/next_tests/integration_tests/multi_feature_tests/fvm_nabla_setup.py +++ b/tests/next_tests/integration_tests/multi_feature_tests/fvm_nabla_setup.py @@ -51,9 +51,9 @@ def __init__(self, *, grid=StructuredGrid("O32"), config=None): build_node_to_edge_connectivity(mesh) build_median_dual_mesh(mesh) - edges_per_node = max([ - mesh.nodes.edge_connectivity.cols(node) for node in range(0, fs_nodes.size) - ]) + edges_per_node = max( + [mesh.nodes.edge_connectivity.cols(node) for node in range(0, fs_nodes.size)] + ) self.mesh = mesh self.fs_edges = fs_edges diff --git a/tests/next_tests/unit_tests/embedded_tests/test_nd_array_field.py b/tests/next_tests/unit_tests/embedded_tests/test_nd_array_field.py index 7171bb5ecc..375b654475 100644 --- a/tests/next_tests/unit_tests/embedded_tests/test_nd_array_field.py +++ 
b/tests/next_tests/unit_tests/embedded_tests/test_nd_array_field.py @@ -389,37 +389,45 @@ def test_cartesian_remap_implementation(): @pytest.mark.parametrize( "new_dims,field,expected_domain", [ - (( - (D0,), - common._field( - np.arange(10), domain=common.Domain(dims=(D0,), ranges=(UnitRange(0, 10),)) - ), - Domain(dims=(D0,), ranges=(UnitRange(0, 10),)), - )), - (( - (D0, D1), - common._field( - np.arange(10), domain=common.Domain(dims=(D0,), ranges=(UnitRange(0, 10),)) - ), - Domain(dims=(D0, D1), ranges=(UnitRange(0, 10), UnitRange.infinite())), - )), - (( - (D0, D1), - common._field( - np.arange(10), domain=common.Domain(dims=(D1,), ranges=(UnitRange(0, 10),)) - ), - Domain(dims=(D0, D1), ranges=(UnitRange.infinite(), UnitRange(0, 10))), - )), - (( - (D0, D1, D2), - common._field( - np.arange(10), domain=common.Domain(dims=(D1,), ranges=(UnitRange(0, 10),)) - ), - Domain( - dims=(D0, D1, D2), - ranges=(UnitRange.infinite(), UnitRange(0, 10), UnitRange.infinite()), - ), - )), + ( + ( + (D0,), + common._field( + np.arange(10), domain=common.Domain(dims=(D0,), ranges=(UnitRange(0, 10),)) + ), + Domain(dims=(D0,), ranges=(UnitRange(0, 10),)), + ) + ), + ( + ( + (D0, D1), + common._field( + np.arange(10), domain=common.Domain(dims=(D0,), ranges=(UnitRange(0, 10),)) + ), + Domain(dims=(D0, D1), ranges=(UnitRange(0, 10), UnitRange.infinite())), + ) + ), + ( + ( + (D0, D1), + common._field( + np.arange(10), domain=common.Domain(dims=(D1,), ranges=(UnitRange(0, 10),)) + ), + Domain(dims=(D0, D1), ranges=(UnitRange.infinite(), UnitRange(0, 10))), + ) + ), + ( + ( + (D0, D1, D2), + common._field( + np.arange(10), domain=common.Domain(dims=(D1,), ranges=(UnitRange(0, 10),)) + ), + Domain( + dims=(D0, D1, D2), + ranges=(UnitRange.infinite(), UnitRange(0, 10), UnitRange.infinite()), + ), + ) + ), ], ) def test_field_broadcast(new_dims, field, expected_domain): @@ -774,10 +782,12 @@ def test_connectivity_field_inverse_image_2d_domain(): c2v_conn = common._connectivity( np.asarray([[0, 0, 2], [1, 1, 2], [2, 2, 2]]), - domain=common.domain([ - common.named_range((C, (C_START, C_STOP))), - common.named_range((C2V, (C2V_START, C2V_STOP))), - ]), + domain=common.domain( + [ + common.named_range((C, (C_START, C_STOP))), + common.named_range((C2V, (C2V_START, C2V_STOP))), + ] + ), codomain=V, ) @@ -853,10 +863,12 @@ def test_connectivity_field_inverse_image_2d_domain_skip_values(): c2v_conn = common._connectivity( np.asarray([[-1, 0, 2, -1], [1, 1, 2, 2], [2, 2, -1, -1], [-1, 2, -1, -1]]), - domain=common.domain([ - common.named_range((C, (C_START, C_STOP))), - common.named_range((C2V, (C2V_START, C2V_STOP))), - ]), + domain=common.domain( + [ + common.named_range((C, (C_START, C_STOP))), + common.named_range((C2V, (C2V_START, C2V_STOP))), + ] + ), codomain=V, skip_value=-1, ) diff --git a/tests/next_tests/unit_tests/errors_tests/test_exceptions.py b/tests/next_tests/unit_tests/errors_tests/test_exceptions.py index 6840821d74..60a382d989 100644 --- a/tests/next_tests/unit_tests/errors_tests/test_exceptions.py +++ b/tests/next_tests/unit_tests/errors_tests/test_exceptions.py @@ -59,11 +59,13 @@ def test_str(loc_plain, message): def test_str_snippet(loc_snippet, message): - pattern = r"\n".join([ - f"{message}", - ' File ".*", line.*', - " # This very line of comment should be shown in the snippet.", - r" \^\^\^\^\^\^\^\^\^\^\^\^\^\^", - ]) + pattern = r"\n".join( + [ + f"{message}", + ' File ".*", line.*', + " # This very line of comment should be shown in the snippet.", + r" \^\^\^\^\^\^\^\^\^\^\^\^\^\^", 
+ ] + ) s = str(errors.DSLError(loc_snippet, message)) assert re.match(pattern, s) diff --git a/tests/next_tests/unit_tests/iterator_tests/test_pretty_printer.py b/tests/next_tests/unit_tests/iterator_tests/test_pretty_printer.py index 3f42307a7e..8e0806baa2 100644 --- a/tests/next_tests/unit_tests/iterator_tests/test_pretty_printer.py +++ b/tests/next_tests/unit_tests/iterator_tests/test_pretty_printer.py @@ -54,21 +54,27 @@ def test_indent(): def test_cost(): - assert PrettyPrinter()._cost(["This is a single line."]) < PrettyPrinter()._cost([ - "These are", - "multiple", - "short", - "lines.", - ]) - assert PrettyPrinter()._cost(["This is a short line."]) < PrettyPrinter()._cost([ - "This is a very long line; longer than the maximum allowed line length. " - "So it should get a penalty for its length." - ]) - assert PrettyPrinter()._cost([ - "Equal length!", - "Equal length!", - "Equal length!", - ]) < PrettyPrinter()._cost(["Unequal length.", "Short…", "Looooooooooooooooooong…"]) + assert PrettyPrinter()._cost(["This is a single line."]) < PrettyPrinter()._cost( + [ + "These are", + "multiple", + "short", + "lines.", + ] + ) + assert PrettyPrinter()._cost(["This is a short line."]) < PrettyPrinter()._cost( + [ + "This is a very long line; longer than the maximum allowed line length. " + "So it should get a penalty for its length." + ] + ) + assert PrettyPrinter()._cost( + [ + "Equal length!", + "Equal length!", + "Equal length!", + ] + ) < PrettyPrinter()._cost(["Unequal length.", "Short…", "Looooooooooooooooooong…"]) def test_optimum():