diff --git a/.constraints/py3.10.txt b/.constraints/py3.10.txt index 301c6e43..f1c81ea5 100644 --- a/.constraints/py3.10.txt +++ b/.constraints/py3.10.txt @@ -1,22 +1,21 @@ # This file was autogenerated by uv via the following command: # uv pip compile pyproject.toml -o .constraints/py3.10.txt --all-extras --no-annotate --python-version=3.10 --no-emit-package setuptools accessible-pygments==0.0.5 -alabaster==0.7.16 -anyio==4.3.0 +alabaster==1.0.0 +anyio==4.4.0 argon2-cffi==23.1.0 argon2-cffi-bindings==21.2.0 arrow==1.3.0 asttokens==2.4.1 async-lru==2.0.4 -attrs==23.2.0 +attrs==24.2.0 babel==2.15.0 beautifulsoup4==4.12.3 -black==24.4.2 bleach==6.1.0 -cachetools==5.3.3 +cachetools==5.4.0 cattrs==23.2.3 -certifi==2024.2.2 -cffi==1.16.0 +certifi==2024.7.4 +cffi==1.17.0 cfgv==3.4.0 chardet==5.2.0 charset-normalizer==3.3.2 @@ -25,17 +24,17 @@ colorama==0.4.6 comm==0.2.2 contourpy==1.2.1 cycler==0.12.1 -debugpy==1.8.1 +debugpy==1.8.5 decorator==5.1.1 defusedxml==0.7.1 distlib==0.3.8 docstring-to-markdown==0.15 docutils==0.21.2 -exceptiongroup==1.2.1 +exceptiongroup==1.2.2 executing==2.0.1 -fastjsonschema==2.19.1 -filelock==3.14.0 -fonttools==4.51.0 +fastjsonschema==2.20.0 +filelock==3.15.4 +fonttools==4.53.1 fqdn==1.5.1 gitdb==4.0.11 gitpython==3.1.43 @@ -44,23 +43,22 @@ greenlet==3.0.3 h11==0.14.0 httpcore==1.0.5 httpx==0.27.0 -identify==2.5.36 +identify==2.6.0 idna==3.7 imagesize==1.4.1 -importlib-metadata==7.1.0 +importlib-metadata==8.2.0 iniconfig==2.0.0 -ipykernel==6.29.4 +ipykernel==6.29.5 ipympl==0.9.4 -ipython==8.24.0 +ipython==8.26.0 ipython-genutils==0.2.0 -ipywidgets==8.1.2 +ipywidgets==8.1.3 isoduration==20.11.0 -isort==5.13.2 jedi==0.19.1 jinja2==3.1.4 json5==0.9.25 -jsonpointer==2.4 -jsonschema==4.22.0 +jsonpointer==3.0.0 +jsonschema==4.23.0 jsonschema-specifications==2023.12.1 jupyter==1.0.0 jupyter-cache==1.0.0 @@ -69,17 +67,17 @@ jupyter-console==6.6.3 jupyter-core==5.7.2 jupyter-events==0.10.0 jupyter-lsp==2.2.5 -jupyter-server==2.14.0 +jupyter-server==2.14.2 jupyter-server-mathjax==0.2.6 jupyter-server-terminals==0.5.3 -jupyterlab==4.2.1 -jupyterlab-code-formatter==2.2.1 +jupyterlab==4.2.4 +jupyterlab-code-formatter==3.0.1 jupyterlab-git==0.50.1 jupyterlab-lsp==5.1.0 jupyterlab-myst==2.4.2 jupyterlab-pygments==0.3.0 -jupyterlab-server==2.27.2 -jupyterlab-widgets==3.0.10 +jupyterlab-server==2.27.3 +jupyterlab-widgets==3.0.11 kiwisolver==1.4.5 latexcodec==3.0.0 lsprotocol==2023.0.1 @@ -91,111 +89,109 @@ mdit-py-plugins==0.4.1 mdurl==0.1.2 mistune==3.0.2 mpl-interactions==0.24.1 -mypy-extensions==1.0.0 -myst-nb==1.1.0 -myst-parser==3.0.1 +myst-nb==1.1.1 +myst-parser==4.0.0 nbclient==0.6.8 nbconvert==7.16.4 nbdime==4.0.1 nbformat==5.10.4 -nbmake==1.5.3 +nbmake==1.5.4 nest-asyncio==1.6.0 -nodeenv==1.8.0 -notebook==7.2.0 +nodeenv==1.9.1 +notebook==7.2.1 notebook-shim==0.2.4 numpy==1.26.4 overrides==7.7.0 -packaging==24.0 +packaging==24.1 pandocfilters==1.5.1 parso==0.8.4 -pathspec==0.12.1 pexpect==4.9.0 -pillow==10.3.0 +pillow==10.4.0 platformdirs==4.2.2 pluggy==1.5.0 -pre-commit==3.7.1 +pre-commit==3.8.0 prometheus-client==0.20.0 -prompt-toolkit==3.0.43 -psutil==5.9.8 +prompt-toolkit==3.0.47 +psutil==6.0.0 ptyprocess==0.7.0 -pure-eval==0.2.2 +pure-eval==0.2.3 pybtex==0.24.0 pybtex-docutils==1.0.3 pycparser==2.22 -pydata-sphinx-theme==0.15.2 +pydata-sphinx-theme==0.15.4 pygments==2.18.0 pyparsing==3.1.2 -pyproject-api==1.6.1 -pytest==8.2.1 +pyproject-api==1.7.1 +pytest==8.3.2 python-dateutil==2.9.0.post0 python-json-logger==2.0.7 python-lsp-jsonrpc==1.1.2 -python-lsp-ruff==2.2.1 
+python-lsp-ruff==2.2.2 python-lsp-server==1.11.0 pytoolconfig==1.3.1 -pyyaml==6.0.1 -pyzmq==26.0.3 +pyyaml==6.0.2 +pyzmq==26.1.0 qtconsole==5.5.2 qtpy==2.4.1 referencing==0.35.1 -requests==2.32.2 +requests==2.32.3 rfc3339-validator==0.1.4 rfc3986-validator==0.1.1 rope==1.13.0 -rpds-py==0.18.1 -ruff==0.4.5 +rpds-py==0.20.0 +ruff==0.5.6 send2trash==1.8.3 six==1.16.0 smmap==5.0.1 sniffio==1.3.1 snowballstemmer==2.2.0 soupsieve==2.5 -sphinx==7.3.7 +sphinx==8.0.2 sphinx-api-relink==0.0.9 sphinx-autobuild==2024.4.16 -sphinx-book-theme==1.1.2 -sphinx-codeautolink==0.15.1 +sphinx-book-theme==1.1.3 +sphinx-codeautolink==0.15.2 sphinx-comments==0.0.3 sphinx-copybutton==0.5.2 -sphinx-design==0.6.0 +sphinx-design==0.6.1 sphinx-hep-pdgref==0.2.0 sphinx-pybtex-etal-style==0.0.2 sphinx-remove-toctrees==1.0.0.post1 sphinx-thebe==0.3.1 sphinx-togglebutton==0.3.2 -sphinxcontrib-applehelp==1.0.8 +sphinxcontrib-applehelp==2.0.0 sphinxcontrib-bibtex==2.6.2 -sphinxcontrib-devhelp==1.0.6 -sphinxcontrib-htmlhelp==2.0.5 +sphinxcontrib-devhelp==2.0.0 +sphinxcontrib-htmlhelp==2.1.0 sphinxcontrib-jsmath==1.0.1 -sphinxcontrib-qthelp==1.0.7 -sphinxcontrib-serializinghtml==1.1.10 -sqlalchemy==2.0.30 +sphinxcontrib-qthelp==2.0.0 +sphinxcontrib-serializinghtml==2.0.0 +sqlalchemy==2.0.32 stack-data==0.6.3 -starlette==0.37.2 +starlette==0.38.2 tabulate==0.9.0 terminado==0.18.1 tinycss2==1.3.0 tomli==2.0.1 -tornado==6.4 -tox==4.15.0 +tornado==6.4.1 +tox==4.17.0 traitlets==5.14.3 types-python-dateutil==2.9.0.20240316 -typing-extensions==4.11.0 +typing-extensions==4.12.2 ujson==5.10.0 uri-template==1.3.0 -urllib3==2.2.1 -uvicorn==0.29.0 -virtualenv==20.26.2 -watchfiles==0.21.0 +urllib3==2.2.2 +uvicorn==0.30.5 +virtualenv==20.26.3 +watchfiles==0.23.0 wcwidth==0.2.13 -webcolors==1.13 +webcolors==24.6.0 webencodings==0.5.1 websocket-client==1.8.0 websockets==12.0 -wheel==0.43.0 -widgetsnbextension==4.0.10 -zipp==3.18.2 +wheel==0.44.0 +widgetsnbextension==4.0.11 +zipp==3.19.2 # The following packages were excluded from the output: # setuptools diff --git a/.constraints/py3.11.txt b/.constraints/py3.11.txt index 70eca977..c49f8e96 100644 --- a/.constraints/py3.11.txt +++ b/.constraints/py3.11.txt @@ -1,22 +1,21 @@ # This file was autogenerated by uv via the following command: # uv pip compile pyproject.toml -o .constraints/py3.11.txt --all-extras --no-annotate --python-version=3.11 --no-emit-package setuptools accessible-pygments==0.0.5 -alabaster==0.7.16 -anyio==4.3.0 +alabaster==1.0.0 +anyio==4.4.0 argon2-cffi==23.1.0 argon2-cffi-bindings==21.2.0 arrow==1.3.0 asttokens==2.4.1 async-lru==2.0.4 -attrs==23.2.0 +attrs==24.2.0 babel==2.15.0 beautifulsoup4==4.12.3 -black==24.4.2 bleach==6.1.0 -cachetools==5.3.3 +cachetools==5.4.0 cattrs==23.2.3 -certifi==2024.2.2 -cffi==1.16.0 +certifi==2024.7.4 +cffi==1.17.0 cfgv==3.4.0 chardet==5.2.0 charset-normalizer==3.3.2 @@ -25,16 +24,16 @@ colorama==0.4.6 comm==0.2.2 contourpy==1.2.1 cycler==0.12.1 -debugpy==1.8.1 +debugpy==1.8.5 decorator==5.1.1 defusedxml==0.7.1 distlib==0.3.8 docstring-to-markdown==0.15 docutils==0.21.2 executing==2.0.1 -fastjsonschema==2.19.1 -filelock==3.14.0 -fonttools==4.51.0 +fastjsonschema==2.20.0 +filelock==3.15.4 +fonttools==4.53.1 fqdn==1.5.1 gitdb==4.0.11 gitpython==3.1.43 @@ -43,23 +42,22 @@ greenlet==3.0.3 h11==0.14.0 httpcore==1.0.5 httpx==0.27.0 -identify==2.5.36 +identify==2.6.0 idna==3.7 imagesize==1.4.1 -importlib-metadata==7.1.0 +importlib-metadata==8.2.0 iniconfig==2.0.0 -ipykernel==6.29.4 +ipykernel==6.29.5 ipympl==0.9.4 -ipython==8.24.0 +ipython==8.26.0 
ipython-genutils==0.2.0 -ipywidgets==8.1.2 +ipywidgets==8.1.3 isoduration==20.11.0 -isort==5.13.2 jedi==0.19.1 jinja2==3.1.4 json5==0.9.25 -jsonpointer==2.4 -jsonschema==4.22.0 +jsonpointer==3.0.0 +jsonschema==4.23.0 jsonschema-specifications==2023.12.1 jupyter==1.0.0 jupyter-cache==1.0.0 @@ -68,17 +66,17 @@ jupyter-console==6.6.3 jupyter-core==5.7.2 jupyter-events==0.10.0 jupyter-lsp==2.2.5 -jupyter-server==2.14.0 +jupyter-server==2.14.2 jupyter-server-mathjax==0.2.6 jupyter-server-terminals==0.5.3 -jupyterlab==4.2.1 -jupyterlab-code-formatter==2.2.1 +jupyterlab==4.2.4 +jupyterlab-code-formatter==3.0.1 jupyterlab-git==0.50.1 jupyterlab-lsp==5.1.0 jupyterlab-myst==2.4.2 jupyterlab-pygments==0.3.0 -jupyterlab-server==2.27.2 -jupyterlab-widgets==3.0.10 +jupyterlab-server==2.27.3 +jupyterlab-widgets==3.0.11 kiwisolver==1.4.5 latexcodec==3.0.0 lsprotocol==2023.0.1 @@ -90,110 +88,108 @@ mdit-py-plugins==0.4.1 mdurl==0.1.2 mistune==3.0.2 mpl-interactions==0.24.1 -mypy-extensions==1.0.0 -myst-nb==1.1.0 -myst-parser==3.0.1 +myst-nb==1.1.1 +myst-parser==4.0.0 nbclient==0.6.8 nbconvert==7.16.4 nbdime==4.0.1 nbformat==5.10.4 -nbmake==1.5.3 +nbmake==1.5.4 nest-asyncio==1.6.0 -nodeenv==1.8.0 -notebook==7.2.0 +nodeenv==1.9.1 +notebook==7.2.1 notebook-shim==0.2.4 numpy==1.26.4 overrides==7.7.0 -packaging==24.0 +packaging==24.1 pandocfilters==1.5.1 parso==0.8.4 -pathspec==0.12.1 pexpect==4.9.0 -pillow==10.3.0 +pillow==10.4.0 platformdirs==4.2.2 pluggy==1.5.0 -pre-commit==3.7.1 +pre-commit==3.8.0 prometheus-client==0.20.0 -prompt-toolkit==3.0.43 -psutil==5.9.8 +prompt-toolkit==3.0.47 +psutil==6.0.0 ptyprocess==0.7.0 -pure-eval==0.2.2 +pure-eval==0.2.3 pybtex==0.24.0 pybtex-docutils==1.0.3 pycparser==2.22 -pydata-sphinx-theme==0.15.2 +pydata-sphinx-theme==0.15.4 pygments==2.18.0 pyparsing==3.1.2 -pyproject-api==1.6.1 -pytest==8.2.1 +pyproject-api==1.7.1 +pytest==8.3.2 python-dateutil==2.9.0.post0 python-json-logger==2.0.7 python-lsp-jsonrpc==1.1.2 -python-lsp-ruff==2.2.1 +python-lsp-ruff==2.2.2 python-lsp-server==1.11.0 pytoolconfig==1.3.1 -pyyaml==6.0.1 -pyzmq==26.0.3 +pyyaml==6.0.2 +pyzmq==26.1.0 qtconsole==5.5.2 qtpy==2.4.1 referencing==0.35.1 -requests==2.32.2 +requests==2.32.3 rfc3339-validator==0.1.4 rfc3986-validator==0.1.1 rope==1.13.0 -rpds-py==0.18.1 -ruff==0.4.5 +rpds-py==0.20.0 +ruff==0.5.6 send2trash==1.8.3 six==1.16.0 smmap==5.0.1 sniffio==1.3.1 snowballstemmer==2.2.0 soupsieve==2.5 -sphinx==7.3.7 +sphinx==8.0.2 sphinx-api-relink==0.0.9 sphinx-autobuild==2024.4.16 -sphinx-book-theme==1.1.2 -sphinx-codeautolink==0.15.1 +sphinx-book-theme==1.1.3 +sphinx-codeautolink==0.15.2 sphinx-comments==0.0.3 sphinx-copybutton==0.5.2 -sphinx-design==0.6.0 +sphinx-design==0.6.1 sphinx-hep-pdgref==0.2.0 sphinx-pybtex-etal-style==0.0.2 sphinx-remove-toctrees==1.0.0.post1 sphinx-thebe==0.3.1 sphinx-togglebutton==0.3.2 -sphinxcontrib-applehelp==1.0.8 +sphinxcontrib-applehelp==2.0.0 sphinxcontrib-bibtex==2.6.2 -sphinxcontrib-devhelp==1.0.6 -sphinxcontrib-htmlhelp==2.0.5 +sphinxcontrib-devhelp==2.0.0 +sphinxcontrib-htmlhelp==2.1.0 sphinxcontrib-jsmath==1.0.1 -sphinxcontrib-qthelp==1.0.7 -sphinxcontrib-serializinghtml==1.1.10 -sqlalchemy==2.0.30 +sphinxcontrib-qthelp==2.0.0 +sphinxcontrib-serializinghtml==2.0.0 +sqlalchemy==2.0.32 stack-data==0.6.3 -starlette==0.37.2 +starlette==0.38.2 tabulate==0.9.0 terminado==0.18.1 tinycss2==1.3.0 -tornado==6.4 -tox==4.15.0 +tornado==6.4.1 +tox==4.17.0 traitlets==5.14.3 types-python-dateutil==2.9.0.20240316 -typing-extensions==4.11.0 +typing-extensions==4.12.2 ujson==5.10.0 
uri-template==1.3.0 -urllib3==2.2.1 -uvicorn==0.29.0 -virtualenv==20.26.2 -watchfiles==0.21.0 +urllib3==2.2.2 +uvicorn==0.30.5 +virtualenv==20.26.3 +watchfiles==0.23.0 wcwidth==0.2.13 -webcolors==1.13 +webcolors==24.6.0 webencodings==0.5.1 websocket-client==1.8.0 websockets==12.0 -wheel==0.43.0 -widgetsnbextension==4.0.10 -zipp==3.18.2 +wheel==0.44.0 +widgetsnbextension==4.0.11 +zipp==3.19.2 # The following packages were excluded from the output: # setuptools diff --git a/.constraints/py3.12.txt b/.constraints/py3.12.txt index 6aee5dc3..6e12dda7 100644 --- a/.constraints/py3.12.txt +++ b/.constraints/py3.12.txt @@ -1,22 +1,21 @@ # This file was autogenerated by uv via the following command: # uv pip compile pyproject.toml -o .constraints/py3.12.txt --all-extras --no-annotate --python-version=3.12 --no-emit-package setuptools accessible-pygments==0.0.5 -alabaster==0.7.16 -anyio==4.3.0 +alabaster==1.0.0 +anyio==4.4.0 argon2-cffi==23.1.0 argon2-cffi-bindings==21.2.0 arrow==1.3.0 asttokens==2.4.1 async-lru==2.0.4 -attrs==23.2.0 +attrs==24.2.0 babel==2.15.0 beautifulsoup4==4.12.3 -black==24.4.2 bleach==6.1.0 -cachetools==5.3.3 +cachetools==5.4.0 cattrs==23.2.3 -certifi==2024.2.2 -cffi==1.16.0 +certifi==2024.7.4 +cffi==1.17.0 cfgv==3.4.0 chardet==5.2.0 charset-normalizer==3.3.2 @@ -25,16 +24,16 @@ colorama==0.4.6 comm==0.2.2 contourpy==1.2.1 cycler==0.12.1 -debugpy==1.8.1 +debugpy==1.8.5 decorator==5.1.1 defusedxml==0.7.1 distlib==0.3.8 docstring-to-markdown==0.15 docutils==0.21.2 executing==2.0.1 -fastjsonschema==2.19.1 -filelock==3.14.0 -fonttools==4.51.0 +fastjsonschema==2.20.0 +filelock==3.15.4 +fonttools==4.53.1 fqdn==1.5.1 gitdb==4.0.11 gitpython==3.1.43 @@ -43,23 +42,22 @@ greenlet==3.0.3 h11==0.14.0 httpcore==1.0.5 httpx==0.27.0 -identify==2.5.36 +identify==2.6.0 idna==3.7 imagesize==1.4.1 -importlib-metadata==7.1.0 +importlib-metadata==8.2.0 iniconfig==2.0.0 -ipykernel==6.29.4 +ipykernel==6.29.5 ipympl==0.9.4 -ipython==8.24.0 +ipython==8.26.0 ipython-genutils==0.2.0 -ipywidgets==8.1.2 +ipywidgets==8.1.3 isoduration==20.11.0 -isort==5.13.2 jedi==0.19.1 jinja2==3.1.4 json5==0.9.25 -jsonpointer==2.4 -jsonschema==4.22.0 +jsonpointer==3.0.0 +jsonschema==4.23.0 jsonschema-specifications==2023.12.1 jupyter==1.0.0 jupyter-cache==1.0.0 @@ -68,17 +66,17 @@ jupyter-console==6.6.3 jupyter-core==5.7.2 jupyter-events==0.10.0 jupyter-lsp==2.2.5 -jupyter-server==2.14.0 +jupyter-server==2.14.2 jupyter-server-mathjax==0.2.6 jupyter-server-terminals==0.5.3 -jupyterlab==4.2.1 -jupyterlab-code-formatter==2.2.1 +jupyterlab==4.2.4 +jupyterlab-code-formatter==3.0.1 jupyterlab-git==0.50.1 jupyterlab-lsp==5.1.0 jupyterlab-myst==2.4.2 jupyterlab-pygments==0.3.0 -jupyterlab-server==2.27.2 -jupyterlab-widgets==3.0.10 +jupyterlab-server==2.27.3 +jupyterlab-widgets==3.0.11 kiwisolver==1.4.5 latexcodec==3.0.0 lsprotocol==2023.0.1 @@ -90,110 +88,108 @@ mdit-py-plugins==0.4.1 mdurl==0.1.2 mistune==3.0.2 mpl-interactions==0.24.1 -mypy-extensions==1.0.0 -myst-nb==1.1.0 -myst-parser==3.0.1 +myst-nb==1.1.1 +myst-parser==4.0.0 nbclient==0.6.8 nbconvert==7.16.4 nbdime==4.0.1 nbformat==5.10.4 -nbmake==1.5.3 +nbmake==1.5.4 nest-asyncio==1.6.0 -nodeenv==1.8.0 -notebook==7.2.0 +nodeenv==1.9.1 +notebook==7.2.1 notebook-shim==0.2.4 numpy==1.26.4 overrides==7.7.0 -packaging==24.0 +packaging==24.1 pandocfilters==1.5.1 parso==0.8.4 -pathspec==0.12.1 pexpect==4.9.0 -pillow==10.3.0 +pillow==10.4.0 platformdirs==4.2.2 pluggy==1.5.0 -pre-commit==3.7.1 +pre-commit==3.8.0 prometheus-client==0.20.0 -prompt-toolkit==3.0.43 -psutil==5.9.8 
+prompt-toolkit==3.0.47 +psutil==6.0.0 ptyprocess==0.7.0 -pure-eval==0.2.2 +pure-eval==0.2.3 pybtex==0.24.0 pybtex-docutils==1.0.3 pycparser==2.22 -pydata-sphinx-theme==0.15.2 +pydata-sphinx-theme==0.15.4 pygments==2.18.0 pyparsing==3.1.2 -pyproject-api==1.6.1 -pytest==8.2.1 +pyproject-api==1.7.1 +pytest==8.3.2 python-dateutil==2.9.0.post0 python-json-logger==2.0.7 python-lsp-jsonrpc==1.1.2 -python-lsp-ruff==2.2.1 +python-lsp-ruff==2.2.2 python-lsp-server==1.11.0 pytoolconfig==1.3.1 -pyyaml==6.0.1 -pyzmq==26.0.3 +pyyaml==6.0.2 +pyzmq==26.1.0 qtconsole==5.5.2 qtpy==2.4.1 referencing==0.35.1 -requests==2.32.2 +requests==2.32.3 rfc3339-validator==0.1.4 rfc3986-validator==0.1.1 rope==1.13.0 -rpds-py==0.18.1 -ruff==0.4.5 +rpds-py==0.20.0 +ruff==0.5.6 send2trash==1.8.3 six==1.16.0 smmap==5.0.1 sniffio==1.3.1 snowballstemmer==2.2.0 soupsieve==2.5 -sphinx==7.3.7 +sphinx==8.0.2 sphinx-api-relink==0.0.9 sphinx-autobuild==2024.4.16 -sphinx-book-theme==1.1.2 -sphinx-codeautolink==0.15.1 +sphinx-book-theme==1.1.3 +sphinx-codeautolink==0.15.2 sphinx-comments==0.0.3 sphinx-copybutton==0.5.2 -sphinx-design==0.6.0 +sphinx-design==0.6.1 sphinx-hep-pdgref==0.2.0 sphinx-pybtex-etal-style==0.0.2 sphinx-remove-toctrees==1.0.0.post1 sphinx-thebe==0.3.1 sphinx-togglebutton==0.3.2 -sphinxcontrib-applehelp==1.0.8 +sphinxcontrib-applehelp==2.0.0 sphinxcontrib-bibtex==2.6.2 -sphinxcontrib-devhelp==1.0.6 -sphinxcontrib-htmlhelp==2.0.5 +sphinxcontrib-devhelp==2.0.0 +sphinxcontrib-htmlhelp==2.1.0 sphinxcontrib-jsmath==1.0.1 -sphinxcontrib-qthelp==1.0.7 -sphinxcontrib-serializinghtml==1.1.10 -sqlalchemy==2.0.30 +sphinxcontrib-qthelp==2.0.0 +sphinxcontrib-serializinghtml==2.0.0 +sqlalchemy==2.0.32 stack-data==0.6.3 -starlette==0.37.2 +starlette==0.38.2 tabulate==0.9.0 terminado==0.18.1 tinycss2==1.3.0 -tornado==6.4 -tox==4.15.0 +tornado==6.4.1 +tox==4.17.0 traitlets==5.14.3 types-python-dateutil==2.9.0.20240316 -typing-extensions==4.11.0 +typing-extensions==4.12.2 ujson==5.10.0 uri-template==1.3.0 -urllib3==2.2.1 -uvicorn==0.29.0 -virtualenv==20.26.2 -watchfiles==0.21.0 +urllib3==2.2.2 +uvicorn==0.30.5 +virtualenv==20.26.3 +watchfiles==0.23.0 wcwidth==0.2.13 -webcolors==1.13 +webcolors==24.6.0 webencodings==0.5.1 websocket-client==1.8.0 websockets==12.0 -wheel==0.43.0 -widgetsnbextension==4.0.10 -zipp==3.18.2 +wheel==0.44.0 +widgetsnbextension==4.0.11 +zipp==3.19.2 # The following packages were excluded from the output: # setuptools diff --git a/.constraints/py3.7.txt b/.constraints/py3.7.txt index c5abe7c8..46b1933d 100644 --- a/.constraints/py3.7.txt +++ b/.constraints/py3.7.txt @@ -8,16 +8,15 @@ anyio==3.7.1 argon2-cffi==23.1.0 argon2-cffi-bindings==21.2.0 arrow==1.2.3 -attrs==23.2.0 +attrs==24.2.0 babel==2.14.0 backcall==0.2.0 beautifulsoup4==4.12.3 -black==23.3.0 bleach==6.0.0 cached-property==1.5.2 -cachetools==5.3.3 +cachetools==5.4.0 cattrs==23.1.2 -certifi==2024.2.2 +certifi==2024.7.4 cffi==1.15.1 cfgv==3.3.1 chardet==5.2.0 @@ -33,8 +32,8 @@ distlib==0.3.8 docstring-to-markdown==0.15 docutils==0.17.1 entrypoints==0.4 -exceptiongroup==1.2.1 -fastjsonschema==2.19.1 +exceptiongroup==1.2.2 +fastjsonschema==2.20.0 filelock==3.12.2 fonttools==4.38.0 fqdn==1.5.1 @@ -52,13 +51,12 @@ ipykernel==6.16.2 ipympl==0.9.3 ipython==7.34.0 ipython-genutils==0.2.0 -ipywidgets==8.1.2 +ipywidgets==8.1.3 isoduration==20.11.0 -isort==5.11.5 jedi==0.18.2 jinja2==3.1.4 json5==0.9.16 -jsonpointer==2.4 +jsonpointer==3.0.0 jsonschema==4.17.3 jupyter==1.0.0 jupyter-cache==0.5.0 @@ -73,16 +71,16 @@ jupyter-server-mathjax==0.2.6 
jupyter-server-ydoc==0.8.0 jupyter-ydoc==0.2.4 jupyterlab==3.6.7 -jupyterlab-code-formatter==2.2.1 +jupyterlab-code-formatter==3.0.1 jupyterlab-git==0.44.0 jupyterlab-lsp==3.10.2 jupyterlab-myst==1.2.0 jupyterlab-pygments==0.2.2 jupyterlab-server==2.24.0 -jupyterlab-widgets==3.0.10 +jupyterlab-widgets==3.0.11 kiwisolver==1.4.5 latexcodec==3.0.0 -livereload==2.6.3 +livereload==2.7.0 lsprotocol==2023.0.1 markdown-it-py==2.2.0 markupsafe==2.1.5 @@ -92,24 +90,22 @@ mdit-py-plugins==0.3.5 mdurl==0.1.2 mistune==3.0.2 mpl-interactions==0.24.1 -mypy-extensions==1.0.0 myst-nb==0.17.2 myst-parser==0.18.1 -nbclassic==1.0.0 +nbclassic==1.1.0 nbclient==0.5.13 nbconvert==7.6.0 nbdime==3.2.1 nbformat==5.8.0 nbmake==1.2.1 nest-asyncio==1.6.0 -nodeenv==1.8.0 +nodeenv==1.9.1 notebook==6.5.7 notebook-shim==0.2.4 numpy==1.21.6 packaging==24.0 pandocfilters==1.5.1 parso==0.8.4 -pathspec==0.11.2 pexpect==4.9.0 pickleshare==0.7.5 pillow==9.5.0 @@ -118,13 +114,13 @@ platformdirs==4.0.0 pluggy==1.2.0 pre-commit==2.21.0 prometheus-client==0.17.1 -prompt-toolkit==3.0.43 -psutil==5.9.8 +prompt-toolkit==3.0.47 +psutil==6.0.0 ptyprocess==0.7.0 pybtex==0.24.0 pybtex-docutils==1.0.3 pycparser==2.21 -pydantic==1.10.15 +pydantic==1.10.17 pydata-sphinx-theme==0.13.3 pygments==2.17.2 pyparsing==3.1.2 @@ -139,7 +135,7 @@ python-lsp-server==1.7.4 pytoolconfig==1.3.0 pytz==2024.1 pyyaml==6.0.1 -pyzmq==26.0.3 +pyzmq==26.1.0 qtconsole==5.4.4 qtpy==2.4.1 requests==2.31.0 @@ -157,7 +153,7 @@ sphinx==5.3.0 sphinx-api-relink==0.0.9 sphinx-autobuild==2021.3.14 sphinx-book-theme==1.0.1 -sphinx-codeautolink==0.15.1 +sphinx-codeautolink==0.15.2 sphinx-comments==0.0.3 sphinx-copybutton==0.5.2 sphinx-design==0.4.1 @@ -173,7 +169,7 @@ sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 -sqlalchemy==1.4.52 +sqlalchemy==1.4.53 tabulate==0.9.0 terminado==0.17.1 tinycss2==1.2.1 @@ -181,18 +177,17 @@ tomli==2.0.1 tornado==6.2 tox==4.8.0 traitlets==5.9.0 -typed-ast==1.5.5 typing-extensions==4.7.1 ujson==5.7.0 uri-template==1.3.0 urllib3==2.0.7 -virtualenv==20.26.2 +virtualenv==20.26.3 wcwidth==0.2.13 webcolors==1.13 webencodings==0.5.1 websocket-client==1.6.1 wheel==0.42.0 -widgetsnbextension==4.0.10 +widgetsnbextension==4.0.11 y-py==0.5.9 ypy-websocket==0.8.2 zipp==3.15.0 diff --git a/.constraints/py3.8.txt b/.constraints/py3.8.txt index f86740f0..938c00c8 100644 --- a/.constraints/py3.8.txt +++ b/.constraints/py3.8.txt @@ -2,22 +2,21 @@ # uv pip compile pyproject.toml -o .constraints/py3.8.txt --all-extras --no-annotate --python-version=3.8 --no-emit-package setuptools accessible-pygments==0.0.4 alabaster==0.7.13 -anyio==4.3.0 +anyio==4.4.0 argon2-cffi==23.1.0 argon2-cffi-bindings==21.2.0 arrow==1.3.0 asttokens==2.4.1 async-lru==2.0.4 -attrs==23.2.0 +attrs==24.2.0 babel==2.15.0 backcall==0.2.0 beautifulsoup4==4.12.3 -black==24.4.2 bleach==6.1.0 -cachetools==5.3.3 +cachetools==5.4.0 cattrs==23.2.3 -certifi==2024.2.2 -cffi==1.16.0 +certifi==2024.7.4 +cffi==1.17.0 cfgv==3.4.0 chardet==5.2.0 charset-normalizer==3.3.2 @@ -26,17 +25,17 @@ colorama==0.4.6 comm==0.2.2 contourpy==1.1.1 cycler==0.12.1 -debugpy==1.8.1 +debugpy==1.8.5 decorator==5.1.1 defusedxml==0.7.1 distlib==0.3.8 docstring-to-markdown==0.15 docutils==0.17.1 -exceptiongroup==1.2.1 +exceptiongroup==1.2.2 executing==2.0.1 -fastjsonschema==2.19.1 -filelock==3.14.0 -fonttools==4.51.0 +fastjsonschema==2.20.0 +filelock==3.15.4 +fonttools==4.53.1 fqdn==1.5.1 gitdb==4.0.11 gitpython==3.1.43 @@ -45,24 +44,23 @@ 
greenlet==3.0.3 h11==0.14.0 httpcore==1.0.5 httpx==0.27.0 -identify==2.5.36 +identify==2.6.0 idna==3.7 imagesize==1.4.1 -importlib-metadata==7.1.0 +importlib-metadata==8.2.0 importlib-resources==6.4.0 iniconfig==2.0.0 -ipykernel==6.29.4 +ipykernel==6.29.5 ipympl==0.9.3 ipython==8.12.3 ipython-genutils==0.2.0 -ipywidgets==8.1.2 +ipywidgets==8.1.3 isoduration==20.11.0 -isort==5.13.2 jedi==0.19.1 jinja2==3.1.4 json5==0.9.25 -jsonpointer==2.4 -jsonschema==4.22.0 +jsonpointer==3.0.0 +jsonschema==4.23.0 jsonschema-specifications==2023.12.1 jupyter==1.0.0 jupyter-cache==0.6.1 @@ -71,20 +69,20 @@ jupyter-console==6.6.3 jupyter-core==5.7.2 jupyter-events==0.10.0 jupyter-lsp==2.2.5 -jupyter-server==2.14.0 +jupyter-server==2.14.2 jupyter-server-mathjax==0.2.6 jupyter-server-terminals==0.5.3 -jupyterlab==4.2.1 -jupyterlab-code-formatter==2.2.1 +jupyterlab==4.2.4 +jupyterlab-code-formatter==3.0.1 jupyterlab-git==0.50.1 jupyterlab-lsp==5.1.0 jupyterlab-myst==2.4.2 jupyterlab-pygments==0.3.0 -jupyterlab-server==2.27.2 -jupyterlab-widgets==3.0.10 +jupyterlab-server==2.27.3 +jupyterlab-widgets==3.0.11 kiwisolver==1.4.5 latexcodec==3.0.0 -livereload==2.6.3 +livereload==2.7.0 lsprotocol==2023.0.1 markdown-it-py==2.2.0 markupsafe==2.1.5 @@ -94,62 +92,60 @@ mdit-py-plugins==0.3.5 mdurl==0.1.2 mistune==3.0.2 mpl-interactions==0.24.1 -mypy-extensions==1.0.0 myst-nb==0.17.2 myst-parser==0.18.1 nbclient==0.6.8 nbconvert==7.16.4 nbdime==4.0.1 nbformat==5.10.4 -nbmake==1.5.3 +nbmake==1.5.4 nest-asyncio==1.6.0 -nodeenv==1.8.0 -notebook==7.2.0 +nodeenv==1.9.1 +notebook==7.2.1 notebook-shim==0.2.4 numpy==1.24.4 overrides==7.7.0 -packaging==24.0 +packaging==24.1 pandocfilters==1.5.1 parso==0.8.4 -pathspec==0.12.1 pexpect==4.9.0 pickleshare==0.7.5 -pillow==10.3.0 +pillow==10.4.0 pkgutil-resolve-name==1.3.10 platformdirs==4.2.2 pluggy==1.5.0 pre-commit==3.5.0 prometheus-client==0.20.0 -prompt-toolkit==3.0.43 -psutil==5.9.8 +prompt-toolkit==3.0.47 +psutil==6.0.0 ptyprocess==0.7.0 -pure-eval==0.2.2 +pure-eval==0.2.3 pybtex==0.24.0 pybtex-docutils==1.0.3 pycparser==2.22 pydata-sphinx-theme==0.14.4 pygments==2.18.0 pyparsing==3.1.2 -pyproject-api==1.6.1 -pytest==8.2.1 +pyproject-api==1.7.1 +pytest==8.3.2 python-dateutil==2.9.0.post0 python-json-logger==2.0.7 python-lsp-jsonrpc==1.1.2 -python-lsp-ruff==2.2.1 +python-lsp-ruff==2.2.2 python-lsp-server==1.11.0 pytoolconfig==1.3.1 pytz==2024.1 -pyyaml==6.0.1 -pyzmq==26.0.3 +pyyaml==6.0.2 +pyzmq==26.1.0 qtconsole==5.5.2 qtpy==2.4.1 referencing==0.35.1 -requests==2.32.2 +requests==2.32.3 rfc3339-validator==0.1.4 rfc3986-validator==0.1.1 rope==1.13.0 -rpds-py==0.18.1 -ruff==0.4.5 +rpds-py==0.20.0 +ruff==0.5.6 send2trash==1.8.3 six==1.16.0 smmap==5.0.1 @@ -160,7 +156,7 @@ sphinx==5.3.0 sphinx-api-relink==0.0.9 sphinx-autobuild==2021.3.14 sphinx-book-theme==1.0.1 -sphinx-codeautolink==0.15.1 +sphinx-codeautolink==0.15.2 sphinx-comments==0.0.3 sphinx-copybutton==0.5.2 sphinx-design==0.5.0 @@ -176,28 +172,28 @@ sphinxcontrib-htmlhelp==2.0.1 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 -sqlalchemy==2.0.30 +sqlalchemy==2.0.32 stack-data==0.6.3 tabulate==0.9.0 terminado==0.18.1 tinycss2==1.3.0 tomli==2.0.1 -tornado==6.4 -tox==4.15.0 +tornado==6.4.1 +tox==4.17.0 traitlets==5.14.3 types-python-dateutil==2.9.0.20240316 -typing-extensions==4.11.0 +typing-extensions==4.12.2 ujson==5.10.0 uri-template==1.3.0 -urllib3==2.2.1 -virtualenv==20.26.2 +urllib3==2.2.2 +virtualenv==20.26.3 wcwidth==0.2.13 -webcolors==1.13 +webcolors==24.6.0 
webencodings==0.5.1 websocket-client==1.8.0 -wheel==0.43.0 -widgetsnbextension==4.0.10 -zipp==3.18.2 +wheel==0.44.0 +widgetsnbextension==4.0.11 +zipp==3.19.2 # The following packages were excluded from the output: # setuptools diff --git a/.constraints/py3.9.txt b/.constraints/py3.9.txt index cba682a1..5ba41958 100644 --- a/.constraints/py3.9.txt +++ b/.constraints/py3.9.txt @@ -2,21 +2,20 @@ # uv pip compile pyproject.toml -o .constraints/py3.9.txt --all-extras --no-annotate --python-version=3.9 --no-emit-package setuptools accessible-pygments==0.0.5 alabaster==0.7.16 -anyio==4.3.0 +anyio==4.4.0 argon2-cffi==23.1.0 argon2-cffi-bindings==21.2.0 arrow==1.3.0 asttokens==2.4.1 async-lru==2.0.4 -attrs==23.2.0 +attrs==24.2.0 babel==2.15.0 beautifulsoup4==4.12.3 -black==24.4.2 bleach==6.1.0 -cachetools==5.3.3 +cachetools==5.4.0 cattrs==23.2.3 -certifi==2024.2.2 -cffi==1.16.0 +certifi==2024.7.4 +cffi==1.17.0 cfgv==3.4.0 chardet==5.2.0 charset-normalizer==3.3.2 @@ -25,17 +24,17 @@ colorama==0.4.6 comm==0.2.2 contourpy==1.2.1 cycler==0.12.1 -debugpy==1.8.1 +debugpy==1.8.5 decorator==5.1.1 defusedxml==0.7.1 distlib==0.3.8 docstring-to-markdown==0.15 docutils==0.21.2 -exceptiongroup==1.2.1 +exceptiongroup==1.2.2 executing==2.0.1 -fastjsonschema==2.19.1 -filelock==3.14.0 -fonttools==4.51.0 +fastjsonschema==2.20.0 +filelock==3.15.4 +fonttools==4.53.1 fqdn==1.5.1 gitdb==4.0.11 gitpython==3.1.43 @@ -44,24 +43,23 @@ greenlet==3.0.3 h11==0.14.0 httpcore==1.0.5 httpx==0.27.0 -identify==2.5.36 +identify==2.6.0 idna==3.7 imagesize==1.4.1 -importlib-metadata==7.1.0 +importlib-metadata==8.2.0 importlib-resources==6.4.0 iniconfig==2.0.0 -ipykernel==6.29.4 +ipykernel==6.29.5 ipympl==0.9.4 ipython==8.18.1 ipython-genutils==0.2.0 -ipywidgets==8.1.2 +ipywidgets==8.1.3 isoduration==20.11.0 -isort==5.13.2 jedi==0.19.1 jinja2==3.1.4 json5==0.9.25 -jsonpointer==2.4 -jsonschema==4.22.0 +jsonpointer==3.0.0 +jsonschema==4.23.0 jsonschema-specifications==2023.12.1 jupyter==1.0.0 jupyter-cache==1.0.0 @@ -70,17 +68,17 @@ jupyter-console==6.6.3 jupyter-core==5.7.2 jupyter-events==0.10.0 jupyter-lsp==2.2.5 -jupyter-server==2.14.0 +jupyter-server==2.14.2 jupyter-server-mathjax==0.2.6 jupyter-server-terminals==0.5.3 -jupyterlab==4.2.1 -jupyterlab-code-formatter==2.2.1 +jupyterlab==4.2.4 +jupyterlab-code-formatter==3.0.1 jupyterlab-git==0.50.1 jupyterlab-lsp==5.1.0 jupyterlab-myst==2.4.2 jupyterlab-pygments==0.3.0 -jupyterlab-server==2.27.2 -jupyterlab-widgets==3.0.10 +jupyterlab-server==2.27.3 +jupyterlab-widgets==3.0.11 kiwisolver==1.4.5 latexcodec==3.0.0 lsprotocol==2023.0.1 @@ -92,111 +90,109 @@ mdit-py-plugins==0.4.1 mdurl==0.1.2 mistune==3.0.2 mpl-interactions==0.24.1 -mypy-extensions==1.0.0 -myst-nb==1.1.0 +myst-nb==1.1.1 myst-parser==3.0.1 nbclient==0.6.8 nbconvert==7.16.4 nbdime==4.0.1 nbformat==5.10.4 -nbmake==1.5.3 +nbmake==1.5.4 nest-asyncio==1.6.0 -nodeenv==1.8.0 -notebook==7.2.0 +nodeenv==1.9.1 +notebook==7.2.1 notebook-shim==0.2.4 numpy==1.26.4 overrides==7.7.0 -packaging==24.0 +packaging==24.1 pandocfilters==1.5.1 parso==0.8.4 -pathspec==0.12.1 pexpect==4.9.0 -pillow==10.3.0 +pillow==10.4.0 platformdirs==4.2.2 pluggy==1.5.0 -pre-commit==3.7.1 +pre-commit==3.8.0 prometheus-client==0.20.0 -prompt-toolkit==3.0.43 -psutil==5.9.8 +prompt-toolkit==3.0.47 +psutil==6.0.0 ptyprocess==0.7.0 -pure-eval==0.2.2 +pure-eval==0.2.3 pybtex==0.24.0 pybtex-docutils==1.0.3 pycparser==2.22 -pydata-sphinx-theme==0.15.2 +pydata-sphinx-theme==0.15.4 pygments==2.18.0 pyparsing==3.1.2 -pyproject-api==1.6.1 -pytest==8.2.1 
+pyproject-api==1.7.1 +pytest==8.3.2 python-dateutil==2.9.0.post0 python-json-logger==2.0.7 python-lsp-jsonrpc==1.1.2 -python-lsp-ruff==2.2.1 +python-lsp-ruff==2.2.2 python-lsp-server==1.11.0 pytoolconfig==1.3.1 -pyyaml==6.0.1 -pyzmq==26.0.3 +pyyaml==6.0.2 +pyzmq==26.1.0 qtconsole==5.5.2 qtpy==2.4.1 referencing==0.35.1 -requests==2.32.2 +requests==2.32.3 rfc3339-validator==0.1.4 rfc3986-validator==0.1.1 rope==1.13.0 -rpds-py==0.18.1 -ruff==0.4.5 +rpds-py==0.20.0 +ruff==0.5.6 send2trash==1.8.3 six==1.16.0 smmap==5.0.1 sniffio==1.3.1 snowballstemmer==2.2.0 soupsieve==2.5 -sphinx==7.3.7 +sphinx==7.4.7 sphinx-api-relink==0.0.9 sphinx-autobuild==2024.4.16 -sphinx-book-theme==1.1.2 -sphinx-codeautolink==0.15.1 +sphinx-book-theme==1.1.3 +sphinx-codeautolink==0.15.2 sphinx-comments==0.0.3 sphinx-copybutton==0.5.2 -sphinx-design==0.6.0 +sphinx-design==0.6.1 sphinx-hep-pdgref==0.2.0 sphinx-pybtex-etal-style==0.0.2 sphinx-remove-toctrees==1.0.0.post1 sphinx-thebe==0.3.1 sphinx-togglebutton==0.3.2 -sphinxcontrib-applehelp==1.0.8 +sphinxcontrib-applehelp==2.0.0 sphinxcontrib-bibtex==2.6.2 -sphinxcontrib-devhelp==1.0.6 -sphinxcontrib-htmlhelp==2.0.5 +sphinxcontrib-devhelp==2.0.0 +sphinxcontrib-htmlhelp==2.1.0 sphinxcontrib-jsmath==1.0.1 -sphinxcontrib-qthelp==1.0.7 -sphinxcontrib-serializinghtml==1.1.10 -sqlalchemy==2.0.30 +sphinxcontrib-qthelp==2.0.0 +sphinxcontrib-serializinghtml==2.0.0 +sqlalchemy==2.0.32 stack-data==0.6.3 -starlette==0.37.2 +starlette==0.38.2 tabulate==0.9.0 terminado==0.18.1 tinycss2==1.3.0 tomli==2.0.1 -tornado==6.4 -tox==4.15.0 +tornado==6.4.1 +tox==4.17.0 traitlets==5.14.3 types-python-dateutil==2.9.0.20240316 -typing-extensions==4.11.0 +typing-extensions==4.12.2 ujson==5.10.0 uri-template==1.3.0 -urllib3==2.2.1 -uvicorn==0.29.0 -virtualenv==20.26.2 -watchfiles==0.21.0 +urllib3==2.2.2 +uvicorn==0.30.5 +virtualenv==20.26.3 +watchfiles==0.23.0 wcwidth==0.2.13 -webcolors==1.13 +webcolors==24.6.0 webencodings==0.5.1 websocket-client==1.8.0 websockets==12.0 -wheel==0.43.0 -widgetsnbextension==4.0.10 -zipp==3.18.2 +wheel==0.44.0 +widgetsnbextension==4.0.11 +zipp==3.19.2 # The following packages were excluded from the output: # setuptools diff --git a/.envrc b/.envrc new file mode 100644 index 00000000..cce3aa58 --- /dev/null +++ b/.envrc @@ -0,0 +1 @@ +layout anaconda diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a052edc4..42616bb8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -35,7 +35,7 @@ jobs: with: apt-packages: graphviz gh-pages: true - python-version: "3.10" + python-version: "3.12" specific-pip-packages: ${{ inputs.specific-pip-packages }} style: if: inputs.specific-pip-packages == '' @@ -43,4 +43,4 @@ jobs: token: ${{ secrets.PAT }} uses: ComPWA/actions/.github/workflows/pre-commit.yml@v1 with: - python-version: "3.10" + python-version: "3.12" diff --git a/.gitpod.yml b/.gitpod.yml index 537fcbbe..4acfb78d 100644 --- a/.gitpod.yml +++ b/.gitpod.yml @@ -1,6 +1,6 @@ tasks: - - init: pyenv local 3.10 - - init: pip install -c .constraints/py3.10.txt -e .[dev] + - init: pyenv local 3.12 + - init: pip install -c .constraints/py3.12.txt -e .[dev] github: prebuilds: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 467a74f9..02add6a4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -42,12 +42,12 @@ repos: metadata.vscode - repo: https://github.com/ComPWA/policy - rev: 0.3.9 + rev: 0.3.18 hooks: - id: check-dev-files args: - --doc-apt-packages=graphviz - - --dev-python-version=3.10 + - 
--dev-python-version=3.12 - --github-pages - --no-prettierrc - --pin-requirements=bimonthly @@ -55,9 +55,10 @@ repos: - --repo-title=ComPWA Organization - id: colab-toc-visible - id: fix-nbformat-version + - id: remove-empty-tags - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.4.5 + rev: v0.5.6 hooks: - id: ruff args: [--fix] @@ -108,7 +109,7 @@ repos: exclude: (?x)^(.*/Manifest\.toml|.*/Project\.toml)$ - repo: https://github.com/python-jsonschema/check-jsonschema - rev: 0.28.4 + rev: 0.29.1 hooks: - id: check-jsonschema name: Check CITATION.cff @@ -121,7 +122,7 @@ repos: pass_filenames: false - repo: https://github.com/streetsidesoftware/cspell-cli - rev: v8.8.1 + rev: v8.13.1 hooks: - id: cspell @@ -159,6 +160,6 @@ repos: - jupyter - repo: https://github.com/ComPWA/mirrors-pyright - rev: v1.1.364 + rev: v1.1.374 hooks: - id: pyright diff --git a/.pre-commit/pin_nb_requirements.py b/.pre-commit/pin_nb_requirements.py index 644d1ba9..ebd958a2 100644 --- a/.pre-commit/pin_nb_requirements.py +++ b/.pre-commit/pin_nb_requirements.py @@ -37,7 +37,7 @@ def main(argv: Sequence[str] | None = None) -> int: for filename in args.filenames: try: check_pinned_requirements(filename) - except PrecommitError as exception: + except PrecommitError as exception: # noqa: PERF203 errors.append(exception) if errors: for error in errors: diff --git a/.readthedocs.yml b/.readthedocs.yml index 96f71c8d..1429c63f 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -13,10 +13,10 @@ build: apt_packages: - graphviz tools: - python: "3.10" + python: "3.12" jobs: pre_install: - ./docs/install-julia-on-rtd.sh post_install: - python -m pip install 'uv>=0.2.0' - - python -m uv pip install -c .constraints/py3.10.txt -e .[doc] + - python -m uv pip install -c .constraints/py3.12.txt -e .[doc] diff --git a/.vscode/settings.json b/.vscode/settings.json index fc802d40..e600900a 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -30,6 +30,9 @@ "\u03bd": true, "\u03c3": true }, + "files.associations": { + "**/.constraints/py*.txt": "pip-requirements" + }, "files.watcherExclude": { "**/*_cache/**": true, "**/.eggs/**": true, @@ -54,6 +57,7 @@ "notebook.gotoSymbols.showAllSymbols": true, "python.analysis.autoImportCompletions": false, "python.analysis.typeCheckingMode": "strict", + "python.terminal.activateEnvironment": false, "python.testing.pytestEnabled": false, "redhat.telemetry.enabled": false, "rewrap.wrappingColumn": 88, diff --git a/docs/adr/001/operators.ipynb b/docs/adr/001/operators.ipynb index 7ad21fe3..988274d6 100644 --- a/docs/adr/001/operators.ipynb +++ b/docs/adr/001/operators.ipynb @@ -75,9 +75,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "import graphviz\n", @@ -192,7 +190,7 @@ " __rfloordiv__ = MAKE_RBINARY(operator.floordiv)\n", "\n", " def __neg__(self):\n", - " return UnaryOp(self, lambda x: -x)\n", + " return UnaryOp(self, operator.neg)\n", "\n", " @property\n", " def value(self):\n", diff --git a/docs/adr/001/sympy.ipynb b/docs/adr/001/sympy.ipynb index c146c640..d2c1b24f 100644 --- a/docs/adr/001/sympy.ipynb +++ b/docs/adr/001/sympy.ipynb @@ -64,9 +64,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "from __future__ import annotations\n", @@ -92,9 +90,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "x = sp.Symbol(\"x\")" @@ 
-110,9 +106,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "model = AmplitudeModel()\n", @@ -308,9 +302,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "plot_model(model)" @@ -346,9 +338,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "model.initial_values[sp.Symbol(R\"\\sigma_1\")] = sp.Symbol(R\"\\sigma_3\")\n", @@ -381,9 +371,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "model.initial_values[sp.Symbol(R\"\\sigma_3\")] = 1\n", @@ -418,9 +406,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "model.dynamics[sp.Symbol(R\"\\mathrm{dyn}_3\")] = sp.sqrt(x)\n", @@ -642,9 +628,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "import math\n", @@ -692,9 +676,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "import numpy as np\n", @@ -864,9 +846,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "import matplotlib.pyplot as plt\n", @@ -900,9 +880,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "parameter_values = (1.0, 0.0, 0.1, 2.0, 2.0, 0.1)\n", diff --git a/docs/adr/002/composition.ipynb b/docs/adr/002/composition.ipynb index 6eb11c6a..fd82358a 100644 --- a/docs/adr/002/composition.ipynb +++ b/docs/adr/002/composition.ipynb @@ -281,9 +281,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "sp.plot(sp.Abs(evaluated_bw), (m, 0, 2), axis_center=(0, 0), ylim=(0, 1))\n", diff --git a/docs/adr/002/expr.ipynb b/docs/adr/002/expr.ipynb index 797d66b7..9c9d6577 100644 --- a/docs/adr/002/expr.ipynb +++ b/docs/adr/002/expr.ipynb @@ -538,9 +538,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "sp.plot(sp.Abs(rel_bw.subs({m0: 1, w0: 0.2})).doit(), (m, 0, 2));" diff --git a/docs/adr/002/function.ipynb b/docs/adr/002/function.ipynb index 851c0327..5718bdb1 100644 --- a/docs/adr/002/function.ipynb +++ b/docs/adr/002/function.ipynb @@ -195,9 +195,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "m, m0, w0 = sp.symbols(R\"m m_0 \\Gamma\")\n", diff --git a/docs/report/000.ipynb b/docs/report/000.ipynb index ccf08615..d1151c00 100644 --- a/docs/report/000.ipynb +++ b/docs/report/000.ipynb @@ -38,9 +38,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "" ] @@ -452,9 +450,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Conditional square root" ] @@ -743,9 +739,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "jax_complex_sqrt_error = jax.jit(np_complex_sqrt)\n", diff --git a/docs/report/001.ipynb b/docs/report/001.ipynb index fb467f3d..186d8c16 100644 
--- a/docs/report/001.ipynb +++ b/docs/report/001.ipynb @@ -433,9 +433,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "from sympy.printing.numpy import NumPyPrinter\n", diff --git a/docs/report/002.ipynb b/docs/report/002.ipynb index e0213b7f..b58b1a15 100644 --- a/docs/report/002.ipynb +++ b/docs/report/002.ipynb @@ -770,9 +770,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Test with data" ] @@ -801,9 +799,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "sympy_model = SympyModel(\n", diff --git a/docs/report/003.ipynb b/docs/report/003.ipynb index 828d4261..a80948b0 100644 --- a/docs/report/003.ipynb +++ b/docs/report/003.ipynb @@ -50,9 +50,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "This report is an attempt formulate the Chew-Mandelstam function described in [PDG2023, §50.3.3 (Resonances), pp.14–15](https://pdg.lbl.gov/2023/reviews/rpp2023-rev-resonances.pdf#page=15) with [SymPy](https://docs.sympy.org), so that it can be implemented in [AmpForm](https://ampform.rtfd.io)." ] @@ -133,18 +131,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## S-wave" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "As can be seen in Eq. (50.44) on [PDG2023, §Resonances, p.15](https://pdg.lbl.gov/2023/reviews/rpp2023-rev-resonances.pdf#page=15), the Chew-Mandelstam function $\\Sigma_a$ for a particle $a$ decaying to particles $1, 2$ has a simple form for angular momentum $L=0$ ($S$-wave):\n", "\n", @@ -242,9 +236,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "It should be noted that this equation is not well-defined along the real axis, that is, for $\\mathrm{Im}(s) = 0$. For this reason, we split $s$ into a real part $s'$ with a small imaginary offset (the PDG indicates this with $s+0i$). We parametrized this imaginary offset with $\\epsilon$, and for the interactive plot, we do so with a power of $10$:" ] @@ -296,9 +288,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "symbols = (s_prime, m1, m2, epsilon)\n", @@ -308,9 +298,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "As starting values for the interactive plot, we assume $\\pi\\eta$ scattering (just like in the PDG section) and use their masses as values for $m_1$ and $m_1$, respectively." ] @@ -489,9 +477,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{tip}\n", "Compare the plots above with Figure 50.6 on [PDG2023, §Resonances, p.16](https://pdg.lbl.gov/2023/reviews/rpp2023-rev-resonances.pdf#page=15).\n", @@ -500,18 +486,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## General dispersion integral" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "For higher angular momenta, the PDG notes that one has to compute the dispersion integral given by Eq. 
(50.44) on [PDG2023, §Resonances, p.15](https://pdg.lbl.gov/2023/reviews/rpp2023-rev-resonances.pdf#page=15):\n", "\n", @@ -531,9 +513,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "From Equations (50.33–34) on [PDG2023, §Resonances, p.12](https://pdg.lbl.gov/2023/reviews/rpp2023-rev-resonances.pdf#page=12), it can be deduced that the function $n_a^2$ is the same as AmpForm's {class}`~ampform.dynamics.BlattWeisskopfSquared` (note that this function is normalized, whereas the PDG's $F_j$ function has $1$ in the nominator). For this reason, we simply use {class}`~ampform.dynamics.BlattWeisskopfSquared` for the definition of $n_a^2$:" ] @@ -579,9 +559,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "For $\\rho_a$, we use AmpForm's {class}`~ampform.dynamics.phasespace.PhaseSpaceFactor`:" ] @@ -604,9 +582,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "The symbolic integrand is then formulated as:" ] @@ -626,9 +602,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Next, we {func}`~sympy.utilities.lambdify.lambdify` this integrand to a {mod}`numpy` expression so that we can integrate it efficiently:" ] @@ -636,9 +610,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "integrand_func = sp.lambdify(\n", @@ -650,9 +622,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{note}\n", "Integrals can be expressed symbolically with SymPy, with some caveats. See {ref}`report/016:SymPy integral`.\n", @@ -661,9 +631,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "As discussed in [TR-016](016.ipynb), {func}`scipy.integrate.quad` cannot integrate over complex-valued functions, but {func}`scipy.integrate.quad_vec` can. For comparison, we now compute this integral for a few values of $L>0$:" ] @@ -671,9 +639,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "s_domain = np.linspace(s_min, s_max, num=50)\n", @@ -683,9 +649,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "It is handy to store the numerical results of each dispersion integral in a {obj}`dict` with $L$ as keys:" ] @@ -717,9 +681,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Finally, as can be seen from Eq. {eq}`dispersion-integral`, the resulting values from the integral have to be shifted with a factor $\\frac{s-s_{\\mathrm{thr}_a}}{\\pi}$ to get $\\Sigma_a$. We also scale the values with $16\\pi$ so that it can be compared with the plot generated in {ref}`report/003:S-wave`." 
] @@ -727,9 +689,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "sigma = {\n", @@ -793,9 +753,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## SymPy expressions" ] @@ -901,9 +859,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{warning}\n", "\n", @@ -917,9 +873,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "UnevaluatableIntegral.abs_tolerance = 1e-4\n", @@ -962,9 +916,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "```{autolink-skip}\n", "```" @@ -973,9 +925,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "s_values = np.linspace(-0.15, 1.4, num=200)\n", @@ -985,9 +935,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Note that the dispersion integral for $L=0$ indeed reproduces the same shape as in {ref}`report/003:S-wave`!" ] diff --git a/docs/report/004.ipynb b/docs/report/004.ipynb index bb6546a3..daba0609 100644 --- a/docs/report/004.ipynb +++ b/docs/report/004.ipynb @@ -437,8 +437,8 @@ "\n", "z_cut_min = 0.75 * z_min\n", "z_cut_max = 0.75 * z_max\n", - "cut_off_min = np.vectorize(lambda z: z if z > z_cut_min else z_cut_min)\n", - "cut_off_max = np.vectorize(lambda z: z if z < z_cut_max else z_cut_max)\n", + "cut_off_min = np.vectorize(lambda z: max(z_cut_min, z))\n", + "cut_off_max = np.vectorize(lambda z: min(z_cut_max, z))\n", "\n", "plot_style = {\n", " \"linewidth\": 0,\n", diff --git a/docs/report/005.ipynb b/docs/report/005.ipynb index c40485fe..3b177eea 100644 --- a/docs/report/005.ipynb +++ b/docs/report/005.ipynb @@ -317,9 +317,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "n_channels = 1\n", @@ -340,9 +338,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "def Kij(\n", @@ -564,9 +560,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Generalization" ] diff --git a/docs/report/006.ipynb b/docs/report/006.ipynb index 3d659ade..6be62a10 100644 --- a/docs/report/006.ipynb +++ b/docs/report/006.ipynb @@ -104,9 +104,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "This report illustrates how to interact with [`matplotlib`](https://matplotlib.org) 3D plots through [Matplotlib sliders](https://matplotlib.org/stable/api/widgets_api.html) and [ipywidgets](https://ipywidgets.readthedocs.io/en/latest/examples/Widget%20List.html). 
This might be implemented later on in {mod}`symplot` and/or [`mpl_interactions`](https://mpl-interactions.readthedocs.io) (see [ianhi/mpl-interactions#89](https://github.com/ianhi/mpl-interactions/issues/89)).\n", "\n", @@ -342,9 +340,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "fig2, ax2 = plt.subplots(ncols=1, subplot_kw={\"projection\": \"3d\"})\n", diff --git a/docs/report/008.ipynb b/docs/report/008.ipynb index 4017907d..64f51b90 100644 --- a/docs/report/008.ipynb +++ b/docs/report/008.ipynb @@ -156,8 +156,7 @@ "cell_type": "code", "execution_count": null, "metadata": { - "keep_output": false, - "tags": [] + "keep_output": false }, "outputs": [], "source": [ @@ -169,9 +168,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "![](https://user-images.githubusercontent.com/29308176/164993648-13c6b74a-b85f-4492-aaf2-c64cdc30e345.svg)" ] diff --git a/docs/report/009.ipynb b/docs/report/009.ipynb index 6208f40e..53b6cabc 100644 --- a/docs/report/009.ipynb +++ b/docs/report/009.ipynb @@ -292,9 +292,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "n_channels = 2\n", @@ -406,9 +404,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Generalization" ] @@ -985,8 +981,7 @@ "cell_type": "code", "execution_count": null, "metadata": { - "keep_output": false, - "tags": [] + "keep_output": false }, "outputs": [], "source": [ diff --git a/docs/report/010.ipynb b/docs/report/010.ipynb index e4c67457..24bbb9a3 100644 --- a/docs/report/010.ipynb +++ b/docs/report/010.ipynb @@ -429,9 +429,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Visualization" ] @@ -722,8 +720,7 @@ "cell_type": "code", "execution_count": null, "metadata": { - "keep_output": false, - "tags": [] + "keep_output": false }, "outputs": [], "source": [ diff --git a/docs/report/011.ipynb b/docs/report/011.ipynb index a36c9bd3..b53146d2 100644 --- a/docs/report/011.ipynb +++ b/docs/report/011.ipynb @@ -105,9 +105,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Test sample" ] @@ -265,9 +263,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "class BoostZ(UnevaluatedExpression):\n", @@ -292,9 +288,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "class RotationY(UnevaluatedExpression):\n", @@ -367,9 +361,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "class ArrayMultiplication(sp.Expr):\n", @@ -513,9 +505,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "def print_as_numpy(self, printer: Printer, *args) -> str:\n", @@ -641,9 +631,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "def _print_BoostZ(self: NumPyPrinter, expr: BoostZ) -> str:\n", @@ -869,18 +857,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Angle computation" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, 
"source": [ "### Computing phi" ] @@ -1253,9 +1237,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Computing theta" ] @@ -1278,9 +1260,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "class ArrayAxisSum(sp.Expr):\n", diff --git a/docs/report/012.ipynb b/docs/report/012.ipynb index 3c743742..316dc9cb 100644 --- a/docs/report/012.ipynb +++ b/docs/report/012.ipynb @@ -258,9 +258,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Conclusion" ] @@ -268,9 +266,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [ { "data": { @@ -307,9 +303,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [ { "name": "stdout", @@ -336,9 +330,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "np.testing.assert_allclose(intensities, extended_intensities)\n", diff --git a/docs/report/013.ipynb b/docs/report/013.ipynb index b810be94..82e524af 100644 --- a/docs/report/013.ipynb +++ b/docs/report/013.ipynb @@ -76,9 +76,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Phase space sample" ] @@ -104,9 +102,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "from tensorwaves.data import TFPhaseSpaceGenerator, TFUniformRealNumberGenerator\n", @@ -177,9 +173,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "reaction = qrules.generate_transitions(\n", @@ -409,9 +403,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Generate data" ] diff --git a/docs/report/014.ipynb b/docs/report/014.ipynb index c7324d38..78aaaf24 100644 --- a/docs/report/014.ipynb +++ b/docs/report/014.ipynb @@ -165,9 +165,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "reaction = qrules.generate_transitions(\n", @@ -215,9 +213,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "builder = ampform.get_builder(reaction)\n", @@ -779,9 +775,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "assert formulate_intensity(reaction).doit() == full_expression.doit()" @@ -1105,9 +1099,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "This will become problematic when lambdifying, because it results in an additional argument in the signature of the generated function:" ] @@ -1240,9 +1232,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Inserting dynamics" ] @@ -1266,9 +1256,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "def formulate_intensity_with_dynamics(\n", diff --git a/docs/report/015.ipynb b/docs/report/015.ipynb index 17a13877..9d5550cd 100644 --- a/docs/report/015.ipynb +++ b/docs/report/015.ipynb @@ -85,9 +85,7 @@ }, { "cell_type": "markdown", - "metadata": { - 
"tags": [] - }, + "metadata": {}, "source": [ "## Helicity formalism" ] diff --git a/docs/report/016.ipynb b/docs/report/016.ipynb index 5a5ae86f..26e5b05f 100644 --- a/docs/report/016.ipynb +++ b/docs/report/016.ipynb @@ -29,9 +29,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "# Lambdifying a symbolic integral" ] @@ -80,18 +78,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Numerical integration" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "(quad)=\n", "### SciPy's `quad()` function" @@ -99,9 +93,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "SciPy's {func}`scipy.integrate.quad` cannot integrate complex-valued functions:" ] @@ -125,9 +117,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "A [proposed solution](https://stackoverflow.com/a/5966088) is to wrap the {func}`~scipy.integrate.quad` function in a special integrate function that integrates the real and imaginary part of a function separately:" ] @@ -135,9 +125,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "def complex_integrate(func, a, b, **quad_kwargs):\n", @@ -160,9 +148,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{warning}\n", "\n", @@ -173,9 +159,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "(quad_vec)=\n", "### SciPy's `quad_vec()` function" @@ -183,9 +167,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "The easiest solution, however, seems to be {func}`scipy.integrate.quad_vec`:" ] @@ -193,9 +175,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "quad_vec(integrand, 0.0, 2.0)" @@ -203,9 +183,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "This has the added benefit that it can handle functions that return arrays:" ] @@ -213,9 +191,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "def gaussian(x, mu, sigma):\n", @@ -229,18 +205,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Integrate with `quadpy`" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{warning}\n", "`quadpy` now requires a license. The examples below are only shown for documentation purposes.\n", @@ -249,9 +221,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "[Alternatively](https://stackoverflow.com/a/42866568), one could use [`quadpy`](https://github.com/sigma-py/quadpy), which essentially does the same as in [`quad()`](#quad), but can also (to a large degree) handle vectorized input and properly handles uncertainties. 
For example:\n", "\n", @@ -274,9 +244,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{note}\n", "\n", @@ -292,18 +260,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## SymPy integral" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "The dispersion integral from Eq. {eq}`dispersion-integral` in **[TR-003](003.ipynb)** features a variable $s$ that is an argument to the function $\\Sigma_a$. This becomes a challenge when $s$ gets vectorized (in this case: gets an event-wise {obj}`numpy.array` of invariant masses). It seems that [`quad_vec()`](#quad_vec) can handle this well though." ] @@ -332,9 +296,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "We now attempt to design [SymPy](https://docs.sympy.org) expression classes that correctly {func}`~sympy.utilities.lambdify.lambdify` using this **vectorized** numerical integral that handles complex values. Note that this integral expression class derives from {class}`sympy.Integral ` and that:\n", "\n", @@ -347,9 +309,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "class UnevaluatableIntegral(sp.Integral):\n", @@ -386,9 +346,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "To test whether this works, we apply this integral expression to another {func}`~ampform.sympy.unevaluated` expression:" ] @@ -428,9 +386,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "w, a, b = sp.symbols(\"w a b\")\n", @@ -440,9 +396,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Indeed the expression lambdifies correctly, despite the {meth}`~sympy.core.basic.Basic.doit` call:" ] @@ -450,9 +404,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "func = sp.lambdify([x, omega1, omega2, phi1, phi2], expr.doit())\n", @@ -482,9 +434,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "domain = np.linspace(-7, +7, num=500)\n", @@ -524,9 +474,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{tip}\n", "See how this integral expression class is applied to the phase space factor in **[TR-003](003.ipynb)**.\n", diff --git a/docs/report/017.ipynb b/docs/report/017.ipynb index adbee545..bf3a838b 100644 --- a/docs/report/017.ipynb +++ b/docs/report/017.ipynb @@ -507,9 +507,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "{{ run_interactive }}\n", "\n", @@ -530,9 +528,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "sol1, sol2 = sp.solve(kibble.doit().subs(s3, computed_s3), s2)" ] }, diff --git a/docs/report/018.ipynb b/docs/report/018.ipynb index f0015966..d56e1065 100644 --- a/docs/report/018.ipynb +++ b/docs/report/018.ipynb @@ -92,9 +92,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "import qrules\n", @@ -391,9 +389,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - 
"tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "from tensorwaves.function.sympy import create_parametrized_function\n", diff --git a/docs/report/021.ipynb b/docs/report/021.ipynb index d174848d..0db2b6e1 100644 --- a/docs/report/021.ipynb +++ b/docs/report/021.ipynb @@ -10,9 +10,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "" ] @@ -187,9 +185,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Amplitude model\n", "\n", @@ -251,9 +247,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Resonances and LS-scheme" ] @@ -313,8 +307,7 @@ "metadata": { "mystnb": { "code_prompt_show": "Select resonances" - }, - "tags": [] + } }, "outputs": [], "source": [ @@ -1221,9 +1214,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Helicity coupling values" ] @@ -1555,9 +1546,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "def to_index(helicity):\n", @@ -1703,9 +1692,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "intensity_func = create_parametrized_function(\n", @@ -1718,9 +1705,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "polarimetry_funcs = tuple(\n", @@ -1783,9 +1768,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "kinematic_variables = {\n", @@ -1809,8 +1792,7 @@ "metadata": { "jupyter": { "source_hidden": true - }, - "tags": [] + } }, "outputs": [], "source": [ @@ -2258,7 +2240,7 @@ "source": [ "# Slider construction\n", "sliders = {}\n", - "for symbol, _value in free_parameters.items():\n", + "for symbol in free_parameters:\n", " if symbol.name.startswith(R\"\\mathcal{H}\"):\n", " real_slider = create_slider(symbol)\n", " imag_slider = create_slider(symbol)\n", diff --git a/docs/report/022.ipynb b/docs/report/022.ipynb index 939c2240..f3db0266 100644 --- a/docs/report/022.ipynb +++ b/docs/report/022.ipynb @@ -179,9 +179,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "ref = REFERENCE_SUBSYSTEM\n", @@ -240,9 +238,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Functions and data" ] @@ -660,9 +656,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "α_abs = jnp.sqrt(jnp.sum(B_norm[1:, 0] ** 2, axis=0))\n", diff --git a/docs/report/023.ipynb b/docs/report/023.ipynb index 0f42cf37..b45cd652 100644 --- a/docs/report/023.ipynb +++ b/docs/report/023.ipynb @@ -31,9 +31,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "# 3D plots with Plotly\n", "" diff --git a/docs/report/024.ipynb b/docs/report/024.ipynb index bc4f41fb..3322ba3d 100644 --- a/docs/report/024.ipynb +++ b/docs/report/024.ipynb @@ -30,9 +30,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "# Symbolic model serialization" ] @@ -87,18 +85,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Expression trees" ] }, { "cell_type": 
"markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "SymPy expressions are built up from symbols and mathematical operations as follows:" ] @@ -106,9 +100,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "x, y, z = sp.symbols(\"x y z\")\n", @@ -118,9 +110,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "In the back, SymPy represents these expressions as **trees**. There are a few ways to visualize this for this particular example:" ] @@ -128,9 +118,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "sp.printing.tree.print_tree(expression, assumptions=False)" @@ -161,9 +149,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Expression trees are powerful, because we can use them as templates for any human-readable presentation we are interested in. In fact, the LaTeX representation that we saw when constructing the expression was generated by SymPy's LaTeX printer." ] @@ -171,9 +157,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "src = sp.latex(expression)\n", @@ -182,9 +166,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{hint} SymPy expressions can serve as a template for generating code!\n", ":::\n", @@ -237,7 +219,7 @@ "```\n", "```rust\n", "// Rust\n", - "{sp.rust_code(expression)} \n", + "{sp.rust_code(expression)}\n", "```\n", "```xml\n", "\n", @@ -249,18 +231,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Foldable expressions" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "The previous example is quite simple, but SymPy works just as well with huge expressions, as we will see in [Large expressions](#large-expressions). Before, though, let's have a look how to define these larger expressions in such a way that we can still read them. A nice solution is to define {class}`sp.Expr ` classes with the `@unevaluated` decorator (see [ComPWA/ampform#364](https://github.com/ComPWA/ampform/issues/364)). Here, we define a Chew-Mandelstam function $\\rho^\\text{CM}$ for $S$-waves. This function requires the definition of a break-up momentum $q$." ] @@ -268,9 +246,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "@unevaluated(real=False)\n", @@ -305,9 +281,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "We now have a very clean mathematical representation of how the $\\rho^\\text{CM}$ function is defined in terms of $q$:" ] @@ -315,9 +289,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "s, m1, m2 = sp.symbols(\"s m1 m2\")\n", @@ -328,9 +300,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Now, let's build up a more complicated expression that contains this phase space factor. 
Here, we use SymPy to derive a Breit-Wigner using a single-channel [$K$ matrix](https://doi.org/10.1002/andp.19955070504) {cite}`Chung:1995dx`:" ] @@ -338,9 +308,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "I = sp.Identity(n=1)\n", @@ -353,9 +321,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "T.as_explicit()[0, 0]" @@ -363,9 +329,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Here we need to provide definitions for the matrix elements of $K$ and $\\rho$. A suitable choice is our phase space factor for $S$ waves we defined above:" ] @@ -373,9 +337,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "m0, Γ0, γ0 = sp.symbols(\"m0 Gamma0 gamma0\")\n", @@ -388,8 +350,7 @@ "metadata": { "jupyter": { "source_hidden": true - }, - "tags": [] + } }, "outputs": [], "source": [ @@ -402,9 +363,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "And there we have it! After some [algebraic simplifications](https://docs.sympy.org/latest/tutorials/intro-tutorial/simplification.html), we get a Breit-Wigner with Chew-Mandelstam phase space factor for $S$ waves:" ] @@ -412,9 +371,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "T_expr = T.as_explicit().xreplace(substitutions)\n", @@ -424,9 +381,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "The expression tree now has a node that is 'folded':" ] @@ -456,9 +411,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "After unfolding, we get the full expression tree of fundamental mathematical operations:" ] @@ -482,18 +435,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Large expressions" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Here, we import the large symbolic intensity expression that was used for [![10.1007/JHEP07(2023)228]()]() and see how well SymPy serialization performs on a much more complicated model." ] @@ -501,9 +450,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "DATA_DIR = Path(polarimetry.__file__).parent / \"lhcb\"\n", @@ -536,18 +483,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Serialization with `srepr`" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "SymPy expressions can directly be serialized to Python code as well, with the function [`srepr()`](https://docs.sympy.org/latest/modules/printing.html#sympy.printing.repr.srepr). 
For the full intensity expression, we can do so with:" ] @@ -555,9 +498,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "%%time\n", @@ -595,9 +536,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "It is up to the user, however, to import the classes of each exported node before the string can be unparsed with [`eval()`](https://docs.python.org/3/library/functions.html#eval) (see [this comment](https://github.com/ComPWA/polarimetry/issues/20#issuecomment-1809840854))." ] @@ -617,9 +556,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "In the case of this intensity expression, it is sufficient to import all definitions from the main `sympy` module and the `Str` class. Optionally, the required `import` statements can be embedded into the string:" ] @@ -627,9 +564,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "exec_str = f\"\"\"\\\n", @@ -644,9 +579,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "exec_filename = Path(\"../_static/exported_intensity_model.py\")\n", @@ -672,9 +605,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "The parsing is then done with [`exec()`](https://docs.python.org/3/library/functions.html#exec) instead of the [`eval()`](https://docs.python.org/3/library/functions.html#eval) function:" ] @@ -685,8 +616,7 @@ { "cell_type": "code", "execution_count": null, "metadata": { "jupyter": { "source_hidden": true - }, - "tags": [] + } }, "outputs": [], "source": [ @@ -698,9 +628,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Notice how the imported expression is **exactly the same** as the serialized one, including assumptions:" ] @@ -708,9 +636,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "assert imported_intensity_expr == unfolded_intensity_expr\n", @@ -719,18 +645,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Common sub-expressions" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "A problem is that the expression generated with [`srepr()`](https://docs.sympy.org/latest/modules/printing.html#sympy.printing.repr.srepr) is not human-readable in practice for large expressions. One way out may be to extract common components of the main expression with [Foldable expressions](#foldable-expressions). Another may be to use SymPy to [detect and collect common sub-expressions](https://docs.sympy.org/latest/modules/rewriting.html#common-subexpression-detection-and-collection)." 
] @@ -738,9 +660,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "sub_exprs, common_expr = sp.cse(unfolded_intensity_expr, order=\"none\")" ] @@ -781,9 +701,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "This already works quite well with {func}`sp.lambdify ` (without `cse=True`, this would take minutes):" ] @@ -791,9 +709,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "%%time\n", @@ -804,18 +720,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Still, as can be seen above, there are many sub-expressions that have exactly the same form. It would be better to find those expressions that have a similar structure, so that we can serialize them to functions or custom sub-definitions." ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "In SymPy, the equivalence between the expressions can be determined by the [`match()`](https://docs.sympy.org/latest/modules/core.html#sympy.core.basic.Basic.match) method using [`Wild`](https://docs.sympy.org/latest/modules/core.html#sympy.core.symbol.Wild) symbols. We therefore first have to make all symbols in the common sub-expressions 'wild'. In addition, in the case of this intensity expression, some of the symbols are [indexed](https://docs.sympy.org/latest/modules/tensor/indexed.html) and need to be replaced first." ] @@ -823,9 +735,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "pure_symbol_expr = unfolded_intensity_expr.replace(\n", @@ -837,9 +747,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Note that for example the following two common sub-expressions are equivalent:" ] @@ -859,9 +767,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "[`Wild`](https://docs.sympy.org/latest/modules/core.html#sympy.core.symbol.Wild) symbols now allow us to find how these expressions relate to each other." ] @@ -869,9 +775,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "is_symbol = lambda z: isinstance(z, sp.Symbol)\n", @@ -882,9 +786,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{hint}\n", "This can be used to define functions for larger, common expression blocks.\n", diff --git a/docs/report/025.ipynb b/docs/report/025.ipynb index 58edec15..c5ffc368 100644 --- a/docs/report/025.ipynb +++ b/docs/report/025.ipynb @@ -29,9 +29,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "# Rotating square root cuts" ] @@ -105,9 +103,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{seealso} [Lecture 17](https://compwa.github.io/strong2020-salamanca/lecture17.html) on collision theory of the [STRONG2020 HaSP School](https://indico.ific.uv.es/event/6803) by Miguel Albaladejo.\n", "\n", @@ -118,9 +114,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "In the figure below we see the Riemann surface of a square root in $\\mathbb{C}^2$ space. 
The $xy$ plane forms the complex domain $\\mathbb{C}$, the $z$ axis indicates the imaginary part of the Riemann surface and the color indicates the real part." ] @@ -177,9 +171,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "From this figure it becomes clear that it is impossible to define one **single-valued** function that gives the solution to $w^2 = u$ if $w \\neq 0$. The familiar single-valued square root operation $\\sqrt{}$ covers only one segment, or **sheet**, of the Riemann surface and it is defined in such a way that $\\sqrt{-1}=i$. The other half of the surface is covered by $-\\sqrt{}$.\n", "\n", @@ -269,9 +261,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "By definition, the branch cut of $\\sqrt{}$ is located at $\\mathbb{R}^-$. There is no requirement about this definition though: we can segment the Riemann surface in any way into two sheets, as long as the sheets remain single-valued. One option is to **rotate** the cut. With the following definition, we have a single-valued square-root function, where the cut is rotated over an angle $\\phi$ around $z=0$." ] @@ -359,14 +349,14 @@ "ax1im, ax2im = axes[:, 1]\n", "ax_bar = axes[1, 2]\n", "ax1re.set_ylabel(f\"${sp.latex(expr)}$\")\n", - "ax1im.set_title(f\"$\\mathrm{{Im}}\\,{sp.latex(expr)}$\")\n", - "ax1re.set_title(f\"$\\mathrm{{Re}}\\,{sp.latex(expr)}$\")\n", - "ax2re.set_ylabel(\"$\\mathrm{Im}\\,z$\")\n", + "ax1im.set_title(Rf\"$\\mathrm{{Im}}\\,{sp.latex(expr)}$\")\n", + "ax1re.set_title(Rf\"$\\mathrm{{Re}}\\,{sp.latex(expr)}$\")\n", + "ax2re.set_ylabel(R\"$\\mathrm{Im}\\,z$\")\n", "for ax in (ax1im, ax1re):\n", " ax.set_yticks([-1, -0.5, 0, +0.5, +1])\n", " ax.set_yticklabels([\"-1\", R\"$-\\frac{1}{2}$\", \"0\", R\"$+\\frac{1}{2}$\", \"+1\"])\n", "for ax in axes[:, :2].flatten():\n", - " ax.set_xlabel(\"$\\mathrm{Re}\\,z$\")\n", + " ax.set_xlabel(R\"$\\mathrm{Re}\\,z$\")\n", " ax.set_xticks([-1, 0, +1])\n", " ax.set_xticklabels([\"-1\", \"0\", \"+1\"])\n", " ax.set_yticks([-1, 0, +1])\n", @@ -446,9 +436,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{note}\n", "The real part does not have a cut if $\\phi = 2\\pi n, n \\in \\mathbb{Z}$. The cut in the imaginary part disappears if $\\phi = \\pi + 2\\pi n$.\n", diff --git a/docs/report/026.ipynb b/docs/report/026.ipynb old mode 100755 new mode 100644 index e2942936..e0ee3d1f --- a/docs/report/026.ipynb +++ b/docs/report/026.ipynb @@ -30,18 +30,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "# Single-channel Riemann sheets" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "The $T$ function can be extended into the complex plane. This results in $2^n$ Riemann sheets for $n$ channels, each starting at the threshold $s_{thr}=(m_1+m_2)^{2}$ of the two final state particles, the so-called branching point of the respective channel, and extending along the so-called branch cut on the real axis to $+\\infty$, where the function is not uniquely defined. This choice of the direction of the branch cut is most commonly used in particle physics. The physical Riemann sheet is defined for positive imaginary part (1st quadrant of the complex plane) and the unphysical Riemann sheets are only defined for negative imaginary part (4th quadrant of the complex plane). 
For the single-channel case there are two Riemann sheets, one physical and one unphysical." ] @@ -95,9 +91,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Phase space factor definitions" ] @@ -192,9 +186,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## T matrix definition with K matrix" ] @@ -210,8 +202,7 @@ "cell_type": "code", "execution_count": null, "metadata": { - "cellView": "form", - "tags": [] + "cellView": "form" }, "outputs": [], "source": [ @@ -225,9 +216,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "T1 = (I + sp.I * K * CM).inv() * K\n", @@ -237,9 +226,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "T1_explicit = T1.as_explicit()\n", @@ -279,9 +266,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "T1_expr.simplify(doit=False)" ] @@ -337,9 +322,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "rho = sp.MatrixSymbol(\"rho\", n, n)\n", @@ -364,9 +347,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "T2_expr.simplify(doit=False)" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Visualization of the 2 dimensional lineshape" ] @@ -479,9 +458,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "The amplitude on the second sheet is only defined for $s$ with positive real part and negative imaginary part. It inherits the analytic structure of the phasespace factor $\\rho$ (the branch cut starting from zero and from $s=s_{thr}$ on the real axis). So it is only defined up to the closest branch cut, which in this case is the cut at $s=s_{thr}$." ] @@ -619,18 +596,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "The lineshape, the part that is observed within the experiment, is given as the intersection of the Riemann sheets with the real plane. Also note that the second Riemann sheet transitions smoothly into the first one. 
" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{attention}\n", ":name: Discontinuity\n", diff --git a/docs/report/027.ipynb b/docs/report/027.ipynb old mode 100755 new mode 100644 index 688f228f..f3ec3b97 --- a/docs/report/027.ipynb +++ b/docs/report/027.ipynb @@ -30,9 +30,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "# Coupled channel Riemann sheets\n" ] @@ -139,9 +137,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Expression definitions" ] @@ -266,18 +262,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Riemann sheet I" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Matrix definition" ] @@ -312,9 +304,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "T_I = (I - sp.I * K * CM).inv() * K\n", @@ -324,9 +314,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "T_I_explicit = T_I.as_explicit()\n", @@ -335,9 +323,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Parametrization" ] @@ -404,9 +390,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "T_I_cm_expr = T_I_explicit.xreplace(cm_expressions)\n", @@ -415,18 +399,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Sheets II, III, and IV" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "In the case of two channels, there are four Riemann sheets. The first sheet ([Sheet I](#riemann-sheet-i)) is physical and three unphysical ones. The physical sheet is calculated using the analytic solution of the Chew-Mandelstam function.\n", "\n", @@ -520,9 +500,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "rho_expressions_II = {\n", @@ -559,9 +537,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "T_II_rho_expr[0, 0].simplify(doit=False)" @@ -570,9 +546,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "T_III_rho_expr[0, 0].simplify(doit=False)" @@ -696,18 +670,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Complex plane (2D)" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "It can be shown that if the resonance mass is above both thresholds the third sheet connects smoothly to the first sheet. If the resonance mass is above the first and below the second threshold the second sheet transitions smoothly into the first sheet." 
] @@ -785,8 +755,7 @@ { "cell_type": "markdown", "metadata": { - "jp-MarkdownHeadingCollapsed": true, - "tags": [] + "jp-MarkdownHeadingCollapsed": true }, "source": [ "### Riemann sheets (3D)" diff --git a/docs/report/028.ipynb b/docs/report/028.ipynb old mode 100755 new mode 100644 index 70be8d29..d5728a27 --- a/docs/report/028.ipynb +++ b/docs/report/028.ipynb @@ -29,9 +29,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "# PDG Python API: decay query" ] @@ -65,9 +63,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "import pdg\n", @@ -77,9 +73,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "This creates a [`PdgApi`](https://pdgapi.lbl.gov/doc/pdg.api.html#pdg.api.PdgApi) instance containing the following type of objects:" ] @@ -87,9 +81,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "{type(obj) for obj in PDG.get_all()}" @@ -97,9 +89,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "In this example, we ask the question **which particles can decay to a final state with three equal particles?** For this, we use [`PdgBranchingFraction`](https://pdgapi.lbl.gov/doc/pdg.decay.html#pdg.decay.PdgBranchingFraction)s, which contain information about particle decays in their [`description`](https://pdgapi.lbl.gov/doc/pdg.data.html#pdg.data.PdgData.description):" ] @@ -117,9 +107,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "jpsi_decay.description" @@ -127,9 +115,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "So, let's pull all the decay descriptions from the PDG and do some clean up with {meth}`str.strip` and {obj}`set`:" ] @@ -137,9 +123,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "from pdg.decay import PdgBranchingFraction\n", @@ -151,9 +135,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "To get more insight into the decay products, we create a new {obj}`set` of decay descriptions, but now describe each item as an initial state with a {obj}`tuple` of decay products. We again have to do a bit of cleaning here. The final state description sometimes contains digits, like `\"3pi0\"`, which we want to be rendered as `(\"pi0\", \"pi0\", \"pi0\")`.\n", "\n", @@ -201,16 +183,15 @@ "for description in decay_descriptions:\n", " initial_state, *final_states = description.split(\" --> \")\n", " initial_state = initial_state.strip()\n", - " for final_state in final_states:\n", - " decays.add((initial_state, create_final_state(final_state)))\n", + " decays.update(\n", + " (initial_state, create_final_state(final_state)) for final_state in final_states\n", + " )\n", "len(decays)" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Now selecting the three-body decays is an easy matter using filters on comprehensions." 
] @@ -218,9 +199,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "three_body_decays = {\n", @@ -234,9 +213,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "equal_state_3body_decays = {\n", @@ -249,9 +226,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Finally, and optionally, we can filter out final states that are not well defined, such as `g g g`, by checking whether they are defined in the PDG database." ] @@ -281,9 +256,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{warning}\n", "Not all final state in the [`description`](https://pdgapi.lbl.gov/doc/pdg.data.html#pdg.data.PdgData.description)s can be programmatically deciphered as individual particles. One could try to use [regular expressions](https://docs.python.org/3/howto/regex.html), but it's hard to cover all cases. Consider for instance the following case which contains $S$ and $D$ waves.\n", @@ -293,9 +266,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "[dec for dec in decay_descriptions if dec.startswith(\"a_1(1260)\")]" @@ -303,9 +274,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Additionally, not all decays seem to be included. Here is an attempt to find $J/\\psi \\to \\pi^0 \\pi^0 \\pi^0$." ] diff --git a/docs/report/029.ipynb b/docs/report/029.ipynb index de528a8c..04ae7988 100644 --- a/docs/report/029.ipynb +++ b/docs/report/029.ipynb @@ -2,9 +2,7 @@ "cells": [ { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "```{autolink-concat}\n", "```" @@ -30,9 +28,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "# Blatt–Weisskopf from Hankel function" ] @@ -83,9 +79,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "As of AmpForm [v0.15](https://github.com/ComPWA/ampform/releases/tag/0.15.1), the implementation of [`BlattWeisskopfSquared`](https://ampform.readthedocs.io/0.15.x/api/ampform.dynamics/#ampform.dynamics.BlattWeisskopfSquared) contains hard-coded polynomials, see implementation [here](https://github.com/ComPWA/ampform/blob/0.15.1/src/ampform/dynamics/__init__.py#L66-L134).\n", "The motivation for this can be found in the citations mentioned in [its API documentation](https://ampform.readthedocs.io/0.15.x/api/ampform.dynamics/#ampform.dynamics.BlattWeisskopfSquared).\n", @@ -98,9 +92,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "```{math}\n", "F_\\ell^2(z^2) = \\frac{1}{z^2\\left|h^{(1)}_\\ell\\left(z\\right)\\right|^2}\\,,\n", @@ -109,18 +101,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "where $h_\\ell^{(1)}$ is a Hankel function of the first kind. They also noted that, if $z\\in\\mathbb{R}$," ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "$$\n", "h_\\ell^{(1)}(z) =\n", @@ -134,9 +122,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "In the following, we call $F_\\ell(z)$ the _unnormalized_ Blatt–Weisskopf form factor.\n", "Following Chung and other resources (see e.g. 
{cite}`Chung:1995dx`, p. 415), AmpForm implements a unitless, _normalized_ Blatt–Weisskopf factor $B_L$, meaning that $B_L(1)=1$.[^3]\n", @@ -147,9 +133,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "```{math}\n", "B_L^2(z^2)\n", @@ -175,18 +159,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Hankel function of the first kind" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Built-in SymPy function" ] @@ -232,18 +212,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Custom class definition" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "To implement Equation {eq}`hankel-sum` for the _spherical_ Hankel function, we have to define a custom [`@unevaluated`](https://ampform.readthedocs.io/0.15.x/api/ampform.sympy/#ampform.sympy.unevaluated) expression class.\n", "The following class evaluates to the sum given in Equation {eq}`hankel-sum`.\n", @@ -358,18 +334,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Normalized Blatt–Weisskopf form factor" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "We now have the required expression classes for re-implementing [`BlattWeisskopfSquared`](https://ampform.readthedocs.io/0.15.x/api/ampform.dynamics/#ampform.dynamics.BlattWeisskopfSquared) using Equation {eq}`blatt-weisskopf` (with $z$ as input, instead of $z^2$)." ] @@ -377,9 +349,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "@unevaluated\n", @@ -403,9 +373,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{note}\n", "An explicit [`simplify()`](https://docs.sympy.org/latest/tutorials/intro-tutorial/simplification.html#simplify) is required in order to reproduce the polynomial form upon evaluation.\n", @@ -434,9 +402,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Indeed the polynomials are exactly the same as the [original `BlattWeisskopfSquared`](https://ampform.readthedocs.io/0.15.x/api/ampform.dynamics/#ampform.dynamics.BlattWeisskopfSquared)!" ] @@ -460,18 +426,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Nested doit" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Eventually, the barrier factors take $z=q/q_R$, with $q$ the break-up momentum and $q_R$ an impact factor. Here it becomes crucial that only $\\left|h_\\ell^{(1)}(z)\\right|^2$ is simplified to a polynomial fraction, not $q$ itself. The break-up momentum does need to unfold though." 
] @@ -496,9 +458,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Symbolic angular momentum" ] @@ -506,9 +466,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "BlattWeisskopfSquared(L, z=q2 / qR**2).doit(deep=False)" @@ -529,9 +487,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Numeric angular momentum" ] @@ -539,9 +495,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "BlattWeisskopfSquared(L=2, z=q2 / qR**2).doit(deep=False)" diff --git a/docs/report/030.ipynb b/docs/report/030.ipynb index bb0bdc35..ed906bb3 100644 --- a/docs/report/030.ipynb +++ b/docs/report/030.ipynb @@ -2,9 +2,7 @@ "cells": [ { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "```{autolink-concat}\n", "```" @@ -32,9 +30,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "# Sub-intensities of P vector" ] @@ -124,9 +120,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Studied decay" ] @@ -181,9 +175,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "reaction = qrules.generate_transitions(\n", @@ -215,9 +207,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Amplitude builder" ] @@ -276,9 +266,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "model_builder = ampform.get_builder(reaction)\n", @@ -337,27 +325,21 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Dynamics parametrization" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Phasespace factor" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{seealso}\n", "**[TR-026](./026.ipynb)** and **[TR-027](./027.ipynb)** on analyticity and Riemann sheets.\n", @@ -446,9 +428,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Relativistic Breit-Wigner" ] @@ -456,9 +436,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "PARAMETERS_BW = dict(model.parameter_defaults)" @@ -467,9 +445,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "def formulate_breit_wigner(\n", @@ -523,9 +499,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### $P$ vector" ] @@ -533,9 +507,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "PARAMETERS_F = dict(model.parameter_defaults)" @@ -544,9 +516,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "def formulate_k_matrix(\n", @@ -566,9 +536,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "def formulate_p_vector(\n", @@ -590,9 +558,7 @@ { "cell_type": "code", 
"execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "def formulate_f_vector(\n", @@ -637,18 +603,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Create numerical functions" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "#### Amplitude model function" ] @@ -656,9 +618,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "full_expression_bw = perform_cached_doit(model_bw.expression).xreplace(\n", @@ -674,9 +634,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "full_expression_fvector = perform_cached_doit(model_fvector.expression).xreplace(\n", @@ -691,9 +649,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "#### Dynamics function" ] @@ -737,9 +693,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "dynamics_func_bw = create_parametrized_function(\n", @@ -752,9 +706,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "dynamics_func_fvector = create_parametrized_function(\n", @@ -766,18 +718,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Generate data" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Generate phase space sample" ] @@ -785,9 +733,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "rng = TFUniformRealNumberGenerator(seed=0)\n", @@ -805,9 +751,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Update function parameters" ] @@ -815,9 +759,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "m_res1 = 1.82\n", @@ -829,9 +771,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "toy_parameters_bw = {\n", @@ -847,9 +787,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "toy_parameters_fvector = {\n", @@ -866,18 +804,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Plots" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Sub-intensities" ] @@ -926,9 +860,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "total_intensities_bw = intensity_func_bw(phsp)\n", @@ -947,9 +879,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "total_intensities_fvector = intensity_func_fvector(phsp)\n", @@ -1077,9 +1007,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Argand plots" ] @@ -1087,9 +1015,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "ε = 1e-8\n", @@ -1100,9 +1026,7 @@ { "cell_type": "code", 
"execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "total_dynamics_bw = dynamics_func_bw(plot_data)\n", @@ -1132,9 +1056,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "x1 = np.linspace(2.0, (m_res1**2 + m_res2**2) / 2, num=500)\n", @@ -1237,9 +1159,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Phase" ] @@ -1247,9 +1167,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "total_phase_bw = np.angle(total_dynamics_bw)\n", diff --git a/docs/report/031.ipynb b/docs/report/031.ipynb index 57cb8c89..3f36fe4d 100644 --- a/docs/report/031.ipynb +++ b/docs/report/031.ipynb @@ -2,9 +2,7 @@ "cells": [ { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "```{autolink-concat}\n", "```" @@ -32,9 +30,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "# P-vector model fit, single channel" ] @@ -130,9 +126,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Studied decay" ] @@ -189,9 +183,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "reaction = qrules.generate_transitions(\n", @@ -223,9 +215,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Amplitude builder" ] @@ -284,9 +274,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "model_builder = ampform.get_builder(reaction)\n", @@ -345,27 +333,21 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Dynamics parametrization" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Phasespace factor" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{seealso}\n", "**[TR-026](./026.ipynb)** and **[TR-027](./027.ipynb)** on analyticity and Riemann sheets.\n", @@ -454,9 +436,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Relativistic Breit-Wigner" ] @@ -464,9 +444,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "PARAMETERS_BW = dict(model.parameter_defaults)" @@ -475,9 +453,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "def formulate_breit_wigner(\n", @@ -531,9 +507,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### $P$ vector" ] @@ -541,9 +515,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "PARAMETERS_F = dict(model.parameter_defaults)" @@ -552,9 +524,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "def formulate_k_matrix(\n", @@ -574,9 +544,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "def formulate_p_vector(\n", @@ -598,9 +566,7 @@ { "cell_type": "code", 
"execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "def formulate_f_vector(\n", @@ -645,9 +611,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Create numerical functions" ] @@ -655,9 +619,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "full_expression_bw = perform_cached_doit(model_bw.expression).xreplace(\n", @@ -673,9 +635,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "full_expression_fvector = perform_cached_doit(model_fvector.expression).xreplace(\n", @@ -690,18 +650,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Generate data" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Generate phase space sample" ] @@ -709,9 +665,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "rng = TFUniformRealNumberGenerator(seed=0)\n", @@ -729,9 +683,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Update function parameters" ] @@ -739,9 +691,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "toy_parameters_bw = {\n", @@ -760,9 +710,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "toy_parameters_fvector = {\n", @@ -784,9 +732,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Plot sub-intensities" ] @@ -835,9 +781,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "total_intensities_bw = intensity_func_bw(phsp)\n", @@ -856,9 +800,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "total_intensities_fvector = intensity_func_fvector(phsp)\n", @@ -986,9 +928,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Weighted data with $F$ vector " ] @@ -1023,9 +963,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "weighted_phsp_generator = TFWeightedPhaseSpaceGenerator(\n", @@ -1076,18 +1014,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Perform fit" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Estimator definition" ] @@ -1095,9 +1029,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "estimator_bw = UnbinnedNLL(\n", @@ -1116,9 +1048,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Initial parameters" ] @@ -1203,9 +1133,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "initial_parameters_beta = {\n", @@ -1239,9 +1167,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ 
"original_parameters_bw = dict(intensity_func_bw.parameters)\n", @@ -1253,9 +1179,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Optimize parameters" ] @@ -1263,9 +1187,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "minuit2 = Minuit2()" @@ -1304,9 +1226,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "intensity_func_fvector.update_parameters(fit_result_fvector.parameter_values)\n", @@ -1316,9 +1236,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Fit result comparison" ] @@ -1367,9 +1285,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "#### P vector" ] @@ -1377,9 +1293,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "compute_aic_bic(fit_result_fvector)" @@ -1407,9 +1321,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "#### Breit–Wigner" ] @@ -1417,9 +1329,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "compute_aic_bic(fit_result_bw)" diff --git a/docs/report/032.ipynb b/docs/report/032.ipynb index 1bdf2969..963f9dfe 100644 --- a/docs/report/032.ipynb +++ b/docs/report/032.ipynb @@ -2,9 +2,7 @@ "cells": [ { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "```{autolink-concat}\n", "```" @@ -32,9 +30,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "# P-vector fit comparison" ] @@ -138,9 +134,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Studied decay" ] @@ -194,9 +188,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "FINAL_STATES: list[tuple[str, ...]] = [\n", @@ -238,9 +230,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Amplitude builder" ] @@ -299,9 +289,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "MODELS: list[HelicityModel] = []\n", @@ -362,27 +350,21 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Dynamics parametrization" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Phasespace factor" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{seealso}\n", "**[TR-026](./026.ipynb)** and **[TR-027](./027.ipynb)** on analyticity and Riemann sheets.\n", @@ -471,9 +453,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### $K$-matrix formalism" ] @@ -481,9 +461,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "n_channels = len(REACTIONS)\n", @@ -543,8 +521,7 @@ "metadata": { "jupyter": { "source_hidden": true - }, - "tags": [] + } }, "outputs": [], "source": [ @@ -561,9 +538,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "#### $K$-matrix 
parametrization" ] @@ -612,9 +587,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "#### $P$-vector parametrization" ] @@ -663,9 +636,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "#### Phase space factor parametrization" ] @@ -710,18 +681,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### $F$-vector construction" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{note}\n", "For some reason one has to leave out the multiplication of $\\rho$ by $i$ within the calculation of the $F$ vector\n", @@ -731,9 +698,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "F = (I - sp.I * K * rho).inv() * P\n", @@ -770,9 +735,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Create numerical functions" ] @@ -807,9 +770,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "FULL_EXPRESSIONS_FVECTOR = [\n", @@ -823,9 +784,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "INTENSITY_FUNCS_FVECTOR = [\n", @@ -855,9 +814,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "HELICITY_TRANSFORMERS = [\n", @@ -869,9 +826,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "PHSP = []\n", @@ -1021,9 +976,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Toy data sample" ] @@ -1031,9 +984,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "DATA = []\n", @@ -1088,18 +1039,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "## Perform fit" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Estimator definition" ] @@ -1107,9 +1054,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "class EstimatorSum(Estimator):\n", @@ -1128,9 +1073,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "combined_estimators = EstimatorSum(\n", @@ -1146,9 +1089,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Initial parameters " ] @@ -1259,9 +1200,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Optimize parameters" ] @@ -1269,9 +1208,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "minuit2 = Minuit2()\n", @@ -1300,9 +1237,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Fit quality check" ] @@ -1352,9 +1287,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "compute_aic_bic(fit_result)" diff --git a/docs/report/033.ipynb b/docs/report/033.ipynb index 1a13d126..2ab6d5ea 100644 
--- a/docs/report/033.ipynb +++ b/docs/report/033.ipynb @@ -36,9 +36,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "# Amplitude Analysis 101 (PWA 101)" ] @@ -58,9 +56,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{admonition} Abstract\n", "This document introduces Amplitude Analysis / Partial Wave Analysis (PWA) by demonstrating its application to a specific reaction channel and amplitude model. It aims to equip readers with a basic understanding of the full workflow and methodologies of PWA in hadron physics through a practical, hands-on example. Only basic Python programming and libraries (e.g. [`numpy`](https://numpy.org/doc/stable), [`scipy`](https://docs.scipy.org/doc/scipy), etc.) are used to illustrate the more fundamental steps in a PWA. Calculations with 4-vectors in this report are performed with the [`vector`](https://vector.readthedocs.io/en/latest/usage/intro.html) package.\n", @@ -69,9 +65,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{seealso}\n", "A follow-up tutorial, [PWA101 v2.0](https://compwa.github.io/gluex-nstar), is being prepared in [ComPWA/gluex-nstar#13](https://github.com/ComPWA/gluex-nstar/pull/13). Whereas this report focuses on common, numerical Python libraries, v2.0 formulates the amplitude model with a {doc}`symbolic approach`.\n", @@ -151,9 +145,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "(amplitude-model)=\n", "## Amplitude model" @@ -170,9 +162,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "\n", "This amplitude model is adapted from the [Lecture 11 in STRONG2020 HaSP School](https://indico.ific.uv.es/event/6803/contributions/21223/) by Vincent Mathieu.\n", @@ -195,18 +185,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "In the following, we denote $1 \\equiv \\eta, 2 \\equiv \\pi^0, 3 \\equiv p$. Given these three subsystems in this particle transition, we can construct three amplitudes," ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "$$\n", "\\begin{eqnarray}\n", @@ -219,9 +205,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "where $s, t, u$ are the Mandelstam variables ($s_{ij}=(p_i+p_j)^2$, $t_i=(p_a-p_i)^2$, and $u_i=(p_b-p_i)^2$), $m$ is the resonance mass, $\\Gamma$ is the resonance width, $Y^m_l$ are spherical harmonics functions, $\\Omega_i$ are pairs of Euler angles (polar angle $\\theta$ and azimuthal angle $\\phi$) that represent the helicity decay angles, and $a_i$, $b_i$, and $c_i$ are complex-valued coefficients. Note that the Mandelstam variables and angles come from measured events, while the other symbols are parameters that need to be modified in order to have the amplitude model match the data." 
] @@ -237,9 +221,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "The original full amplitude model from the [Lecture 11 in STRONG2020 HaSP School](https://indico.ific.uv.es/event/6803/contributions/21223/) is shown in Equation {eq}`full-model-with-exponential`.\n", "*In this report, only the Breit–Wigner and Spherical harmonics terms are kept, while the exponential factor is abandoned, i.e.*" @@ -247,9 +229,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "$$\n", "\\begin{eqnarray}\n", @@ -262,9 +242,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "The intensity $I$ that describes our measured distributions is then expressed as a coherent sum of the amplitudes $A^{ij}$,\n", "\n", @@ -277,18 +255,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Ultimately, the amplitude model in Equations {eq}`model-dynamics-and-angular` and {eq}`intensity-coherent-sum` in this tutorial consists of three resonances, and each of them are formed by two components: a Breit-Wigner times some spherical harmonics ($l = 2, 1, 0$)." ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{admonition} Assumption: spinless final state!\n", ":class: dropdown\n", @@ -333,8 +307,7 @@ { "cell_type": "markdown", "metadata": { - "jp-MarkdownHeadingCollapsed": true, - "tags": [] + "jp-MarkdownHeadingCollapsed": true }, "source": [ "(breit-wigner-model)=\n", @@ -364,9 +337,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "def BW_model(s12, s23, s31, *, M12, Gamma12, M23, Gamma23, M31, Gamma31, **kwargs):\n", @@ -383,8 +354,7 @@ { "cell_type": "markdown", "metadata": { - "jp-MarkdownHeadingCollapsed": true, - "tags": [] + "jp-MarkdownHeadingCollapsed": true }, "source": [ "(spherical-harmonics-model)=\n", @@ -393,9 +363,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "The calculation of $Y_l^m(\\phi, \\theta)$ is done via [`scipy.special.sph_harm()`](https://docs.scipy.org/doc/scipy/reference/generated/scipy.special.sph_harm.html). However, we use a different definition of $\\phi$ and $\\theta$, following a notation that is more common in hadron physics.\n", "- $\\phi$ is the **azimuthal angle** and ranges from -$\\pi$ to $\\pi$. SciPy represents this as $\\theta$, ranging from $0$ to $2\\pi$.\n", @@ -410,9 +378,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{tip}\n", "Alternatively, we can formulate the spherical harmonics in terms of a Wigner-$d$ or Wigner-$D$ function, as\n", @@ -427,9 +393,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "In the following, we define functions to compute the spherical harmonics of the three subsystem amplitudes in Equation {eq}`model-dynamics-and-angular`. Note how the function signature consists of two input data columns, `theta` and `phi`, and how the rest of the arguments are parameters. The final `kwargs` (key-word arguments) is there so that we can compose larger functions from these function definitions." 
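The θ/φ convention swap described above is easy to get wrong, so before the notebook's own `Ylm12`, `Ylm23`, and `Ylm31` definitions below, here is a minimal, self-contained sketch of it. The wrapper name `Ylm_hadron` is illustrative only; the point is the argument order passed to `scipy.special.sph_harm()`.

```python
import numpy as np
from scipy.special import sph_harm

def Ylm_hadron(l: int, m: int, theta: np.ndarray, phi: np.ndarray) -> np.ndarray:
    """Spherical harmonic Y_l^m in the hadron-physics convention.

    Here, theta is the polar angle and phi the azimuthal angle, whereas
    scipy.special.sph_harm(m, n, theta, phi) expects the azimuthal angle as
    its third argument and the polar angle as its fourth.
    """
    return sph_harm(m, l, phi, theta)

# Example: evaluate Y_2^1 over a few (polar, azimuthal) angle pairs
theta = np.array([0.3, 1.2, 2.0])  # polar angle, 0 to pi
phi = np.array([-1.0, 0.5, 2.5])   # azimuthal angle, -pi to +pi
values = Ylm_hadron(2, 1, theta, phi)
```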
] @@ -437,9 +401,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "def Ylm12(\n", @@ -465,9 +427,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "def Ylm23(\n", @@ -483,9 +443,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "def Ylm31(theta: np.ndarray, phi: np.ndarray, c_0: complex, **kwargs) -> np.ndarray:\n", @@ -515,9 +473,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "def SH_model(phi1, theta1, phi2, theta2, *, c_0, **pars):\n", @@ -527,8 +483,7 @@ { "cell_type": "markdown", "metadata": { - "jp-MarkdownHeadingCollapsed": true, - "tags": [] + "jp-MarkdownHeadingCollapsed": true }, "source": [ "#### Breit-Wigner $\\times$ Spherical Harmonics Model" @@ -558,9 +513,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "def BW_SH_model(\n", @@ -590,8 +543,7 @@ { "cell_type": "markdown", "metadata": { - "jp-MarkdownHeadingCollapsed": true, - "tags": [] + "jp-MarkdownHeadingCollapsed": true }, "source": [ "## Visualization" @@ -607,8 +559,7 @@ { "cell_type": "markdown", "metadata": { - "jp-MarkdownHeadingCollapsed": true, - "tags": [] + "jp-MarkdownHeadingCollapsed": true }, "source": [ "### Phase space generation" @@ -616,9 +567,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "First, however, we need to generate a phase space sample of four-momenta in order to plot the amplitude model as a distribution over each of the variables. In this section, we use the [`phasespace`](https://github.com/zfit/phasespace) package for generating four-momenta for the reaction $p\\gamma \\to p\\eta\\pi^0$. The phase space sample will also be used later on to normalize the model when calculating the likelihood over the data sample (see [](#fit-model))." ] @@ -626,8 +575,7 @@ { "cell_type": "markdown", "metadata": { - "jp-MarkdownHeadingCollapsed": true, - "tags": [] + "jp-MarkdownHeadingCollapsed": true }, "source": [ "#### Lab frame and CM frame" @@ -635,18 +583,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "In Center-of-Mass (CM) frame, the 4-momentum of the total system can be acquired by 4-momentum conservation:" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "$$\\begin{pmatrix}\n", " E_0 \\\\\n", @@ -700,9 +644,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{caution}\n", "The calculation here involved using values from the CM frame. While this frame is commonly used for theoretical calculations, experimental data is often analyzed in the lab frame. However, it's worth noting that oin some collider experiments that do not have a fixed target, the CM frame can coincide with the lab frame.\n", @@ -711,18 +653,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "The [GlueX](http://www.gluex.org/) experiment at Jefferson Lab uses a fixed proton target with a linearly polarized photon beam, and the beam energy range in the lab frame is typically from [**8 to 9 GeV**](https://doi.org/10.7566/JPSCP.26.022002)." 
] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "We use the $\\gamma$ beam energy in the lab frame as input, e.g. $E_{\\gamma, \\text{lab}} = 8.5 \\; \\text{GeV}$, and want to know the collision energy in the CM frame.\n", "\n", @@ -747,9 +685,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "The proton (target) is at rest in the lab frame, so we have\n", "\n", @@ -778,9 +714,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Equivalently, from the **CM frame** perspective, since $\\vec{p}_{\\gamma} = -\\vec{p}_{p}$ and $|\\vec{p}_{\\gamma}| = |\\vec{p}_{p}|= p_{z}$, we find\n", "\n", @@ -806,9 +740,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Our implementation based on Equation {eq}`total-energy-cm` thus becomes:" ] @@ -816,9 +748,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "E_lab_gamma = 8.5\n", @@ -850,9 +780,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Thus, we then have the mass of the system $m_0$ (or the mass of a 'virtual' particle $p\\gamma$) in CM frame of\n", "\n", @@ -864,8 +792,7 @@ { "cell_type": "markdown", "metadata": { - "jp-MarkdownHeadingCollapsed": true, - "tags": [] + "jp-MarkdownHeadingCollapsed": true }, "source": [ "#### Final state four-momenta" @@ -873,9 +800,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "The [`phasespace`](https://github.com/zfit/phasespace) library is a Python package designed to simulate particle decays according to the principles of relativistic kinematics and phase space distributions. We first use the [`phasespace`](https://github.com/zfit/phasespace) to generate decay particles." ] @@ -883,9 +808,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "phsp_events = 500_000\n", @@ -896,9 +819,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Note that each event comes with a statistical weights for each generated event. These weights represent how likely each particular event configuration (set of momenta and energies) is, based on phase space considerations. In order to generate a flat distribution, we will have to use a hit-and-miss method over these weights." ] @@ -968,9 +889,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{admonition} Hit-and-miss sampling\n", ":class: dropdown\n", @@ -991,8 +910,7 @@ { "cell_type": "markdown", "metadata": { - "jp-MarkdownHeadingCollapsed": true, - "tags": [] + "jp-MarkdownHeadingCollapsed": true }, "source": [ "#### Initial state four-momenta" @@ -1000,18 +918,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Because of the simple relation in Equation {eq}`four-momentum-conservation`, the four-momenta for the initial state, $\\gamma$ (a) and $p$ (b), do not have to be generated by a phase space generator." 
] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "To find $p_a$ and $p_b$ by 4-momentum conservation, we can use\n", "\n", @@ -1064,9 +978,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "With this, our numerical implementation to compute four-momentum of $p_a$ and $p_b$ from Equation {eq}`pb-pa-relation` becomes:" ] @@ -1077,8 +989,7 @@ "metadata": { "jupyter": { "source_hidden": true - }, - "tags": [] + } }, "outputs": [], "source": [ @@ -1099,8 +1010,7 @@ { "cell_type": "markdown", "metadata": { - "jp-MarkdownHeadingCollapsed": true, - "tags": [] + "jp-MarkdownHeadingCollapsed": true }, "source": [ "#### Four-momenta of all particles" @@ -1108,9 +1018,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "We now create a function `generate_phsp_all()` to create and compute all particles in phase space all-at-once, combining the two functions from the previous sections into one function." ] @@ -1118,9 +1026,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "def generate_phsp_all(\n", @@ -1136,9 +1042,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "%%time\n", @@ -1150,8 +1054,7 @@ { "cell_type": "markdown", "metadata": { - "jp-MarkdownHeadingCollapsed": true, - "tags": [] + "jp-MarkdownHeadingCollapsed": true }, "source": [ "### Kinematic variable calculation" @@ -1160,8 +1063,7 @@ { "cell_type": "markdown", "metadata": { - "jp-MarkdownHeadingCollapsed": true, - "tags": [] + "jp-MarkdownHeadingCollapsed": true }, "source": [ "#### Spherical coordinate system" @@ -1169,18 +1071,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Before introducing CM and helicity angles, we first introduce **polar angles** and **azimuthal angles** in spherical coordinates." ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "In the spherical coordinates, the **polar angle** $\\theta$ and **azimuthal angle** $\\phi$ are defined as\n", "\n", @@ -1208,18 +1106,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "In Equation {eq}`polar-azimuthal-angle`, $p_z$ is equivalent to $z$, and $|p|$ is $r$ in figure above, while $p_y$ equivalent to $y$, and $p_x$ is $x$." ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "The sample is plotted to check whether the distribution looks uniformly distributed in the Dalitz plane. The Mandelstam variable $s$ of each of the three subsystems can be easily computed with from the four-momentum objects as follows. Here, we use the methods and attributes provided by the [`vector`](https://vector.readthedocs.io) package." 
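The cells that compute these quantities are unchanged and therefore not visible in this diff. Purely as an illustration of the underlying formulas (the notebook itself relies on the `vector` attributes), the Mandelstam variables and the angles of Equation {eq}`polar-azimuthal-angle` can be written out with plain NumPy; all names below are hypothetical.

```python
import numpy as np

def mandelstam_s(E1, p1, E2, p2):
    """Invariant mass squared s_ij = (p_i + p_j)^2 from energies (n,) and 3-momenta (n, 3)."""
    E = E1 + E2
    p = p1 + p2
    return E**2 - np.sum(p**2, axis=-1)

def polar_azimuthal_angles(p):
    """Polar angle theta = arccos(p_z / |p|) and azimuthal angle phi = arctan2(p_y, p_x)."""
    px, py, pz = p[..., 0], p[..., 1], p[..., 2]
    p_abs = np.sqrt(px**2 + py**2 + pz**2)
    return np.arccos(pz / p_abs), np.arctan2(py, px)
```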
] @@ -1291,9 +1185,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{tip}\n", "There are different ways to represent the Dalitz plot, each with its advantages.\n", @@ -1309,8 +1201,7 @@ { "cell_type": "markdown", "metadata": { - "jp-MarkdownHeadingCollapsed": true, - "tags": [] + "jp-MarkdownHeadingCollapsed": true }, "source": [ "#### CM Angles " @@ -1318,18 +1209,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Angles in the CM frame are the polar and azimuthal angles of the spatial components of the four-momenta in the CM frame (i.e. the frame that satisfies the relations in Equation {eq}`four-momentum-conservation`). They are different than the [helicity angles](#helicity-angles) in each subsystem (which is after rotation and boost into the subsystem)." ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "The values for phase space can be obtained directly in the CM frame, without boosting into a different frame after the generation.\n", "We denote these angles as\n", @@ -1342,9 +1229,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "theta1_CM_phsp = p1_phsp.theta\n", @@ -1358,8 +1243,7 @@ { "cell_type": "markdown", "metadata": { - "jp-MarkdownHeadingCollapsed": true, - "tags": [] + "jp-MarkdownHeadingCollapsed": true }, "source": [ "#### Helicity angles" @@ -1367,9 +1251,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{seealso}\n", "- [Section of helicity formalism in **TR-015** (Spin alignment implementation)](https://compwa.github.io/report/015.html#helicity-formalism)\n", @@ -1379,9 +1261,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "The amplitude model shown in Equation {eq}`model-dynamics-and-angular` derives from the helicity formalism. This spin formalism breaks up the transition amplitudes of a decay chain into two-body decays (**isobar model**), which allows separating angular (spin) dependence from dynamics (e.g. Breit–Wigner).\n", "\n", @@ -1390,9 +1270,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{admonition} Helicity states and Wigner matrices\n", ":class: dropdown\n", @@ -1417,9 +1295,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "As can be seen in the amplitude model of Equation {eq}`model-dynamics-and-angular`, that the rotations (represented by $Y^m_l$) contain solid angles $\\Omega=\\phi,\\theta$ (see [spherical coordinates](#spherical-coordinate-system)). They have to be computed in the **helicity frame** of the resonance, which means we have to boost into each of the three subsystems. For instance, for the $a_2$ resonance ($A^{12}$), this would be a boost into subsystem $p_1+p_2$, plus a rotation such that the $z$ axis points in the direction of $p_1+p_2$. The **helicity angles** $\\phi$ and $\\theta$ can then easily be computed from the [spherical coordinates](#spherical-coordinate-system) of the (boosted) $p_1'$ or $p_2'$." 
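The notebook's own `theta_helicity()` in the next section performs this with the `vector` package. As a plain-NumPy illustration of the same idea (a sketch assuming the subsystem momentum is time-like and non-zero in the parent frame; all names are hypothetical), the boost into the subsystem rest frame and the resulting helicity polar angle look as follows. The azimuthal helicity angle additionally requires the rotation mentioned above and is left out here.

```python
import numpy as np

def boost_to_rest_frame(E, p, E_ref, p_ref):
    """Boost four-momenta (E, p) into the rest frame of the reference momenta (E_ref, p_ref).

    E and E_ref have shape (n,), p and p_ref have shape (n, 3). Uses the standard
    Lorentz boost with beta = p_ref / E_ref and gamma = E_ref / m_ref.
    """
    m_ref = np.sqrt(E_ref**2 - np.sum(p_ref**2, axis=-1))
    beta = p_ref / E_ref[..., np.newaxis]
    beta_sq = np.sum(beta**2, axis=-1)  # assumed non-zero (subsystem moves in the parent frame)
    gamma = E_ref / m_ref
    beta_dot_p = np.sum(beta * p, axis=-1)
    E_boosted = gamma * (E - beta_dot_p)
    p_boosted = p + ((gamma - 1) * beta_dot_p / beta_sq - gamma * E)[..., np.newaxis] * beta
    return E_boosted, p_boosted

def cos_theta_helicity(E1, p1, E12, p12):
    """Cosine of the helicity angle of particle 1 in the (1, 2) rest frame.

    The angle is measured between the boosted momentum of particle 1 and the
    direction of flight of the (1, 2) subsystem in the parent frame.
    """
    _, p1_boosted = boost_to_rest_frame(E1, p1, E12, p12)
    return np.sum(p1_boosted * p12, axis=-1) / (
        np.linalg.norm(p1_boosted, axis=-1) * np.linalg.norm(p12, axis=-1)
    )
```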
] @@ -1450,8 +1326,7 @@ { "cell_type": "markdown", "metadata": { - "jp-MarkdownHeadingCollapsed": true, - "tags": [] + "jp-MarkdownHeadingCollapsed": true }, "source": [ "#### Numerical angle computation" @@ -1459,9 +1334,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "To calculate the helicity angles $\\theta$ and $\\phi$, we define functions for boosting a combination of boost and rotation (around the $y$ and $z$ axis)." ] @@ -1469,9 +1342,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "def theta_helicity(p_i: MomentumNumpy4D, p_ij: MomentumNumpy4D) -> np.ndarray:\n", @@ -1491,9 +1362,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "theta1_phsp = theta_helicity(p1_phsp, p12_phsp)\n", @@ -1506,9 +1375,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "The distribution of the phase space sample over the kinematic variables is shown later in [this section](#my_section), together the generated data and weighted phase space (model)." ] @@ -1516,8 +1383,7 @@ { "cell_type": "markdown", "metadata": { - "jp-MarkdownHeadingCollapsed": true, - "tags": [] + "jp-MarkdownHeadingCollapsed": true }, "source": [ "(parameter-values)=\n", @@ -1526,9 +1392,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Now that we have a numerical implementation for the amplitude model of Equation {eq}`model-dynamics-and-angular` and a means to compute the kinematic variables appearing in that expression, we are ready to visualize the model. First, however, we have to decide on some toy values for the parameters in the model. The toy model parameter values can be obtained from the data file from the [Lecture 11 in STRONG2020 HaSP School](https://indico.ific.uv.es/event/6803/contributions/21223/). In this tutorial, the values are modified to make the structures in the Dalitz plot more visible." ] @@ -1536,9 +1400,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "toy_parameters = dict(\n", @@ -1562,9 +1424,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Note that the masses $(M)$ and widths $(\\Gamma)$ are properties of the three resonances.\n", "\n", @@ -1583,8 +1443,7 @@ { "cell_type": "markdown", "metadata": { - "jp-MarkdownHeadingCollapsed": true, - "tags": [] + "jp-MarkdownHeadingCollapsed": true }, "source": [ "(spherical-harmonics-visualization)=\n", @@ -1593,9 +1452,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "We now have a look at the real part and imaginary part of $\\sum a_m Y_2^m (\\Omega_1)$ as well as $\\sum b_m Y_1^m (\\Omega_2)$ in Equation {eq}`model-dynamics-and-angular`. For this, we define a grid of values for $\\phi$ and $\\theta$ over which to visualize the amplitudes." 
] @@ -1603,9 +1460,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "PHI, THETA = np.meshgrid(\n", @@ -1719,8 +1574,7 @@ { "cell_type": "markdown", "metadata": { - "jp-MarkdownHeadingCollapsed": true, - "tags": [] + "jp-MarkdownHeadingCollapsed": true }, "source": [ "### Dalitz Plots of (sub)models" @@ -1728,9 +1582,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Dalitz plots are ideal for visualizing the model over the Mandelstam variables $s_{12}$, $s_{23}$, and $s_{31}$. In the following, we plot each of the 'sub-models' separately: Breit-Wigner (only), Spherical Harmonics (only), and Breit-Wigner $\\times$ Spherical Harmonics.\n", "\n", @@ -1848,9 +1700,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "(generate-data)=\n", "## Data Generation" @@ -1858,9 +1708,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "In an actual amplitude analysis, we are now ready to 'fit' the model we formulated to a measured data distribution. In this tutorial, however, we generate the data ourselves and then perform a test fit using a starting model with some 'guessed' parameter values." ] @@ -1868,8 +1716,7 @@ { "cell_type": "markdown", "metadata": { - "jp-MarkdownHeadingCollapsed": true, - "tags": [] + "jp-MarkdownHeadingCollapsed": true }, "source": [ "### Hit and miss intensity sample" @@ -1877,9 +1724,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "The data can be generated with a similar hit-and-miss strategy as we saw in [](#phase-space-generation). In this case, the hit-and-miss is performed over the intensities computed from the model.\n", "\n", @@ -1985,9 +1830,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{admonition} Hit-and-miss data generation\n", ":class: dropdown\n", @@ -2039,9 +1882,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Now we can use the function `generate_data()` to generate our data sample:" ] @@ -2049,9 +1890,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "data_events = 100_000\n", @@ -2101,8 +1940,7 @@ { "cell_type": "markdown", "metadata": { - "jp-MarkdownHeadingCollapsed": true, - "tags": [] + "jp-MarkdownHeadingCollapsed": true }, "source": [ "### Dalitz plot of data distribution" @@ -2144,9 +1982,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "![](https://github.com/user-attachments/assets/5f22c929-3ea8-49c9-a9ce-5db175b829f1)" ] @@ -2154,8 +1990,7 @@ { "cell_type": "markdown", "metadata": { - "jp-MarkdownHeadingCollapsed": true, - "tags": [] + "jp-MarkdownHeadingCollapsed": true }, "source": [ "(my_section)=\n", @@ -2164,9 +1999,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "The 1D projection distribution of CM angles, the helicity angles and invariant mass of the model, phase space, and data are shown in this section." 
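The projection plots themselves live in unchanged cells and are therefore not shown in this diff. As an illustrative sketch of how such a comparison can be drawn (function and variable names are hypothetical), the model curve is obtained by histogramming the flat phase-space sample with the model intensities as weights, scaled to the number of data events:

```python
import matplotlib.pyplot as plt
import numpy as np

def plot_projection(data_values, phsp_values, model_weights, label):
    """Compare a 1D projection of the data sample with the (weighted phase-space) model."""
    bins = np.histogram_bin_edges(np.concatenate([data_values, phsp_values]), bins=80)
    scale = len(data_values) / np.sum(model_weights)  # normalize the model to the data yield
    fig, ax = plt.subplots(figsize=(5, 3.5))
    ax.hist(data_values, bins=bins, histtype="step", label="data")
    ax.hist(phsp_values, bins=bins, weights=scale * model_weights, histtype="step", label="model")
    ax.set_xlabel(label)
    ax.set_ylabel("events")
    ax.legend()
    plt.show()

# Hypothetical usage with a helicity angle:
# plot_projection(theta1_data, theta1_phsp, intensities_phsp, label=R"$\theta_1$")
```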
] @@ -2907,9 +2740,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "(fit-model)=\n", "## Fitting " @@ -2917,18 +2748,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "We now pretend that we do not know the parameter values for the data distribution. In this particular situation, the parameters (to be fitted) are mass $M$ ($M_{12}$, $M_{23}$, and $M_{31}$) and decay width $\\Gamma$ ($\\Gamma_{12}$, $\\Gamma_{23}$, and $\\Gamma_{31}$). We also guess the coefficient values, but note that we keep one coefficient 'fixed'. The reason is that the model is normalized through the likelihood estimator, so what matters for the fits are the _ratios_ between the parameters." ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Guessed parameter values" ] @@ -2936,9 +2763,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "guessed_parameters = dict(\n", @@ -3098,18 +2923,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Estimator: likelihood function" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "To optimize the model parameters so that the model 'fits' the data distribution, we need to define a measure for the 'distance' between the model to the data. The common choice for complicated, multidimensional models like an amplitude model is the **unbinned negative log likelihood function** (NLL)." ] @@ -3117,9 +2938,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "def unbinned_nll(**parameters: float) -> float:\n", @@ -3150,9 +2969,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{tip}\n", "The phase space (phsp) is used for the normalization calculation for the unbinned log likelihood function\n", @@ -3161,18 +2978,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Optimizer: Minuit" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "As a final step, we find the parameter values that minimize the unbinned Log likelihood function, i.e., we optimize the model with respect to the data distribution. In Python, this can be performed by e.g. [`iminuit`](https://scikit-hep.org/iminuit) package." 
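Before the Minuit setup that follows, here is a minimal sketch of an unbinned NLL with the phase-space normalization mentioned in the tip above. The notebook's own `unbinned_nll()` closes over the global data and model; this standalone variant takes them explicitly and is illustrative only:

```python
import numpy as np

def unbinned_nll_sketch(model, data_columns: dict, phsp_columns: dict, **parameters: float) -> float:
    """Unbinned negative log-likelihood with a phase-space normalization.

    The model intensity is evaluated over the data sample and divided by its mean
    over a flat phase-space sample, which approximates the normalization integral
    of the intensity over phase space.
    """
    data_intensities = model(**data_columns, **parameters)
    phsp_intensities = model(**phsp_columns, **parameters)
    normalization = np.mean(phsp_intensities)
    return -np.sum(np.log(data_intensities / normalization))

# Hypothetical usage:
# nll = unbinned_nll_sketch(BW_SH_model, data_variables, phsp_variables, **guessed_parameters)
```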
] @@ -3180,9 +2993,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "arg_names = tuple(guessed_parameters)\n", @@ -3207,9 +3018,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "optimizer = Minuit(\n", @@ -3252,9 +3061,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Final fit result" ] @@ -3466,9 +3273,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Now we see the fit results of CM angles, Helicity angles and invariant mass as a comparison of data in 1D projection plots" ] @@ -3490,27 +3295,21 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "![](https://github.com/user-attachments/assets/f34d1cbc-81c0-4535-bf94-5346bc4df00e)" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "#### Dalitz plots of phsp, data, and fit result" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "To conclude our analysis in this document, we examine the Dalitz plots for the following cases: phase space, generated data, default parameters of the model, and fitted parameters of the model.\n", "\n", @@ -3595,9 +3394,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "![](https://github.com/user-attachments/assets/9e002900-9e84-414f-bf42-003cc2cf5968)" ] @@ -3605,8 +3402,7 @@ { "cell_type": "markdown", "metadata": { - "jp-MarkdownHeadingCollapsed": true, - "tags": [] + "jp-MarkdownHeadingCollapsed": true }, "source": [ "## Summary" @@ -3614,9 +3410,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "In this document, we have covered the general workflow of PWA, which consists of three major parts:\n", "\n", @@ -3639,18 +3433,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "This structured approach provides a comprehensive understanding of both the reaction dynamics and kinematics, and helps in extracting meaningful physical insights from the data." ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{tip} Extra\n", "have a look for another example of PWA for $J/\\psi \\to \\gamma \\pi^0 \\pi^0$ by using `ComPWA` at [here](https://tensorwaves.readthedocs.io/stable/amplitude-analysis/) .\n", diff --git a/docs/symbolics.ipynb b/docs/symbolics.ipynb index c1252ed0..3776cc80 100644 --- a/docs/symbolics.ipynb +++ b/docs/symbolics.ipynb @@ -25,7 +25,7 @@ }, "outputs": [], "source": [ - "%pip install -q black==24.2.0 sympy==1.12" + "%pip install -q 'numpy~=1.22.4' black==24.2.0 sympy==1.12" ] }, { @@ -82,9 +82,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Even though Python is a popular programming language for data science, it is too slow for performing computations over large data samples. Computations in Python programs are therefore almost always outsourced through third-party Python libraries that are written in C++ or other compiled languages. This leads to an **array-oriented programming** style. Variables represent multidimensional arrays and the computational backend performs the element-wise operations behind-the-scenes. 
This has the additional benefit that the higher level Python code becomes more readable.\n", "\n", @@ -94,9 +92,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "a_lst = list(range(1_000_000))\n", @@ -105,18 +101,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "#### Pure Python loop" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Naively, one could compute $c$ for each data point by creating a list and filling it with $c_i = a_i+b_i^2$." ] @@ -124,9 +116,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "%%timeit\n", @@ -137,27 +127,21 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "`for` loops like these are a natural choice when coming from compiled languages like C++, but are considerably much slower when done with Python." ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "#### Equivalent computation with arrays" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "[NumPy](https://numpy.org) is one of the most popular array-oriented libraries for Python. The data points for $a$ and $b$ are now represented by array objects..." ] @@ -165,9 +149,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "import numpy as np\n", @@ -178,9 +160,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "...and the _array-oriented_ computation of $c = a+b^2$ becomes much **faster** and **more readable**." ] @@ -188,9 +168,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "%%timeit\n", @@ -294,9 +272,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "(flexibility)=\n", "## {material-regular}`draw` Flexibility" @@ -356,9 +332,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Internally, SymPy expressions are built up by applying mathematical operations to algebraic objects, such as symbols and \n", "numbers. In this example, we see how the Breit-Wigner function is built up from four symbols, a complex number, and a few integers. The resulting expression can be visualized as an **expression tree** of fundamental mathematical operations." @@ -401,9 +375,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "substituted_expr = expression.subs({m0: 0.980, Γ0: 0.06, g: 1})\n", @@ -443,9 +415,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Expression trees are not only useful for applying algebraic operations to their nodes. They can also be used as a **template for generating code**. In fact, the $\\LaTeX$ formula is generated using SymPy's $\\LaTeX$ printer:" ] @@ -468,9 +438,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "SymPy provides a [large number of code printers](https://docs.sympy.org/latest/modules/codegen.html) for different languages and human-readable serialization standards. A few examples are shown below." 
] @@ -517,7 +485,7 @@ "```\n", "```rust\n", "// Rust\n", - "{sp.rust_code(expression)} \n", + "{sp.rust_code(expression)}\n", "```\n", "```xml\n", "\n", @@ -537,9 +505,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "full_func = sp.lambdify(args=(s, m0, Γ0, g), expr=expression, modules=\"numpy\")" @@ -570,9 +536,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ "substituted_func = sp.lambdify(args=s, expr=substituted_expr, modules=\"numpy\")" @@ -598,9 +562,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{sidebar}\n", "![SymPy code generation](https://github.com/ComPWA/compwa.github.io/assets/29308176/a1a19f74-b2dd-484f-804f-02da523ed4b7)\n", @@ -609,9 +571,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "The example as described here is small for illustrative purposes. It turns out that code generation works just as well for **expressions with a much larger number of mathematical operations**, even if in the order of hundreds of thousands (see e.g. the [`tensorwaves` amplitude analysis example](https://tensorwaves.readthedocs.io/stable/amplitude-analysis/)). This is exactly what is needed for fitting amplitude models to data.\n", "\n", @@ -620,9 +580,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ ":::{topic} {material-regular}`draw` Flexibility ✔️\n", "A Computer Algebra System provides a simple way to **separate physics from number crunching**. Amplitude models only have to be formulated symbolically, while computations are outsourced to array-oriented, numerical libraries through automated code generation. This provides us a **[Single Source of Truth](https://en.wikipedia.org/wiki/Single_source_of_truth)** for implemented physics models.\n", @@ -633,9 +591,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "(transparency)=\n", "## {material-regular}`local_library` Transparency" @@ -668,18 +624,14 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Model preservation" ] }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "Amplitude analyses are notoriously hard to reproduce. There are often several ways to formulate the same model, the model complexity makes the source code of the analysis model hard to understand, and starting conditions in a fit can lead to completely different fit results. 
Formulating the amplitude models symbolically addresses exactly these difficulties.\n", "\n", @@ -692,9 +644,7 @@ }, { "cell_type": "markdown", - "metadata": { - "tags": [] - }, + "metadata": {}, "source": [ "### Knowledge exchange" ] diff --git a/environment.yml b/environment.yml index bf01f9b7..be07fe4d 100644 --- a/environment.yml +++ b/environment.yml @@ -5,9 +5,9 @@ dependencies: - graphviz # for binder - juliaup - pip - - python==3.10.* + - python==3.12.* - pip: - - -c .constraints/py3.10.txt -e .[dev] + - -c .constraints/py3.12.txt -e .[dev] variables: PRETTIER_LEGACY_CLI: 1 PYTHONHASHSEED: 0 diff --git a/pyproject.toml b/pyproject.toml index 5992c7d0..b140d18d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,6 +31,7 @@ dependencies = [ "matplotlib <3.9.0", # https://github.com/ComPWA/compwa.github.io/actions/runs/9189453016/job/25271564131?pr=274 "matplotlib", "mpl-interactions", + "numpy==1.*", ] description = "Source code for the ComPWA Organization pages" dynamic = ["version"] @@ -65,10 +66,8 @@ doc = [ "sphinxcontrib-bibtex >=2", ] jupyter = [ - "black", - "isort", "jupyterlab", - "jupyterlab-code-formatter", + "jupyterlab-code-formatter >=3.0.0", "jupyterlab-git", "jupyterlab-lsp", "jupyterlab-myst", @@ -185,39 +184,11 @@ docstring-code-format = true line-ending = "lf" [tool.ruff.lint] -extend-select = [ - "A", - "B", - "BLE", - "C4", - "C90", - "D", - "EM", - "ERA", - "FA", - "I", - "ICN", - "INP", - "ISC", - "N", - "NPY", - "PGH", - "PIE", - "PL", - "Q", - "RET", - "RSE", - "RUF", - "S", - "SIM", - "T20", - "TCH", - "TID", - "TRY", - "UP", - "YTT", -] ignore = [ + "ANN401", + "ARG00", + "COM812", + "CPY001", "D101", "D102", "D103", @@ -227,18 +198,42 @@ ignore = [ "D213", "D407", "D416", + "DOC", "E501", + "FBT", + "FURB101", + "FURB103", + "FURB140", + "G004", "ISC001", "PLW1514", + "PT001", + "PTH", "SIM108", + "SLF001", ] +select = ["ALL"] task-tags = ["cspell"] +[tool.ruff.lint.flake8-builtins] +builtins-ignorelist = ["display"] + +[tool.ruff.lint.flake8-comprehensions] +allow-dict-calls-with-keyword-arguments = true + +[tool.ruff.lint.flake8-self] +ignore-names = [ + "_latex", + "_print", +] + [tool.ruff.lint.isort] split-on-trailing-comma = false [tool.ruff.lint.per-file-ignores] "**/003.ipynb" = ["B023", "PLC2701"] +"**/009.ipynb" = ["FURB113"] +"**/010.ipynb" = ["FURB113"] "**/011.ipynb" = ["PLC2701", "PLR6301"] "**/014.ipynb" = ["PLC2701"] "**/016.ipynb" = ["PLC2701"] @@ -249,11 +244,13 @@ split-on-trailing-comma = false "**/98*.ipynb" = ["E731", "PLR6301"] "*.ipynb" = [ "A003", + "ANN", "B008", "B018", "C408", "C90", "D", + "E303", "E402", "E703", "E741", @@ -264,6 +261,7 @@ split-on-trailing-comma = false "N807", "N815", "N816", + "PD901", "PLC0415", "PLC2401", "PLR09", @@ -271,8 +269,9 @@ split-on-trailing-comma = false "PLW0602", "PLW0603", "PLW2901", + "PYI034", "RUF001", - "RUF027", # for _latex_repr_ + "RUF027", "S101", "S307", "T20", @@ -302,11 +301,12 @@ split-on-trailing-comma = false "docs/report/011.ipynb" = ["F821"] "docs/report/017.ipynb" = ["S404", "S603"] "docs/report/020.ipynb" = ["F821"] -"docs/report/021.ipynb" = ["S404", "S603"] +"docs/report/021.ipynb" = ["FIX004", "S404", "S603"] "docs/report/022.ipynb" = ["F821"] "docs/report/024.ipynb" = ["F821", "S102"] "docs/report/025.ipynb" = ["S404", "S603"] "docs/report/027.ipynb" = ["PLW1510", "S404", "S603"] +"docs/report/028.ipynb" = ["PERF203"] "pin_nb_requirements.py" = ["INP001", "PLW2901"] "setup.py" = ["D100"]