diff --git a/.constraints/py3.10.txt b/.constraints/py3.10.txt
index 7662eec8..17735a1c 100644
--- a/.constraints/py3.10.txt
+++ b/.constraints/py3.10.txt
@@ -11,7 +11,7 @@ async-lru==2.0.4
attrs==23.2.0
babel==2.14.0
beautifulsoup4==4.12.3
-black==24.2.0
+black==24.3.0
bleach==6.1.0
cachetools==5.3.3
cattrs==23.2.3
@@ -22,8 +22,8 @@ chardet==5.2.0
charset-normalizer==3.3.2
click==8.1.7
colorama==0.4.6
-comm==0.2.1
-contourpy==1.2.0
+comm==0.2.2
+contourpy==1.2.1
cycler==0.12.1
debugpy==1.8.1
decorator==5.1.1
@@ -34,51 +34,51 @@ docutils==0.20.1
exceptiongroup==1.2.0
executing==2.0.1
fastjsonschema==2.19.1
-filelock==3.13.1
-fonttools==4.49.0
+filelock==3.13.3
+fonttools==4.51.0
fqdn==1.5.1
gitdb==4.0.11
-gitpython==3.1.42
-graphviz==0.20.1
+gitpython==3.1.43
+graphviz==0.20.3
greenlet==3.0.3
h11==0.14.0
-httpcore==1.0.4
+httpcore==1.0.5
httpx==0.27.0
identify==2.5.35
idna==3.6
imagesize==1.4.1
-importlib-metadata==7.0.2
+importlib-metadata==7.1.0
iniconfig==2.0.0
-ipykernel==6.29.3
+ipykernel==6.29.4
ipympl==0.9.3
-ipython==8.22.2
+ipython==8.23.0
ipython-genutils==0.2.0
ipywidgets==8.1.2
isoduration==20.11.0
isort==5.13.2
jedi==0.19.1
jinja2==3.1.3
-json5==0.9.22
+json5==0.9.24
jsonpointer==2.4
jsonschema==4.21.1
jsonschema-specifications==2023.12.1
jupyter==1.0.0
jupyter-cache==1.0.0
-jupyter-client==8.6.0
+jupyter-client==8.6.1
jupyter-console==6.6.3
-jupyter-core==5.7.1
-jupyter-events==0.9.0
+jupyter-core==5.7.2
+jupyter-events==0.10.0
jupyter-lsp==2.2.4
jupyter-server==2.13.0
jupyter-server-mathjax==0.2.6
-jupyter-server-terminals==0.5.2
-jupyterlab==4.1.4
+jupyter-server-terminals==0.5.3
+jupyterlab==4.1.5
jupyterlab-code-formatter==2.2.1
jupyterlab-git==0.50.0
jupyterlab-lsp==5.1.0
-jupyterlab-myst==2.3.1
+jupyterlab-myst==2.3.2
jupyterlab-pygments==0.3.0
-jupyterlab-server==2.25.3
+jupyterlab-server==2.25.4
jupyterlab-widgets==3.0.10
kiwisolver==1.4.5
latexcodec==3.0.0
@@ -86,7 +86,7 @@ livereload==2.6.3
lsprotocol==2023.0.1
markdown-it-py==3.0.0
markupsafe==2.1.5
-matplotlib==3.8.3
+matplotlib==3.8.4
matplotlib-inline==0.1.6
mdit-py-plugins==0.4.0
mdurl==0.1.2
@@ -96,25 +96,25 @@ mypy-extensions==1.0.0
myst-nb==1.0.0
myst-parser==2.0.0
nbclient==0.6.8
-nbconvert==7.16.2
+nbconvert==7.16.3
nbdime==4.0.1
-nbformat==5.9.2
+nbformat==5.10.4
nbmake==1.5.3
nest-asyncio==1.6.0
nodeenv==1.8.0
-notebook==7.1.1
+notebook==7.1.2
notebook-shim==0.2.4
numpy==1.26.4
overrides==7.7.0
-packaging==23.2
+packaging==24.0
pandocfilters==1.5.1
-parso==0.8.3
+parso==0.8.4
pathspec==0.12.1
pexpect==4.9.0
-pillow==10.2.0
+pillow==10.3.0
platformdirs==4.2.0
pluggy==1.4.0
-pre-commit==3.6.2
+pre-commit==3.7.0
prometheus-client==0.20.0
prompt-toolkit==3.0.43
psutil==5.9.8
@@ -122,29 +122,29 @@ ptyprocess==0.7.0
pure-eval==0.2.2
pybtex==0.24.0
pybtex-docutils==1.0.3
-pycparser==2.21
+pycparser==2.22
pydata-sphinx-theme==0.15.2
pygments==2.17.2
pyparsing==3.1.2
pyproject-api==1.6.1
-pytest==8.0.2
+pytest==8.1.1
python-dateutil==2.9.0.post0
python-json-logger==2.0.7
python-lsp-jsonrpc==1.1.2
python-lsp-ruff==2.2.0
-python-lsp-server==1.10.0
+python-lsp-server==1.11.0
pytoolconfig==1.3.1
pyyaml==6.0.1
pyzmq==25.1.2
qtconsole==5.5.1
qtpy==2.4.1
-referencing==0.33.0
+referencing==0.34.0
requests==2.31.0
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
-rope==1.12.0
+rope==1.13.0
rpds-py==0.18.0
-ruff==0.3.1
+ruff==0.3.5
send2trash==1.8.2
six==1.16.0
smmap==5.0.1
@@ -161,7 +161,7 @@ sphinx-copybutton==0.5.2
sphinx-design==0.5.0
sphinx-hep-pdgref==0.2.0
sphinx-pybtex-etal-style==0.0.2
-sphinx-remove-toctrees==0.0.3
+sphinx-remove-toctrees==1.0.0.post1
sphinx-thebe==0.3.1
sphinx-togglebutton==0.3.2
sphinxcontrib-applehelp==1.0.8
@@ -171,17 +171,17 @@ sphinxcontrib-htmlhelp==2.0.5
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.7
sphinxcontrib-serializinghtml==1.1.10
-sqlalchemy==2.0.28
+sqlalchemy==2.0.29
stack-data==0.6.3
tabulate==0.9.0
-terminado==0.18.0
+terminado==0.18.1
tinycss2==1.2.1
tomli==2.0.1
tornado==6.4
-tox==4.14.1
-traitlets==5.14.1
-types-python-dateutil==2.8.19.20240106
-typing-extensions==4.10.0
+tox==4.14.2
+traitlets==5.14.2
+types-python-dateutil==2.9.0.20240316
+typing-extensions==4.11.0
ujson==5.9.0
uri-template==1.3.0
urllib3==2.2.1
@@ -190,9 +190,9 @@ wcwidth==0.2.13
webcolors==1.13
webencodings==0.5.1
websocket-client==1.7.0
-wheel==0.42.0
+wheel==0.43.0
widgetsnbextension==4.0.10
-zipp==3.17.0
+zipp==3.18.1
# The following packages were excluded from the output:
# setuptools
diff --git a/.constraints/py3.11.txt b/.constraints/py3.11.txt
index c7d81890..12f77b8e 100644
--- a/.constraints/py3.11.txt
+++ b/.constraints/py3.11.txt
@@ -11,7 +11,7 @@ async-lru==2.0.4
attrs==23.2.0
babel==2.14.0
beautifulsoup4==4.12.3
-black==24.2.0
+black==24.3.0
bleach==6.1.0
cachetools==5.3.3
cattrs==23.2.3
@@ -22,8 +22,8 @@ chardet==5.2.0
charset-normalizer==3.3.2
click==8.1.7
colorama==0.4.6
-comm==0.2.1
-contourpy==1.2.0
+comm==0.2.2
+contourpy==1.2.1
cycler==0.12.1
debugpy==1.8.1
decorator==5.1.1
@@ -33,51 +33,51 @@ docstring-to-markdown==0.15
docutils==0.20.1
executing==2.0.1
fastjsonschema==2.19.1
-filelock==3.13.1
-fonttools==4.49.0
+filelock==3.13.3
+fonttools==4.51.0
fqdn==1.5.1
gitdb==4.0.11
-gitpython==3.1.42
-graphviz==0.20.1
+gitpython==3.1.43
+graphviz==0.20.3
greenlet==3.0.3
h11==0.14.0
-httpcore==1.0.4
+httpcore==1.0.5
httpx==0.27.0
identify==2.5.35
idna==3.6
imagesize==1.4.1
-importlib-metadata==7.0.2
+importlib-metadata==7.1.0
iniconfig==2.0.0
-ipykernel==6.29.3
+ipykernel==6.29.4
ipympl==0.9.3
-ipython==8.22.2
+ipython==8.23.0
ipython-genutils==0.2.0
ipywidgets==8.1.2
isoduration==20.11.0
isort==5.13.2
jedi==0.19.1
jinja2==3.1.3
-json5==0.9.22
+json5==0.9.24
jsonpointer==2.4
jsonschema==4.21.1
jsonschema-specifications==2023.12.1
jupyter==1.0.0
jupyter-cache==1.0.0
-jupyter-client==8.6.0
+jupyter-client==8.6.1
jupyter-console==6.6.3
-jupyter-core==5.7.1
-jupyter-events==0.9.0
+jupyter-core==5.7.2
+jupyter-events==0.10.0
jupyter-lsp==2.2.4
jupyter-server==2.13.0
jupyter-server-mathjax==0.2.6
-jupyter-server-terminals==0.5.2
-jupyterlab==4.1.4
+jupyter-server-terminals==0.5.3
+jupyterlab==4.1.5
jupyterlab-code-formatter==2.2.1
jupyterlab-git==0.50.0
jupyterlab-lsp==5.1.0
-jupyterlab-myst==2.3.1
+jupyterlab-myst==2.3.2
jupyterlab-pygments==0.3.0
-jupyterlab-server==2.25.3
+jupyterlab-server==2.25.4
jupyterlab-widgets==3.0.10
kiwisolver==1.4.5
latexcodec==3.0.0
@@ -85,7 +85,7 @@ livereload==2.6.3
lsprotocol==2023.0.1
markdown-it-py==3.0.0
markupsafe==2.1.5
-matplotlib==3.8.3
+matplotlib==3.8.4
matplotlib-inline==0.1.6
mdit-py-plugins==0.4.0
mdurl==0.1.2
@@ -95,25 +95,25 @@ mypy-extensions==1.0.0
myst-nb==1.0.0
myst-parser==2.0.0
nbclient==0.6.8
-nbconvert==7.16.2
+nbconvert==7.16.3
nbdime==4.0.1
-nbformat==5.9.2
+nbformat==5.10.4
nbmake==1.5.3
nest-asyncio==1.6.0
nodeenv==1.8.0
-notebook==7.1.1
+notebook==7.1.2
notebook-shim==0.2.4
numpy==1.26.4
overrides==7.7.0
-packaging==23.2
+packaging==24.0
pandocfilters==1.5.1
-parso==0.8.3
+parso==0.8.4
pathspec==0.12.1
pexpect==4.9.0
-pillow==10.2.0
+pillow==10.3.0
platformdirs==4.2.0
pluggy==1.4.0
-pre-commit==3.6.2
+pre-commit==3.7.0
prometheus-client==0.20.0
prompt-toolkit==3.0.43
psutil==5.9.8
@@ -121,29 +121,29 @@ ptyprocess==0.7.0
pure-eval==0.2.2
pybtex==0.24.0
pybtex-docutils==1.0.3
-pycparser==2.21
+pycparser==2.22
pydata-sphinx-theme==0.15.2
pygments==2.17.2
pyparsing==3.1.2
pyproject-api==1.6.1
-pytest==8.0.2
+pytest==8.1.1
python-dateutil==2.9.0.post0
python-json-logger==2.0.7
python-lsp-jsonrpc==1.1.2
python-lsp-ruff==2.2.0
-python-lsp-server==1.10.0
+python-lsp-server==1.11.0
pytoolconfig==1.3.1
pyyaml==6.0.1
pyzmq==25.1.2
qtconsole==5.5.1
qtpy==2.4.1
-referencing==0.33.0
+referencing==0.34.0
requests==2.31.0
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
-rope==1.12.0
+rope==1.13.0
rpds-py==0.18.0
-ruff==0.3.1
+ruff==0.3.5
send2trash==1.8.2
six==1.16.0
smmap==5.0.1
@@ -160,7 +160,7 @@ sphinx-copybutton==0.5.2
sphinx-design==0.5.0
sphinx-hep-pdgref==0.2.0
sphinx-pybtex-etal-style==0.0.2
-sphinx-remove-toctrees==0.0.3
+sphinx-remove-toctrees==1.0.0.post1
sphinx-thebe==0.3.1
sphinx-togglebutton==0.3.2
sphinxcontrib-applehelp==1.0.8
@@ -170,16 +170,16 @@ sphinxcontrib-htmlhelp==2.0.5
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.7
sphinxcontrib-serializinghtml==1.1.10
-sqlalchemy==2.0.28
+sqlalchemy==2.0.29
stack-data==0.6.3
tabulate==0.9.0
-terminado==0.18.0
+terminado==0.18.1
tinycss2==1.2.1
tornado==6.4
-tox==4.14.1
-traitlets==5.14.1
-types-python-dateutil==2.8.19.20240106
-typing-extensions==4.10.0
+tox==4.14.2
+traitlets==5.14.2
+types-python-dateutil==2.9.0.20240316
+typing-extensions==4.11.0
ujson==5.9.0
uri-template==1.3.0
urllib3==2.2.1
@@ -188,9 +188,9 @@ wcwidth==0.2.13
webcolors==1.13
webencodings==0.5.1
websocket-client==1.7.0
-wheel==0.42.0
+wheel==0.43.0
widgetsnbextension==4.0.10
-zipp==3.17.0
+zipp==3.18.1
# The following packages were excluded from the output:
# setuptools
diff --git a/.constraints/py3.12.txt b/.constraints/py3.12.txt
index f448e009..12322693 100644
--- a/.constraints/py3.12.txt
+++ b/.constraints/py3.12.txt
@@ -11,7 +11,7 @@ async-lru==2.0.4
attrs==23.2.0
babel==2.14.0
beautifulsoup4==4.12.3
-black==24.2.0
+black==24.3.0
bleach==6.1.0
cachetools==5.3.3
cattrs==23.2.3
@@ -22,8 +22,8 @@ chardet==5.2.0
charset-normalizer==3.3.2
click==8.1.7
colorama==0.4.6
-comm==0.2.1
-contourpy==1.2.0
+comm==0.2.2
+contourpy==1.2.1
cycler==0.12.1
debugpy==1.8.1
decorator==5.1.1
@@ -33,51 +33,51 @@ docstring-to-markdown==0.15
docutils==0.20.1
executing==2.0.1
fastjsonschema==2.19.1
-filelock==3.13.1
-fonttools==4.49.0
+filelock==3.13.3
+fonttools==4.51.0
fqdn==1.5.1
gitdb==4.0.11
-gitpython==3.1.42
-graphviz==0.20.1
+gitpython==3.1.43
+graphviz==0.20.3
greenlet==3.0.3
h11==0.14.0
-httpcore==1.0.4
+httpcore==1.0.5
httpx==0.27.0
identify==2.5.35
idna==3.6
imagesize==1.4.1
-importlib-metadata==7.0.2
+importlib-metadata==7.1.0
iniconfig==2.0.0
-ipykernel==6.29.3
+ipykernel==6.29.4
ipympl==0.9.3
-ipython==8.22.2
+ipython==8.23.0
ipython-genutils==0.2.0
ipywidgets==8.1.2
isoduration==20.11.0
isort==5.13.2
jedi==0.19.1
jinja2==3.1.3
-json5==0.9.22
+json5==0.9.24
jsonpointer==2.4
jsonschema==4.21.1
jsonschema-specifications==2023.12.1
jupyter==1.0.0
jupyter-cache==1.0.0
-jupyter-client==8.6.0
+jupyter-client==8.6.1
jupyter-console==6.6.3
-jupyter-core==5.7.1
-jupyter-events==0.9.0
+jupyter-core==5.7.2
+jupyter-events==0.10.0
jupyter-lsp==2.2.4
jupyter-server==2.13.0
jupyter-server-mathjax==0.2.6
-jupyter-server-terminals==0.5.2
-jupyterlab==4.1.4
+jupyter-server-terminals==0.5.3
+jupyterlab==4.1.5
jupyterlab-code-formatter==2.2.1
jupyterlab-git==0.50.0
jupyterlab-lsp==5.1.0
-jupyterlab-myst==2.3.1
+jupyterlab-myst==2.3.2
jupyterlab-pygments==0.3.0
-jupyterlab-server==2.25.3
+jupyterlab-server==2.25.4
jupyterlab-widgets==3.0.10
kiwisolver==1.4.5
latexcodec==3.0.0
@@ -85,7 +85,7 @@ livereload==2.6.3
lsprotocol==2023.0.1
markdown-it-py==3.0.0
markupsafe==2.1.5
-matplotlib==3.8.3
+matplotlib==3.8.4
matplotlib-inline==0.1.6
mdit-py-plugins==0.4.0
mdurl==0.1.2
@@ -95,25 +95,25 @@ mypy-extensions==1.0.0
myst-nb==1.0.0
myst-parser==2.0.0
nbclient==0.6.8
-nbconvert==7.16.2
+nbconvert==7.16.3
nbdime==4.0.1
-nbformat==5.9.2
+nbformat==5.10.4
nbmake==1.5.3
nest-asyncio==1.6.0
nodeenv==1.8.0
-notebook==7.1.1
+notebook==7.1.2
notebook-shim==0.2.4
numpy==1.26.4
overrides==7.7.0
-packaging==23.2
+packaging==24.0
pandocfilters==1.5.1
-parso==0.8.3
+parso==0.8.4
pathspec==0.12.1
pexpect==4.9.0
-pillow==10.2.0
+pillow==10.3.0
platformdirs==4.2.0
pluggy==1.4.0
-pre-commit==3.6.2
+pre-commit==3.7.0
prometheus-client==0.20.0
prompt-toolkit==3.0.43
psutil==5.9.8
@@ -121,29 +121,29 @@ ptyprocess==0.7.0
pure-eval==0.2.2
pybtex==0.24.0
pybtex-docutils==1.0.3
-pycparser==2.21
+pycparser==2.22
pydata-sphinx-theme==0.15.2
pygments==2.17.2
pyparsing==3.1.2
pyproject-api==1.6.1
-pytest==8.0.2
+pytest==8.1.1
python-dateutil==2.9.0.post0
python-json-logger==2.0.7
python-lsp-jsonrpc==1.1.2
python-lsp-ruff==2.2.0
-python-lsp-server==1.10.0
+python-lsp-server==1.11.0
pytoolconfig==1.3.1
pyyaml==6.0.1
pyzmq==25.1.2
qtconsole==5.5.1
qtpy==2.4.1
-referencing==0.33.0
+referencing==0.34.0
requests==2.31.0
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
-rope==1.12.0
+rope==1.13.0
rpds-py==0.18.0
-ruff==0.3.1
+ruff==0.3.5
send2trash==1.8.2
six==1.16.0
smmap==5.0.1
@@ -160,7 +160,7 @@ sphinx-copybutton==0.5.2
sphinx-design==0.5.0
sphinx-hep-pdgref==0.2.0
sphinx-pybtex-etal-style==0.0.2
-sphinx-remove-toctrees==0.0.3
+sphinx-remove-toctrees==1.0.0.post1
sphinx-thebe==0.3.1
sphinx-togglebutton==0.3.2
sphinxcontrib-applehelp==1.0.8
@@ -170,16 +170,16 @@ sphinxcontrib-htmlhelp==2.0.5
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.7
sphinxcontrib-serializinghtml==1.1.10
-sqlalchemy==2.0.28
+sqlalchemy==2.0.29
stack-data==0.6.3
tabulate==0.9.0
-terminado==0.18.0
+terminado==0.18.1
tinycss2==1.2.1
tornado==6.4
-tox==4.14.1
-traitlets==5.14.1
-types-python-dateutil==2.8.19.20240106
-typing-extensions==4.10.0
+tox==4.14.2
+traitlets==5.14.2
+types-python-dateutil==2.9.0.20240316
+typing-extensions==4.11.0
ujson==5.9.0
uri-template==1.3.0
urllib3==2.2.1
@@ -188,9 +188,9 @@ wcwidth==0.2.13
webcolors==1.13
webencodings==0.5.1
websocket-client==1.7.0
-wheel==0.42.0
+wheel==0.43.0
widgetsnbextension==4.0.10
-zipp==3.17.0
+zipp==3.18.1
# The following packages were excluded from the output:
# setuptools
diff --git a/.constraints/py3.7.txt b/.constraints/py3.7.txt
index fd299975..b86e887d 100644
--- a/.constraints/py3.7.txt
+++ b/.constraints/py3.7.txt
@@ -39,7 +39,7 @@ filelock==3.12.2
fonttools==4.38.0
fqdn==1.5.1
gitdb==4.0.11
-gitpython==3.1.42
+gitpython==3.1.43
graphviz==0.20.1
greenlet==3.0.3
identify==2.5.24
@@ -106,9 +106,9 @@ nodeenv==1.8.0
notebook==6.5.6
notebook-shim==0.2.4
numpy==1.21.6
-packaging==23.2
+packaging==24.0
pandocfilters==1.5.1
-parso==0.8.3
+parso==0.8.4
pathspec==0.11.2
pexpect==4.9.0
pickleshare==0.7.5
@@ -124,7 +124,7 @@ ptyprocess==0.7.0
pybtex==0.24.0
pybtex-docutils==1.0.3
pycparser==2.21
-pydantic==1.10.14
+pydantic==1.10.15
pydata-sphinx-theme==0.13.3
pygments==2.17.2
pyparsing==3.1.2
diff --git a/.constraints/py3.8.txt b/.constraints/py3.8.txt
index 40a53f12..6da2de11 100644
--- a/.constraints/py3.8.txt
+++ b/.constraints/py3.8.txt
@@ -12,7 +12,7 @@ attrs==23.2.0
babel==2.14.0
backcall==0.2.0
beautifulsoup4==4.12.3
-black==24.2.0
+black==24.3.0
bleach==6.1.0
cachetools==5.3.3
cattrs==23.2.3
@@ -23,7 +23,7 @@ chardet==5.2.0
charset-normalizer==3.3.2
click==8.1.7
colorama==0.4.6
-comm==0.2.1
+comm==0.2.2
contourpy==1.1.1
cycler==0.12.1
debugpy==1.8.1
@@ -35,23 +35,23 @@ docutils==0.17.1
exceptiongroup==1.2.0
executing==2.0.1
fastjsonschema==2.19.1
-filelock==3.13.1
-fonttools==4.49.0
+filelock==3.13.3
+fonttools==4.51.0
fqdn==1.5.1
gitdb==4.0.11
-gitpython==3.1.42
-graphviz==0.20.1
+gitpython==3.1.43
+graphviz==0.20.3
greenlet==3.0.3
h11==0.14.0
-httpcore==1.0.4
+httpcore==1.0.5
httpx==0.27.0
identify==2.5.35
idna==3.6
imagesize==1.4.1
-importlib-metadata==7.0.2
-importlib-resources==6.1.3
+importlib-metadata==7.1.0
+importlib-resources==6.4.0
iniconfig==2.0.0
-ipykernel==6.29.3
+ipykernel==6.29.4
ipympl==0.9.3
ipython==8.12.3
ipython-genutils==0.2.0
@@ -60,27 +60,27 @@ isoduration==20.11.0
isort==5.13.2
jedi==0.19.1
jinja2==3.1.3
-json5==0.9.22
+json5==0.9.24
jsonpointer==2.4
jsonschema==4.21.1
jsonschema-specifications==2023.12.1
jupyter==1.0.0
jupyter-cache==0.6.1
-jupyter-client==8.6.0
+jupyter-client==8.6.1
jupyter-console==6.6.3
-jupyter-core==5.7.1
-jupyter-events==0.9.0
+jupyter-core==5.7.2
+jupyter-events==0.10.0
jupyter-lsp==2.2.4
jupyter-server==2.13.0
jupyter-server-mathjax==0.2.6
-jupyter-server-terminals==0.5.2
-jupyterlab==4.1.4
+jupyter-server-terminals==0.5.3
+jupyterlab==4.1.5
jupyterlab-code-formatter==2.2.1
jupyterlab-git==0.50.0
jupyterlab-lsp==5.1.0
-jupyterlab-myst==2.3.1
+jupyterlab-myst==2.3.2
jupyterlab-pygments==0.3.0
-jupyterlab-server==2.25.3
+jupyterlab-server==2.25.4
jupyterlab-widgets==3.0.10
kiwisolver==1.4.5
latexcodec==3.0.0
@@ -98,23 +98,23 @@ mypy-extensions==1.0.0
myst-nb==0.17.2
myst-parser==0.18.1
nbclient==0.6.8
-nbconvert==7.16.2
+nbconvert==7.16.3
nbdime==4.0.1
-nbformat==5.9.2
+nbformat==5.10.4
nbmake==1.5.3
nest-asyncio==1.6.0
nodeenv==1.8.0
-notebook==7.1.1
+notebook==7.1.2
notebook-shim==0.2.4
numpy==1.24.4
overrides==7.7.0
-packaging==23.2
+packaging==24.0
pandocfilters==1.5.1
-parso==0.8.3
+parso==0.8.4
pathspec==0.12.1
pexpect==4.9.0
pickleshare==0.7.5
-pillow==10.2.0
+pillow==10.3.0
pkgutil-resolve-name==1.3.10
platformdirs==4.2.0
pluggy==1.4.0
@@ -126,30 +126,30 @@ ptyprocess==0.7.0
pure-eval==0.2.2
pybtex==0.24.0
pybtex-docutils==1.0.3
-pycparser==2.21
+pycparser==2.22
pydata-sphinx-theme==0.14.4
pygments==2.17.2
pyparsing==3.1.2
pyproject-api==1.6.1
-pytest==8.0.2
+pytest==8.1.1
python-dateutil==2.9.0.post0
python-json-logger==2.0.7
python-lsp-jsonrpc==1.1.2
python-lsp-ruff==2.2.0
-python-lsp-server==1.10.0
+python-lsp-server==1.11.0
pytoolconfig==1.3.1
pytz==2024.1
pyyaml==6.0.1
pyzmq==25.1.2
qtconsole==5.5.1
qtpy==2.4.1
-referencing==0.33.0
+referencing==0.34.0
requests==2.31.0
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
-rope==1.12.0
+rope==1.13.0
rpds-py==0.18.0
-ruff==0.3.1
+ruff==0.3.5
send2trash==1.8.2
six==1.16.0
smmap==5.0.1
@@ -176,17 +176,17 @@ sphinxcontrib-htmlhelp==2.0.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
-sqlalchemy==2.0.28
+sqlalchemy==2.0.29
stack-data==0.6.3
tabulate==0.9.0
-terminado==0.18.0
+terminado==0.18.1
tinycss2==1.2.1
tomli==2.0.1
tornado==6.4
-tox==4.14.1
-traitlets==5.14.1
-types-python-dateutil==2.8.19.20240106
-typing-extensions==4.10.0
+tox==4.14.2
+traitlets==5.14.2
+types-python-dateutil==2.9.0.20240316
+typing-extensions==4.11.0
ujson==5.9.0
uri-template==1.3.0
urllib3==2.2.1
@@ -195,9 +195,9 @@ wcwidth==0.2.13
webcolors==1.13
webencodings==0.5.1
websocket-client==1.7.0
-wheel==0.42.0
+wheel==0.43.0
widgetsnbextension==4.0.10
-zipp==3.17.0
+zipp==3.18.1
# The following packages were excluded from the output:
# setuptools
diff --git a/.constraints/py3.9.txt b/.constraints/py3.9.txt
index 14c707f4..2e9b0049 100644
--- a/.constraints/py3.9.txt
+++ b/.constraints/py3.9.txt
@@ -11,7 +11,7 @@ async-lru==2.0.4
attrs==23.2.0
babel==2.14.0
beautifulsoup4==4.12.3
-black==24.2.0
+black==24.3.0
bleach==6.1.0
cachetools==5.3.3
cattrs==23.2.3
@@ -22,8 +22,8 @@ chardet==5.2.0
charset-normalizer==3.3.2
click==8.1.7
colorama==0.4.6
-comm==0.2.1
-contourpy==1.2.0
+comm==0.2.2
+contourpy==1.2.1
cycler==0.12.1
debugpy==1.8.1
decorator==5.1.1
@@ -34,23 +34,23 @@ docutils==0.20.1
exceptiongroup==1.2.0
executing==2.0.1
fastjsonschema==2.19.1
-filelock==3.13.1
-fonttools==4.49.0
+filelock==3.13.3
+fonttools==4.51.0
fqdn==1.5.1
gitdb==4.0.11
-gitpython==3.1.42
-graphviz==0.20.1
+gitpython==3.1.43
+graphviz==0.20.3
greenlet==3.0.3
h11==0.14.0
-httpcore==1.0.4
+httpcore==1.0.5
httpx==0.27.0
identify==2.5.35
idna==3.6
imagesize==1.4.1
-importlib-metadata==7.0.2
-importlib-resources==6.1.3
+importlib-metadata==7.1.0
+importlib-resources==6.4.0
iniconfig==2.0.0
-ipykernel==6.29.3
+ipykernel==6.29.4
ipympl==0.9.3
ipython==8.18.1
ipython-genutils==0.2.0
@@ -59,27 +59,27 @@ isoduration==20.11.0
isort==5.13.2
jedi==0.19.1
jinja2==3.1.3
-json5==0.9.22
+json5==0.9.24
jsonpointer==2.4
jsonschema==4.21.1
jsonschema-specifications==2023.12.1
jupyter==1.0.0
jupyter-cache==1.0.0
-jupyter-client==8.6.0
+jupyter-client==8.6.1
jupyter-console==6.6.3
-jupyter-core==5.7.1
-jupyter-events==0.9.0
+jupyter-core==5.7.2
+jupyter-events==0.10.0
jupyter-lsp==2.2.4
jupyter-server==2.13.0
jupyter-server-mathjax==0.2.6
-jupyter-server-terminals==0.5.2
-jupyterlab==4.1.4
+jupyter-server-terminals==0.5.3
+jupyterlab==4.1.5
jupyterlab-code-formatter==2.2.1
jupyterlab-git==0.50.0
jupyterlab-lsp==5.1.0
-jupyterlab-myst==2.3.1
+jupyterlab-myst==2.3.2
jupyterlab-pygments==0.3.0
-jupyterlab-server==2.25.3
+jupyterlab-server==2.25.4
jupyterlab-widgets==3.0.10
kiwisolver==1.4.5
latexcodec==3.0.0
@@ -87,7 +87,7 @@ livereload==2.6.3
lsprotocol==2023.0.1
markdown-it-py==3.0.0
markupsafe==2.1.5
-matplotlib==3.8.3
+matplotlib==3.8.4
matplotlib-inline==0.1.6
mdit-py-plugins==0.4.0
mdurl==0.1.2
@@ -97,25 +97,25 @@ mypy-extensions==1.0.0
myst-nb==1.0.0
myst-parser==2.0.0
nbclient==0.6.8
-nbconvert==7.16.2
+nbconvert==7.16.3
nbdime==4.0.1
-nbformat==5.9.2
+nbformat==5.10.4
nbmake==1.5.3
nest-asyncio==1.6.0
nodeenv==1.8.0
-notebook==7.1.1
+notebook==7.1.2
notebook-shim==0.2.4
numpy==1.26.4
overrides==7.7.0
-packaging==23.2
+packaging==24.0
pandocfilters==1.5.1
-parso==0.8.3
+parso==0.8.4
pathspec==0.12.1
pexpect==4.9.0
-pillow==10.2.0
+pillow==10.3.0
platformdirs==4.2.0
pluggy==1.4.0
-pre-commit==3.6.2
+pre-commit==3.7.0
prometheus-client==0.20.0
prompt-toolkit==3.0.43
psutil==5.9.8
@@ -123,29 +123,29 @@ ptyprocess==0.7.0
pure-eval==0.2.2
pybtex==0.24.0
pybtex-docutils==1.0.3
-pycparser==2.21
+pycparser==2.22
pydata-sphinx-theme==0.15.2
pygments==2.17.2
pyparsing==3.1.2
pyproject-api==1.6.1
-pytest==8.0.2
+pytest==8.1.1
python-dateutil==2.9.0.post0
python-json-logger==2.0.7
python-lsp-jsonrpc==1.1.2
python-lsp-ruff==2.2.0
-python-lsp-server==1.10.0
+python-lsp-server==1.11.0
pytoolconfig==1.3.1
pyyaml==6.0.1
pyzmq==25.1.2
qtconsole==5.5.1
qtpy==2.4.1
-referencing==0.33.0
+referencing==0.34.0
requests==2.31.0
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
-rope==1.12.0
+rope==1.13.0
rpds-py==0.18.0
-ruff==0.3.1
+ruff==0.3.5
send2trash==1.8.2
six==1.16.0
smmap==5.0.1
@@ -162,7 +162,7 @@ sphinx-copybutton==0.5.2
sphinx-design==0.5.0
sphinx-hep-pdgref==0.2.0
sphinx-pybtex-etal-style==0.0.2
-sphinx-remove-toctrees==0.0.3
+sphinx-remove-toctrees==1.0.0.post1
sphinx-thebe==0.3.1
sphinx-togglebutton==0.3.2
sphinxcontrib-applehelp==1.0.8
@@ -172,17 +172,17 @@ sphinxcontrib-htmlhelp==2.0.5
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.7
sphinxcontrib-serializinghtml==1.1.10
-sqlalchemy==2.0.28
+sqlalchemy==2.0.29
stack-data==0.6.3
tabulate==0.9.0
-terminado==0.18.0
+terminado==0.18.1
tinycss2==1.2.1
tomli==2.0.1
tornado==6.4
-tox==4.14.1
-traitlets==5.14.1
-types-python-dateutil==2.8.19.20240106
-typing-extensions==4.10.0
+tox==4.14.2
+traitlets==5.14.2
+types-python-dateutil==2.9.0.20240316
+typing-extensions==4.11.0
ujson==5.9.0
uri-template==1.3.0
urllib3==2.2.1
@@ -191,9 +191,9 @@ wcwidth==0.2.13
webcolors==1.13
webencodings==0.5.1
websocket-client==1.7.0
-wheel==0.42.0
+wheel==0.43.0
widgetsnbextension==4.0.10
-zipp==3.17.0
+zipp==3.18.1
# The following packages were excluded from the output:
# setuptools
diff --git a/.cspell.json b/.cspell.json
index 311821c9..4a68c217 100644
--- a/.cspell.json
+++ b/.cspell.json
@@ -61,8 +61,10 @@
"autograd",
"blatt",
"breit",
+ "chromodynamics",
"compwa",
"conda",
+ "Curvenote",
"Dalitz",
"deadsnakes",
"defaultdict",
@@ -84,23 +86,29 @@
"lambdifying",
"LHCb",
"lineshape",
+ "lineshapes",
+ "Mathematica",
"MathML",
"matplotlib",
"Mikhasenko",
"miniconda",
"mkdir",
+ "multivalued",
"mypy",
+ "Numba",
"numpy",
"parametrizations",
"pathlib",
"permutate",
"Plotly",
- "pypa",
+ "PyPA",
"pyproject",
"pytest",
"PYTHONHASHSEED",
"qrules",
+ "Reana",
"roadmap",
+ "Schwarz",
"Scikit",
"scipy",
"sympify",
@@ -109,6 +117,7 @@
"tensorwaves",
"textwrap",
"toolkits",
+ "TPUs",
"traceback",
"unbinned",
"unitarity",
@@ -119,8 +128,12 @@
"Zenodo"
],
"ignoreWords": [
+ "Basdevant",
"Colab",
+ "Danilkin",
+ "Deineka",
"MAINT",
+ "Tiator",
"absl",
"adrs",
"allclose",
@@ -164,9 +177,12 @@
"coolwarm",
"csqrt",
"cstride",
+ "cxxcode",
"dalitzplot",
"darkred",
"dasharray",
+ "dataclass",
+ "dataclasses",
"dataframe",
"deepcopy",
"displaystyle",
@@ -183,10 +199,12 @@
"expertsystem",
"facecolor",
"facecolors",
+ "fcode",
"figsize",
"filterwarnings",
"fontcolor",
"fontsize",
+ "forall",
"framealpha",
"funcs",
"getitem",
@@ -200,6 +218,7 @@
"heli",
"hepstats",
"histtype",
+ "hoverinfo",
"hspace",
"hypotests",
"imag",
@@ -213,6 +232,7 @@
"ipython",
"ipywidgets",
"isinstance",
+ "isnan",
"isort",
"jaxlib",
"joinpath",
@@ -233,6 +253,7 @@
"lstrip",
"makedirs",
"marangotto",
+ "matexpr",
"mathbb",
"mathbf",
"mathcal",
@@ -244,6 +265,7 @@
"meshgrid",
"mmikhasenko",
"mname",
+ "mplot3d",
"msigma",
"multiline",
"mystnb",
@@ -259,6 +281,7 @@
"ndarray",
"nonlocal",
"nonumber",
+ "nopython",
"noqa",
"noreply",
"nrows",
@@ -294,6 +317,8 @@
"relim",
"repr",
"richman",
+ "rightarrow",
+ "royalblue",
"rpartition",
"rstride",
"rstrip",
@@ -333,6 +358,7 @@
"tickvals",
"timeit",
"toctree",
+ "toprettyxml",
"tqdm",
"treewise",
"unevaluatable",
@@ -342,6 +368,7 @@
"vmax",
"vmin",
"wspace",
+ "xanchor",
"xaxis",
"xdata",
"xlabel",
@@ -351,6 +378,7 @@
"xtick",
"xticklabels",
"xticks",
+ "yanchor",
"yaxis",
"ydata",
"ylabel",
diff --git a/.gitignore b/.gitignore
index 5dc65be5..df349805 100644
--- a/.gitignore
+++ b/.gitignore
@@ -55,3 +55,4 @@ pyvenv*/
!codecov.yml
!environment.yml
!pyrightconfig.json
+.jupyter_ystore.db
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index a7738248..06d285cc 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -53,10 +53,9 @@ repos:
- --repo-title=ComPWA Organization
- id: colab-toc-visible
- id: fix-nbformat-version
- - id: set-nb-cells
- repo: https://github.com/astral-sh/ruff-pre-commit
- rev: v0.3.1
+ rev: v0.3.5
hooks:
- id: ruff
args: [--fix]
@@ -107,7 +106,7 @@ repos:
exclude: (?x)^(.*/Manifest\.toml|.*/Project\.toml)$
- repo: https://github.com/python-jsonschema/check-jsonschema
- rev: 0.28.0
+ rev: 0.28.1
hooks:
- id: check-jsonschema
name: Check CITATION.cff
@@ -120,7 +119,7 @@ repos:
pass_filenames: false
- repo: https://github.com/streetsidesoftware/cspell-cli
- rev: v8.6.0
+ rev: v8.6.1
hooks:
- id: cspell
@@ -158,6 +157,6 @@ repos:
- jupyter
- repo: https://github.com/ComPWA/mirrors-pyright
- rev: v1.1.352
+ rev: v1.1.357
hooks:
- id: pyright
diff --git a/docs/adr/001/operators.ipynb b/docs/adr/001/operators.ipynb
index a8f284ad..7ad21fe3 100644
--- a/docs/adr/001/operators.ipynb
+++ b/docs/adr/001/operators.ipynb
@@ -1,30 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "jupyter": {
- "source_hidden": true
- },
- "slideshow": {
- "slide_type": "skip"
- },
- "tags": [
- "remove-cell"
- ]
- },
- "outputs": [],
- "source": [
- "%%capture\n",
- "\n",
- "%config Completer.use_jedi = False\n",
- "%config InlineBackend.figure_formats = ['svg']\n",
- "import os\n",
- "\n",
- "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
diff --git a/docs/conf.py b/docs/conf.py
index b93dc7c5..e962eff9 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -113,6 +113,7 @@ def get_nb_exclusion_patterns() -> list[str]:
"sphinx.ext.intersphinx",
"sphinx.ext.mathjax",
"sphinx.ext.napoleon",
+ "sphinx.ext.todo",
"sphinx_api_relink",
"sphinx_codeautolink",
"sphinx_comments",
@@ -126,6 +127,9 @@ def get_nb_exclusion_patterns() -> list[str]:
"sphinxcontrib.bibtex",
]
graphviz_output_format = "svg"
+html_css_files = [
+ "https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.1.1/css/all.min.css",
+]
html_favicon = "_static/favicon.ico"
html_js_files = [
"https://cdn.datatables.net/1.13.6/js/jquery.dataTables.min.js",
@@ -172,12 +176,13 @@ def get_nb_exclusion_patterns() -> list[str]:
}
html_title = "Common Partial Wave Analysis Project"
intersphinx_mapping = {
- "IPython": (f"https://ipython.readthedocs.io/en/{pin('IPython')}", None),
- "ampform": ("https://ampform.readthedocs.io/en/stable", None),
+ "ampform-0.14.x": ("https://ampform.readthedocs.io/0.14.x", None),
+ "ampform": ("https://ampform.readthedocs.io/stable", None),
"attrs": (f"https://www.attrs.org/en/{pin('attrs')}", None),
"expertsystem": ("https://expertsystem.readthedocs.io/en/stable", None),
"graphviz": ("https://graphviz.readthedocs.io/en/stable", None),
"hepstats": ("https://scikit-hep.org/hepstats", None),
+ "IPython": (f"https://ipython.readthedocs.io/en/{pin('IPython')}", None),
"ipywidgets": (f"https://ipywidgets.readthedocs.io/en/{pin('ipywidgets')}", None),
"jax": ("https://jax.readthedocs.io/en/latest", None),
"matplotlib": (f"https://matplotlib.org/{pin('matplotlib')}", None),
@@ -185,15 +190,17 @@ def get_nb_exclusion_patterns() -> list[str]:
f"https://mpl-interactions.readthedocs.io/en/{pin('mpl-interactions')}",
None,
),
+ "numba": ("https://numba.pydata.org/numba-doc/latest", None),
"numpy": (f"https://numpy.org/doc/{pin_minor('numpy')}", None),
"plotly": ("https://plotly.com/python-api-reference/", None),
"pwa": ("https://pwa.readthedocs.io", None),
"python": ("https://docs.python.org/3", None),
- "qrules": ("https://qrules.readthedocs.io/en/stable", None),
- "qrules-0.9.x": ("https://qrules.readthedocs.io/en/0.9.x", None),
+ "qrules-0.9.x": ("https://qrules.readthedocs.io/0.9.x", None),
+ "qrules": ("https://qrules.readthedocs.io/stable", None),
"scipy": ("https://docs.scipy.org/doc/scipy-1.7.0", None),
"sympy": ("https://docs.sympy.org/latest", None),
- "tensorwaves": ("https://tensorwaves.readthedocs.io/en/stable", None),
+ "tensorwaves": ("https://tensorwaves.readthedocs.io/stable", None),
+ "torch": ("https://pytorch.org/docs/stable", None),
"zfit": ("https://zfit.readthedocs.io/en/latest", None),
}
linkcheck_anchors = False
@@ -210,6 +217,7 @@ def get_nb_exclusion_patterns() -> list[str]:
"https://mybinder.org", # often instable
"https://open.vscode.dev",
"https://rosettacode.org",
+ "https://stackoverflow.com",
"https://via.placeholder.com", # irregular timeout
"https://www.andiamo.co.uk/resources/iso-language-codes", # 443, but works
"https://www.bookfinder.com",
@@ -263,3 +271,4 @@ def get_nb_exclusion_patterns() -> list[str]:
"repository_url": html_theme_options["repository_url"],
"repository_branch": html_theme_options["repository_branch"],
}
+todo_include_todos = True
diff --git a/docs/develop.md b/docs/develop.md
index 6b2d825b..e76f9048 100644
--- a/docs/develop.md
+++ b/docs/develop.md
@@ -1002,6 +1002,7 @@ Do you have other recommendations? Edit this page
:::{rubric} Software development in Python
:::
+- [Scientific Python Library Development Guide](https://learn.scientific-python.org/development/) for scientists and research software engineers.
- [Complete Python Development Guide ― testdriven.io](https://testdriven.io/guides/complete-python)
- _Effective Python: 90 Specific Ways to Write Better Python_ (2019) by Brett Slatkin
{cite}`slatkinEffectivePython902019`. See also
diff --git a/docs/index.md b/docs/index.md
index df269337..2a8c273c 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -8,21 +8,21 @@
{{ '[](https://colab.research.google.com/github/ComPWA/compwa.github.io/blob/{})'.format(branch) }} {{ '[](https://mybinder.org/v2/gh/ComPWA/compwa.github.io/{}?filepath=docs/usage)'.format(branch) }}
-The ["Common Partial Wave Analysis"](https://github.com/ComPWA) organization (ComPWA)
-aims to make amplitude analysis accessible through transparent and interactive
-documentation, modern software development tools, and collaboration-independent
-frameworks. **Contact details** can be found [here](https://github.com/ComPWA).
+The ["Common Partial Wave Analysis"](https://github.com/ComPWA) organization (ComPWA) aims to make amplitude analysis accessible through transparent and interactive documentation, modern software development tools, and collaboration-independent frameworks. One major novelty is that we [formulate amplitude models symbolically](./symbolics.ipynb) with a Computer Algebra System, which results in a **self-documenting workflow** with high-performance, **backend-agnostic computations**.
+
+Contact details can be found on our [GitHub organization page](https://github.com/ComPWA).
+
+:::{card} {material-outlined}`calculate;1.5em` Symbolic amplitude models
+:link: symbolics
+:link-type: doc
+Read more about computations with symbolic amplitude models here.
+:::
## Main projects
-ComPWA maintains **three main Python packages** with which you can do a full partial
-wave analysis. The packages are designed as **libraries**, so that they can be used
-separately by other projects.
+ComPWA maintains **three main Python packages** with which you can do a full partial wave analysis. The packages are designed as **libraries**, so that they can be used separately by other projects.
-Each of these libraries come with **interactive and interlinked documentation** that is
-intended to bring theory and code closer together. The PWA Pages takes that one step
-further: it is an independent and easy-to-maintain documentation project that can serve
-as a central place to gather links to PWA theory and software.
+Each of these libraries comes with **interactive and interlinked documentation** that is intended to bring theory and code closer together. The PWA Pages takes that one step further: it is an independent and easy-to-maintain documentation project that can serve as a central place to gather links to PWA theory and software.
::::{grid} 1 2 2 2
@@ -253,6 +253,7 @@ more about our ideals and ongoing projects on the {doc}`main` page.
caption: Resources
hidden:
---
+symbolics
develop
adr
reports
diff --git a/docs/report/000.ipynb b/docs/report/000.ipynb
index a6e8c59c..5e783c4a 100644
--- a/docs/report/000.ipynb
+++ b/docs/report/000.ipynb
@@ -1,29 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "hideCode": true,
- "hideOutput": true,
- "hidePrompt": true,
- "jupyter": {
- "source_hidden": true
- },
- "slideshow": {
- "slide_type": "skip"
- },
- "tags": [
- "remove-cell"
- ]
- },
- "outputs": [],
- "source": [
- "import os\n",
- "\n",
- "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
diff --git a/docs/report/001.ipynb b/docs/report/001.ipynb
index 49a7c6f2..bb23e434 100644
--- a/docs/report/001.ipynb
+++ b/docs/report/001.ipynb
@@ -1,29 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "hideCode": true,
- "hideOutput": true,
- "hidePrompt": true,
- "jupyter": {
- "source_hidden": true
- },
- "slideshow": {
- "slide_type": "skip"
- },
- "tags": [
- "remove-cell"
- ]
- },
- "outputs": [],
- "source": [
- "import os\n",
- "\n",
- "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
diff --git a/docs/report/002.ipynb b/docs/report/002.ipynb
index 94b56895..e0213b7f 100644
--- a/docs/report/002.ipynb
+++ b/docs/report/002.ipynb
@@ -1,29 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "hideCode": true,
- "hideOutput": true,
- "hidePrompt": true,
- "jupyter": {
- "source_hidden": true
- },
- "slideshow": {
- "slide_type": "skip"
- },
- "tags": [
- "remove-cell"
- ]
- },
- "outputs": [],
- "source": [
- "import os\n",
- "\n",
- "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
diff --git a/docs/report/003.ipynb b/docs/report/003.ipynb
index 1392e4a6..6ec82ac9 100644
--- a/docs/report/003.ipynb
+++ b/docs/report/003.ipynb
@@ -1,29 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "hideCode": true,
- "hideOutput": true,
- "hidePrompt": true,
- "jupyter": {
- "source_hidden": true
- },
- "slideshow": {
- "slide_type": "skip"
- },
- "tags": [
- "remove-cell"
- ]
- },
- "outputs": [],
- "source": [
- "import os\n",
- "\n",
- "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -111,8 +87,8 @@
"source": [
"%config InlineBackend.figure_formats = ['svg']\n",
"\n",
- "%matplotlib widget\n",
"import inspect\n",
+ "import os\n",
"import warnings\n",
"from functools import partial\n",
"from typing import Any\n",
@@ -139,7 +115,33 @@
"from tqdm.auto import tqdm\n",
"\n",
"warnings.filterwarnings(\"ignore\")\n",
- "PDG = qrules.load_pdg()"
+ "PDG = qrules.load_pdg()\n",
+ "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form",
+ "editable": true,
+ "jupyter": {
+ "source_hidden": true
+ },
+ "tags": [
+ "remove-cell"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "if \"COLAB_RELEASE_TAG\" in os.environ:\n",
+ " import subprocess\n",
+ "\n",
+ " from google.colab import output\n",
+ "\n",
+ " output.enable_custom_widget_manager()\n",
+ " subprocess.run(\"pip install -q ipympl\".split(), check=False)"
]
},
{
@@ -413,6 +415,19 @@
")"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "tags": [
+ "remove-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "%matplotlib widget"
+ ]
+ },
{
"cell_type": "code",
"execution_count": null,
diff --git a/docs/report/004.ipynb b/docs/report/004.ipynb
index 2b24752c..5e618685 100644
--- a/docs/report/004.ipynb
+++ b/docs/report/004.ipynb
@@ -1,29 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "hideCode": true,
- "hideOutput": true,
- "hidePrompt": true,
- "jupyter": {
- "source_hidden": true
- },
- "slideshow": {
- "slide_type": "skip"
- },
- "tags": [
- "remove-cell"
- ]
- },
- "outputs": [],
- "source": [
- "import os\n",
- "\n",
- "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -81,19 +57,6 @@
"%pip install -q ampform==0.14.10 sympy==1.12"
]
},
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "tags": [
- "remove-input"
- ]
- },
- "outputs": [],
- "source": [
- "%matplotlib widget"
- ]
- },
{
"cell_type": "code",
"execution_count": null,
@@ -126,6 +89,31 @@
"STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form",
+ "editable": true,
+ "jupyter": {
+ "source_hidden": true
+ },
+ "tags": [
+ "remove-cell"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "if \"COLAB_RELEASE_TAG\" in os.environ:\n",
+ " import subprocess\n",
+ "\n",
+ " from google.colab import output\n",
+ "\n",
+ " output.enable_custom_widget_manager()\n",
+ " subprocess.run(\"pip install -q ipympl\".split(), check=False)"
+ ]
+ },
{
"cell_type": "markdown",
"metadata": {},
@@ -281,6 +269,19 @@
" display(Image(output_file))"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "tags": [
+ "remove-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "%matplotlib widget"
+ ]
+ },
{
"cell_type": "code",
"execution_count": null,
diff --git a/docs/report/005.ipynb b/docs/report/005.ipynb
index 5b312aab..39dc1183 100644
--- a/docs/report/005.ipynb
+++ b/docs/report/005.ipynb
@@ -1,29 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "hideCode": true,
- "hideOutput": true,
- "hidePrompt": true,
- "jupyter": {
- "source_hidden": true
- },
- "slideshow": {
- "slide_type": "skip"
- },
- "tags": [
- "remove-cell"
- ]
- },
- "outputs": [],
- "source": [
- "import os\n",
- "\n",
- "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -91,19 +67,6 @@
"%pip install -q ampform==0.11.* matplotlib==3.5.1"
]
},
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "tags": [
- "remove-input"
- ]
- },
- "outputs": [],
- "source": [
- "%matplotlib widget"
- ]
- },
{
"cell_type": "code",
"execution_count": null,
@@ -119,7 +82,6 @@
"source": [
"from __future__ import annotations\n",
"\n",
- "import os\n",
"import warnings\n",
"\n",
"import graphviz\n",
@@ -133,8 +95,32 @@
"from matplotlib import cm\n",
"from mpl_interactions.controller import Controls\n",
"\n",
- "warnings.filterwarnings(\"ignore\")\n",
- "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
+ "warnings.filterwarnings(\"ignore\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form",
+ "editable": true,
+ "jupyter": {
+ "source_hidden": true
+ },
+ "tags": [
+ "remove-cell"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "if \"COLAB_RELEASE_TAG\" in os.environ:\n",
+ " import subprocess\n",
+ "\n",
+ " from google.colab import output\n",
+ "\n",
+ " output.enable_custom_widget_manager()\n",
+ " subprocess.run(\"pip install -q ipympl\".split(), check=False)"
]
},
{
@@ -784,6 +770,19 @@
":::"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "tags": [
+ "remove-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "%matplotlib widget"
+ ]
+ },
{
"cell_type": "code",
"execution_count": null,
diff --git a/docs/report/006.ipynb b/docs/report/006.ipynb
index 9971e591..d6dcf542 100644
--- a/docs/report/006.ipynb
+++ b/docs/report/006.ipynb
@@ -1,29 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "hideCode": true,
- "hideOutput": true,
- "hidePrompt": true,
- "jupyter": {
- "source_hidden": true
- },
- "slideshow": {
- "slide_type": "skip"
- },
- "tags": [
- "remove-cell"
- ]
- },
- "outputs": [],
- "source": [
- "import os\n",
- "\n",
- "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -78,19 +54,6 @@
""
]
},
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "tags": [
- "remove-input"
- ]
- },
- "outputs": [],
- "source": [
- "%matplotlib widget"
- ]
- },
{
"cell_type": "code",
"execution_count": null,
@@ -115,6 +78,31 @@
"STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form",
+ "editable": true,
+ "jupyter": {
+ "source_hidden": true
+ },
+ "tags": [
+ "remove-cell"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "if \"COLAB_RELEASE_TAG\" in os.environ:\n",
+ " import subprocess\n",
+ "\n",
+ " from google.colab import output\n",
+ "\n",
+ " output.enable_custom_widget_manager()\n",
+ " subprocess.run(\"pip install -q ipympl\".split(), check=False)"
+ ]
+ },
{
"cell_type": "markdown",
"metadata": {
@@ -234,6 +222,19 @@
"Matplotlib provides its own way to define {mod}`matplotlib.widgets`."
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "tags": [
+ "remove-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "%matplotlib widget"
+ ]
+ },
{
"cell_type": "code",
"execution_count": null,
diff --git a/docs/report/007.ipynb b/docs/report/007.ipynb
index 19e91e9a..6748a966 100644
--- a/docs/report/007.ipynb
+++ b/docs/report/007.ipynb
@@ -1,29 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "hideCode": true,
- "hideOutput": true,
- "hidePrompt": true,
- "jupyter": {
- "source_hidden": true
- },
- "slideshow": {
- "slide_type": "skip"
- },
- "tags": [
- "remove-cell"
- ]
- },
- "outputs": [],
- "source": [
- "import os\n",
- "\n",
- "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -81,12 +57,8 @@
},
"outputs": [],
"source": [
- "import os\n",
- "\n",
"import sympy as sp\n",
- "from IPython.display import display\n",
- "\n",
- "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
+ "from IPython.display import display"
]
},
{
diff --git a/docs/report/008.ipynb b/docs/report/008.ipynb
index bdea6c60..4017907d 100644
--- a/docs/report/008.ipynb
+++ b/docs/report/008.ipynb
@@ -1,29 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "hideCode": true,
- "hideOutput": true,
- "hidePrompt": true,
- "jupyter": {
- "source_hidden": true
- },
- "slideshow": {
- "slide_type": "skip"
- },
- "tags": [
- "remove-cell"
- ]
- },
- "outputs": [],
- "source": [
- "import os\n",
- "\n",
- "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
diff --git a/docs/report/009.ipynb b/docs/report/009.ipynb
index 16f1589e..c20c0999 100644
--- a/docs/report/009.ipynb
+++ b/docs/report/009.ipynb
@@ -1,29 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "hideCode": true,
- "hideOutput": true,
- "hidePrompt": true,
- "jupyter": {
- "source_hidden": true
- },
- "slideshow": {
- "slide_type": "skip"
- },
- "tags": [
- "remove-cell"
- ]
- },
- "outputs": [],
- "source": [
- "import os\n",
- "\n",
- "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -135,19 +111,6 @@
"In addition, one often uses an \"energy dependent\" [`coupled_width()`](https://ampform.readthedocs.io/en/0.10.5/api/ampform.dynamics.html#ampform.dynamics.coupled_width) $\\Gamma_R(m)$ instead of a fixed width $\\Gamma_R$ as done in [TR-005](005.ipynb)."
]
},
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "tags": [
- "remove-input"
- ]
- },
- "outputs": [],
- "source": [
- "%matplotlib widget"
- ]
- },
{
"cell_type": "code",
"execution_count": null,
@@ -191,6 +154,31 @@
"STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form",
+ "editable": true,
+ "jupyter": {
+ "source_hidden": true
+ },
+ "tags": [
+ "remove-cell"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "if \"COLAB_RELEASE_TAG\" in os.environ:\n",
+ " import subprocess\n",
+ "\n",
+ " from google.colab import output\n",
+ "\n",
+ " output.enable_custom_widget_manager()\n",
+ " subprocess.run(\"pip install -q ipympl\".split(), check=False)"
+ ]
+ },
{
"cell_type": "markdown",
"metadata": {},
@@ -678,6 +666,19 @@
":::"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "tags": [
+ "remove-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "%matplotlib widget"
+ ]
+ },
{
"cell_type": "code",
"execution_count": null,
diff --git a/docs/report/010.ipynb b/docs/report/010.ipynb
index 419dae57..563c5093 100644
--- a/docs/report/010.ipynb
+++ b/docs/report/010.ipynb
@@ -1,29 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "hideCode": true,
- "hideOutput": true,
- "hidePrompt": true,
- "jupyter": {
- "source_hidden": true
- },
- "slideshow": {
- "slide_type": "skip"
- },
- "tags": [
- "remove-cell"
- ]
- },
- "outputs": [],
- "source": [
- "import os\n",
- "\n",
- "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -72,19 +48,6 @@
"%pip install -q ampform==0.10.5 sympy==1.8"
]
},
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "tags": [
- "remove-input"
- ]
- },
- "outputs": [],
- "source": [
- "%matplotlib widget"
- ]
- },
{
"cell_type": "code",
"execution_count": null,
@@ -133,6 +96,31 @@
"STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form",
+ "editable": true,
+ "jupyter": {
+ "source_hidden": true
+ },
+ "tags": [
+ "remove-cell"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "if \"COLAB_RELEASE_TAG\" in os.environ:\n",
+ " import subprocess\n",
+ "\n",
+ " from google.colab import output\n",
+ "\n",
+ " output.enable_custom_widget_manager()\n",
+ " subprocess.run(\"pip install -q ipympl\".split(), check=False)"
+ ]
+ },
{
"cell_type": "markdown",
"metadata": {},
@@ -447,6 +435,19 @@
"## Visualization"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "tags": [
+ "remove-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "%matplotlib widget"
+ ]
+ },
{
"cell_type": "code",
"execution_count": null,
diff --git a/docs/report/011.ipynb b/docs/report/011.ipynb
index d30200e8..a36c9bd3 100644
--- a/docs/report/011.ipynb
+++ b/docs/report/011.ipynb
@@ -1,29 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "hideCode": true,
- "hideOutput": true,
- "hidePrompt": true,
- "jupyter": {
- "source_hidden": true
- },
- "slideshow": {
- "slide_type": "skip"
- },
- "tags": [
- "remove-cell"
- ]
- },
- "outputs": [],
- "source": [
- "import os\n",
- "\n",
- "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
diff --git a/docs/report/012.ipynb b/docs/report/012.ipynb
index b8573ac3..3c743742 100644
--- a/docs/report/012.ipynb
+++ b/docs/report/012.ipynb
@@ -1,29 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "hideCode": true,
- "hideOutput": true,
- "hidePrompt": true,
- "jupyter": {
- "source_hidden": true
- },
- "slideshow": {
- "slide_type": "skip"
- },
- "tags": [
- "remove-cell"
- ]
- },
- "outputs": [],
- "source": [
- "import os\n",
- "\n",
- "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
diff --git a/docs/report/013.ipynb b/docs/report/013.ipynb
index 1b4b10c1..7a18e3e3 100644
--- a/docs/report/013.ipynb
+++ b/docs/report/013.ipynb
@@ -1,29 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "hideCode": true,
- "hideOutput": true,
- "hidePrompt": true,
- "jupyter": {
- "source_hidden": true
- },
- "slideshow": {
- "slide_type": "skip"
- },
- "tags": [
- "remove-cell"
- ]
- },
- "outputs": [],
- "source": [
- "import os\n",
- "\n",
- "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
diff --git a/docs/report/014.ipynb b/docs/report/014.ipynb
index 2e067e93..530cee2f 100644
--- a/docs/report/014.ipynb
+++ b/docs/report/014.ipynb
@@ -1,29 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "hideCode": true,
- "hideOutput": true,
- "hidePrompt": true,
- "jupyter": {
- "source_hidden": true
- },
- "slideshow": {
- "slide_type": "skip"
- },
- "tags": [
- "remove-cell"
- ]
- },
- "outputs": [],
- "source": [
- "import os\n",
- "\n",
- "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -289,7 +265,7 @@
"\n",
"model = builder.formulate()\n",
"full_expression = remove_coefficients(model.expression)\n",
- "I = sp.Symbol(\"I\") # noqa: E741\n",
+ "I = sp.Symbol(\"I\")\n",
"latex = sp.multiline_latex(I, full_expression)\n",
"Math(latex)"
]
diff --git a/docs/report/015.ipynb b/docs/report/015.ipynb
index 147180a8..22ee0fa8 100644
--- a/docs/report/015.ipynb
+++ b/docs/report/015.ipynb
@@ -1,29 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "hideCode": true,
- "hideOutput": true,
- "hidePrompt": true,
- "jupyter": {
- "source_hidden": true
- },
- "slideshow": {
- "slide_type": "skip"
- },
- "tags": [
- "remove-cell"
- ]
- },
- "outputs": [],
- "source": [
- "import os\n",
- "\n",
- "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -158,7 +134,7 @@
"source": [
"\n",
"\n",
- "The specific {attr}`~qrules.transition.State.spin_projection`s for each {attr}`~qrules.transition.State.particle` only make sense _given a specific reference frame_. AmpForm's {class}`~ampform.helicity.HelicityAmplitudeBuilder` interprets these projections as the **helicity** $\\lambda=\\vec{S}\\cdot\\vec{p}$ of each particle _in the rest frame of the parent particle_. For example, the helicity $\\lambda_2=+\\tfrac{1}{2}$ of $\\bar p$ is the helicity as measured in the rest frame of resonance $\\bar\\Sigma(1660)^-$. The reason is that these helicities are needed when formulating the two-particle state for the decay node $\\bar\\Sigma(1660)^- \\to K^0\\bar p$ (see {doc}`ampform:usage/helicity/formalism`).\n",
+ "The specific {attr}`~qrules.transition.State.spin_projection`s for each {attr}`~qrules.transition.State.particle` only make sense _given a specific reference frame_. AmpForm's {class}`~ampform.helicity.HelicityAmplitudeBuilder` interprets these projections as the **helicity** $\\lambda=\\vec{S}\\cdot\\vec{p}$ of each particle _in the rest frame of the parent particle_. For example, the helicity $\\lambda_2=+\\tfrac{1}{2}$ of $\\bar p$ is the helicity as measured in the rest frame of resonance $\\bar\\Sigma(1660)^-$. The reason is that these helicities are needed when formulating the two-particle state for the decay node $\\bar\\Sigma(1660)^- \\to K^0\\bar p$ (see {external+ampform-0.14.x:doc}`ampform:usage/helicity/formalism`).\n",
"\n",
"Ignoring dynamics and coefficients, the {class}`~ampform.helicity.HelicityModel` for this single transition is rather simple:"
]
@@ -237,7 +213,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
- "Now, as {func}`~ampform.helicity.formulate_wigner_d` explains, the numbers that appear in the Wigner-$D$ functions here are computed from the helicities of the decay products. But there's a subtle problem: these helicities are _assumed to be in the rest frame of each parent particle_. For the first node, this is fine, because the parent particle rest frame matches that of the initial state in the {external+qrules-0.9.x:class}`.StateTransition` above. In the second node, however, we are in a different rest frame. This can result in phase differences for the different amplitudes.\n",
+ "Now, as {external+ampform-0.14.x:func}`~.formulate_wigner_d` explains, the numbers that appear in the Wigner-$D$ functions here are computed from the helicities of the decay products. But there's a subtle problem: these helicities are _assumed to be in the rest frame of each parent particle_. For the first node, this is fine, because the parent particle rest frame matches that of the initial state in the {external+qrules-0.9.x:class}`.StateTransition` above. In the second node, however, we are in a different rest frame. This can result in phase differences for the different amplitudes.\n",
"\n",
"If there is a single decay {class}`~qrules.topology.Topology` in the {class}`~qrules.transition.ReactionInfo` object for which we are formulating an amplitude model, the problem we identified here can be ignored. The reason is that the phase difference for each {external+qrules-0.9.x:class}`.StateTransition` (with each an identical decay {class}`~qrules.topology.Topology`) is the same and does not introduce interference effects within the coherent sum. It again becomes a problem, however, when we are formulating an amplitude model _with different topologies_. An example would be the following reaction:"
]
@@ -384,7 +360,7 @@
"\n",
"\n",
"\n",
- "The dashed edges and bars above the state IDs indicate \"opposite helicity\" states. The helicity of an **opposite helicity state** gets a minus sign in the Wigner-$D$ function for a two-body state as formulated by {func}`~ampform.helicity.formulate_wigner_d` (see {ref}`report/015:Helicity formalism`) and therefore needs to be defined consistently. AmpForm does this with {func}`~ampform.helicity.decay.is_opposite_helicity_state`.\n",
+ "The dashed edges and bars above the state IDs indicate \"opposite helicity\" states. The helicity of an **opposite helicity state** gets a minus sign in the Wigner-$D$ function for a two-body state as formulated by {external+ampform-0.14.x:func}`.formulate_wigner_d` (see {ref}`report/015:Helicity formalism`) and therefore needs to be defined consistently. AmpForm does this with {external+ampform-0.14.x:func}`.is_opposite_helicity_state`.\n",
"\n",
"Opposite helicity states are also of importance in the spin alignment procedure sketched by {cite}`marangottoHelicityAmplitudesGeneric2020`. The Wigner-$D$ functions that appear in Equations (45) and (46) from {cite}`marangottoHelicityAmplitudesGeneric2020`, operate on the spin of the final state, but the angles in the Wigner-$D$ function are taken from the sibling state:"
]
@@ -574,7 +550,7 @@
"source": [
"In this section, we test some of the functions from the {mod}`~ampform.helicity` and {mod}`~ampform.kinematics` modules to see if they reproduce Equations {eq}`alignment-R`, {eq}`alignment-S`, and {eq}`alignment-U`. We perform this test on the channel $J/\\psi \\to K^0 \\Sigma^+ \\bar{p}$ with resonances generated for each of the three allowed three-body topologies. The transition that corresponds to Equation {eq}`alignment-R` is shown below.\n",
"\n",
- "The first step is to use {func}`~ampform.helicity.formulate_helicity_rotation_chain` to generate the Wigner-$D$ functions for all **helicity rotations** for each final state. These helicity rotations \"undo\" all rotations that came from each Lorentz boosts when boosting from initial state $J/\\psi$ to each final state:"
+ "The first step is to use {external+ampform-0.14.x:func}`.formulate_helicity_rotation_chain` to generate the Wigner-$D$ functions for all **helicity rotations** for each final state. These helicity rotations \"undo\" all rotations that came from each Lorentz boosts when boosting from initial state $J/\\psi$ to each final state:"
]
},
{
@@ -693,7 +669,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
- "The function {func}`~ampform.helicity.formulate_rotation_chain` goes one step further. It adds a **Wigner rotation** to the generated list of helicity rotation Wigner-$D$ functions in case there are resonances in between the initial state and rotated final state. If there are no resonances in between (here, state `2`, the $\\bar p$), there is only one helicity rotation and there is no need for a Wigner rotation."
+ "The function {external+ampform-0.14.x:func}`.formulate_rotation_chain` goes one step further. It adds a **Wigner rotation** to the generated list of helicity rotation Wigner-$D$ functions in case there are resonances in between the initial state and rotated final state. If there are no resonances in between (here, state `2`, the $\\bar p$), there is only one helicity rotation and there is no need for a Wigner rotation."
]
},
{
@@ -792,7 +768,7 @@
"source": [
"**These are indeed all the terms that we see in Equation {eq}`alignment-R`!**\n",
"\n",
- "To create all sum combinations for all final states, we can use {func}`~ampform.helicity.formulate_spin_alignment`. This should give the sum of Eq.(45):"
+ "To create all sum combinations for all final states, we can use {external+ampform-0.14.x:func}`.formulate_spin_alignment`. This should give the sum of Eq.(45):"
]
},
{
@@ -1305,7 +1281,7 @@
"source": [
":::{note}\n",
"\n",
- "In the topology underlying {eq}`alignment-U`, the Wigner rotation matrix with angles $\\alpha_0^{12}, \\beta_0^{12}, \\gamma_0^{12}$ is simply the identity matrix. This is the reason why it can be omitted in {func}`~ampform.helicity.formulate_rotation_chain` and we only have one helicity rotation.\n",
+ "In the topology underlying {eq}`alignment-U`, the Wigner rotation matrix with angles $\\alpha_0^{12}, \\beta_0^{12}, \\gamma_0^{12}$ is simply the identity matrix. This is the reason why it can be omitted in {external+ampform-0.14.x:func}`.formulate_rotation_chain` and we only have one helicity rotation.\n",
"\n",
":::"
]
diff --git a/docs/report/016.ipynb b/docs/report/016.ipynb
index 30f50cce..702b2106 100644
--- a/docs/report/016.ipynb
+++ b/docs/report/016.ipynb
@@ -1,29 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "hideCode": true,
- "hideOutput": true,
- "hidePrompt": true,
- "jupyter": {
- "source_hidden": true
- },
- "slideshow": {
- "slide_type": "skip"
- },
- "tags": [
- "remove-cell"
- ]
- },
- "outputs": [],
- "source": [
- "import os\n",
- "\n",
- "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
diff --git a/docs/report/017.ipynb b/docs/report/017.ipynb
index 216520c3..5cccfc3f 100644
--- a/docs/report/017.ipynb
+++ b/docs/report/017.ipynb
@@ -1,29 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "hideCode": true,
- "hideOutput": true,
- "hidePrompt": true,
- "jupyter": {
- "source_hidden": true
- },
- "slideshow": {
- "slide_type": "skip"
- },
- "tags": [
- "remove-cell"
- ]
- },
- "outputs": [],
- "source": [
- "import os\n",
- "\n",
- "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -89,6 +65,7 @@
"\n",
"from __future__ import annotations\n",
"\n",
+ "import os\n",
"import warnings\n",
"from typing import TYPE_CHECKING\n",
"\n",
@@ -114,6 +91,31 @@
"warnings.filterwarnings(\"ignore\")"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form",
+ "editable": true,
+ "jupyter": {
+ "source_hidden": true
+ },
+ "tags": [
+ "remove-cell"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "if \"COLAB_RELEASE_TAG\" in os.environ:\n",
+ " import subprocess\n",
+ "\n",
+ " from google.colab import output\n",
+ "\n",
+ " output.enable_custom_widget_manager()\n",
+ " subprocess.run(\"pip install -q ipympl\".split(), check=False)"
+ ]
+ },
{
"cell_type": "markdown",
"metadata": {},
diff --git a/docs/report/018.ipynb b/docs/report/018.ipynb
index 1e040d32..f0015966 100644
--- a/docs/report/018.ipynb
+++ b/docs/report/018.ipynb
@@ -1,29 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "hideCode": true,
- "hideOutput": true,
- "hidePrompt": true,
- "jupyter": {
- "source_hidden": true
- },
- "slideshow": {
- "slide_type": "skip"
- },
- "tags": [
- "remove-cell"
- ]
- },
- "outputs": [],
- "source": [
- "import os\n",
- "\n",
- "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
diff --git a/docs/report/019.ipynb b/docs/report/019.ipynb
index f9791cde..ac6662c4 100644
--- a/docs/report/019.ipynb
+++ b/docs/report/019.ipynb
@@ -1,29 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "hideCode": true,
- "hideOutput": true,
- "hidePrompt": true,
- "jupyter": {
- "source_hidden": true
- },
- "slideshow": {
- "slide_type": "skip"
- },
- "tags": [
- "remove-cell"
- ]
- },
- "outputs": [],
- "source": [
- "import os\n",
- "\n",
- "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
diff --git a/docs/report/020.ipynb b/docs/report/020.ipynb
index 129eb57a..4a218fb9 100644
--- a/docs/report/020.ipynb
+++ b/docs/report/020.ipynb
@@ -1,29 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "hideCode": true,
- "hideOutput": true,
- "hidePrompt": true,
- "jupyter": {
- "source_hidden": true
- },
- "slideshow": {
- "slide_type": "skip"
- },
- "tags": [
- "remove-cell"
- ]
- },
- "outputs": [],
- "source": [
- "import os\n",
- "\n",
- "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
diff --git a/docs/report/021.ipynb b/docs/report/021.ipynb
index e6f685b7..5d96be0a 100644
--- a/docs/report/021.ipynb
+++ b/docs/report/021.ipynb
@@ -1,29 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "hideCode": true,
- "hideOutput": true,
- "hidePrompt": true,
- "jupyter": {
- "source_hidden": true
- },
- "slideshow": {
- "slide_type": "skip"
- },
- "tags": [
- "remove-cell"
- ]
- },
- "outputs": [],
- "source": [
- "import os\n",
- "\n",
- "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -101,10 +77,9 @@
},
"outputs": [],
"source": [
- "%matplotlib widget\n",
- "\n",
"from __future__ import annotations\n",
"\n",
+ "import os\n",
"import itertools\n",
"import logging\n",
"from typing import TYPE_CHECKING\n",
@@ -147,6 +122,7 @@
"\n",
"LOGGER = logging.getLogger()\n",
"LOGGER.setLevel(logging.ERROR)\n",
+ "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)\n",
"\n",
"PDG = qrules.load_pdg()\n",
"\n",
@@ -185,6 +161,31 @@
"sp.Indexed._latex = _print_Indexed_latex"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form",
+ "editable": true,
+ "jupyter": {
+ "source_hidden": true
+ },
+ "tags": [
+ "remove-cell"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "if \"COLAB_RELEASE_TAG\" in os.environ:\n",
+ " import subprocess\n",
+ "\n",
+ " from google.colab import output\n",
+ "\n",
+ " output.enable_custom_widget_manager()\n",
+ " subprocess.run(\"pip install -q ipympl\".split(), check=False)"
+ ]
+ },
{
"cell_type": "markdown",
"metadata": {
@@ -1842,6 +1843,19 @@
"Finally, all intensities can be computed as follows:"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "tags": [
+ "remove-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "%matplotlib widget"
+ ]
+ },
{
"cell_type": "code",
"execution_count": null,
diff --git a/docs/report/022.ipynb b/docs/report/022.ipynb
index c9c71a1d..939c2240 100644
--- a/docs/report/022.ipynb
+++ b/docs/report/022.ipynb
@@ -1,29 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "hideCode": true,
- "hideOutput": true,
- "hidePrompt": true,
- "jupyter": {
- "source_hidden": true
- },
- "slideshow": {
- "slide_type": "skip"
- },
- "tags": [
- "remove-cell"
- ]
- },
- "outputs": [],
- "source": [
- "import os\n",
- "\n",
- "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
diff --git a/docs/report/023.ipynb b/docs/report/023.ipynb
index 8c812b91..0f42cf37 100644
--- a/docs/report/023.ipynb
+++ b/docs/report/023.ipynb
@@ -1,29 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "hideCode": true,
- "hideOutput": true,
- "hidePrompt": true,
- "jupyter": {
- "source_hidden": true
- },
- "slideshow": {
- "slide_type": "skip"
- },
- "tags": [
- "remove-cell"
- ]
- },
- "outputs": [],
- "source": [
- "import os\n",
- "\n",
- "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
diff --git a/docs/report/024.ipynb b/docs/report/024.ipynb
index 188e7f4b..ffaf4a52 100644
--- a/docs/report/024.ipynb
+++ b/docs/report/024.ipynb
@@ -1,29 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "hideCode": true,
- "hideOutput": true,
- "hidePrompt": true,
- "jupyter": {
- "source_hidden": true
- },
- "slideshow": {
- "slide_type": "skip"
- },
- "tags": [
- "remove-cell"
- ]
- },
- "outputs": [],
- "source": [
- "import os\n",
- "\n",
- "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -48,7 +24,7 @@
":::{card} Symbolic expressions and model serialization\n",
"TR-024\n",
"^^^\n",
- "\n",
+ "\n",
"Investigation into dumping SymPy expressions to human-readable format for model preservation. The notebook was motivated by the [COMAP-V workshop on analysis preservation](https://indico.cern.ch/event/1348003/). See also SymPy [printing](https://docs.sympy.org/latest/modules/printing.html), [parsing](https://docs.sympy.org/latest/modules/parsing.html), and [expression manipulation](https://docs.sympy.org/latest/tutorials/intro-tutorial/manipulation.html).\n",
"+++\n",
"🚧 [polarimetry#319](https://github.com/ComPWA/polarimetry/pull/319)\n",
@@ -107,7 +83,6 @@
},
"outputs": [],
"source": [
- "import os\n",
"from pathlib import Path\n",
"from textwrap import shorten\n",
"\n",
diff --git a/docs/report/025.ipynb b/docs/report/025.ipynb
index fd5ba46f..ca0e7aa4 100644
--- a/docs/report/025.ipynb
+++ b/docs/report/025.ipynb
@@ -1,29 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "hideCode": true,
- "hideOutput": true,
- "hidePrompt": true,
- "jupyter": {
- "source_hidden": true
- },
- "slideshow": {
- "slide_type": "skip"
- },
- "tags": [
- "remove-cell"
- ]
- },
- "outputs": [],
- "source": [
- "import os\n",
- "\n",
- "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -100,8 +76,7 @@
},
"outputs": [],
"source": [
- "%matplotlib widget\n",
- "\n",
+ "import os\n",
"from typing import Any\n",
"\n",
"import matplotlib.pyplot as plt\n",
@@ -113,7 +88,34 @@
"from IPython.display import Image, Math, display\n",
"from ipywidgets import FloatSlider, VBox, interactive_output\n",
"from plotly.colors import DEFAULT_PLOTLY_COLORS\n",
- "from plotly.subplots import make_subplots"
+ "from plotly.subplots import make_subplots\n",
+ "\n",
+ "STATIC_WEB_PAGE = {\"EXECUTE_NB\", \"READTHEDOCS\"}.intersection(os.environ)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form",
+ "editable": true,
+ "jupyter": {
+ "source_hidden": true
+ },
+ "tags": [
+ "remove-cell"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "if \"COLAB_RELEASE_TAG\" in os.environ:\n",
+ " import subprocess\n",
+ "\n",
+ " from google.colab import output\n",
+ "\n",
+ " output.enable_custom_widget_manager()\n",
+ " subprocess.run(\"pip install -q ipympl\".split(), check=False)"
]
},
{
@@ -353,6 +355,19 @@
"In the following widget, we see what the new **rotated square root** looks like in the complex plane. The left panes show the imaginary part and the right side shows the real part. The upper figures show the value of the rotated square root on the real axis, $\\mathrm{Re}\\,z$."
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "tags": [
+ "remove-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "%matplotlib widget"
+ ]
+ },
{
"cell_type": "code",
"execution_count": null,
diff --git a/docs/report/026.ipynb b/docs/report/026.ipynb
new file mode 100755
index 00000000..d538a189
--- /dev/null
+++ b/docs/report/026.ipynb
@@ -0,0 +1,688 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "```{autolink-concat}\n",
+ "```"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "source": [
+ "::::{margin}\n",
+ ":::{card} Visualization of the Riemann sheets for the single-channel $T$ matrix with one resonance pole\n",
+ "TR-026\n",
+ "^^^\n",
+ "This report investigates and reproduces the Riemann sheets shown in [Fig. 50.1](https://pdg.lbl.gov/2023/reviews/rpp2023-rev-resonances.pdf#page=2) and [50.2](https://pdg.lbl.gov/2023/reviews/rpp2023-rev-resonances.pdf#page=4) of the PDG. The lineshape parametrization is directly derived with the $K$-matrix formalism. The transition from the first physical sheet to the second unphysical sheet is derived using analytic continuation.\n",
+ "+++\n",
+ "🚧 [ampform#67](https://github.com/ComPWA/ampform/issues/67)\n",
+ ":::\n",
+ "::::"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "source": [
+ "# Single-channel Riemann sheets"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "source": [
+ "The $T$ function can be extended into the complex plane. This results in $2^n$ Riemann sheets for $n$ channels, each starting at the threshold $s_{thr}=(m_1+m_2)^{2}$ of the two final state particles, the so-called branching point of the respective channel going along the so-called branch cut along the real axis where the function is not uniquely defined to $+\\infty$. This choice of the direction of the brach cut is most commonly used in particle physics. The physical Riemann sheet is defined for positive imaginary part (1st quadrant of the complex plane) and the unphysical Riemann sheets are only defined for negative imaginary part (4th quadrant of the complex plane). For the single-channel case there are two Riemann sheets, one physical and one unphysical."
+ ]
+ },
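+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "As a minimal, generic illustration of why the two half-planes have to be distinguished (this sketch is not specific to the $T$ function), the complex square root takes different values just above and just below its branch cut. The offset `1e-9j` is an arbitrary small number chosen for the illustration:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import numpy as np\n",
+ "\n",
+ "# The branch cut of the square root lies along the negative real axis:\n",
+ "# approaching it from above or from below gives complex conjugate values.\n",
+ "print(np.sqrt(-1 + 1e-9j))  # approximately +1j (approached from above)\n",
+ "print(np.sqrt(-1 - 1e-9j))  # approximately -1j (approached from below)"
+ ]
+ },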
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "tags": [
+ "remove-cell"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "%pip install -q ampform==0.15.0 plotly==5.18.0 sympy==1.12"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form",
+ "editable": true,
+ "jupyter": {
+ "source_hidden": true
+ },
+ "mystnb": {
+ "code_prompt_show": "Import Python libraries"
+ },
+ "tags": [
+ "hide-cell"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "from __future__ import annotations\n",
+ "\n",
+ "import warnings\n",
+ "from typing import Any\n",
+ "\n",
+ "import matplotlib.pyplot as plt\n",
+ "import numpy as np\n",
+ "import plotly.graph_objects as go\n",
+ "import sympy as sp\n",
+ "from ampform.io import aslatex\n",
+ "from ampform.sympy import unevaluated\n",
+ "from IPython.display import Math\n",
+ "\n",
+ "warnings.filterwarnings(\"ignore\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "source": [
+ "## Phase space factor definitions"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form",
+ "editable": true,
+ "jupyter": {
+ "source_hidden": true
+ },
+ "tags": [
+ "hide-input",
+ "scroll-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "from ampform.kinematics.phasespace import Kallen\n",
+ "\n",
+ "\n",
+ "@unevaluated(real=False)\n",
+ "class PhaseSpaceFactor(sp.Expr):\n",
+ " s: Any\n",
+ " m1: Any\n",
+ " m2: Any\n",
+ " _latex_repr_ = R\"\\rho_{{{m1}, {m2}}}\\left({s}\\right)\"\n",
+ "\n",
+ " def evaluate(self) -> sp.Expr:\n",
+ " s, m1, m2 = self.args\n",
+ " return sp.sqrt((s - ((m1 + m2) ** 2)) * (s - (m1 - m2) ** 2) / s**2)\n",
+ "\n",
+ "\n",
+ "@unevaluated(real=False)\n",
+ "class PhaseSpaceCM(sp.Expr):\n",
+ " s: Any\n",
+ " m1: Any\n",
+ " m2: Any\n",
+ " _latex_repr_ = R\"\\rho^\\mathrm{{CM}}_{{{m1},{m2}}}\\left({s}\\right)\"\n",
+ "\n",
+ " def evaluate(self) -> sp.Expr:\n",
+ " s, m1, m2 = self.args\n",
+ " return -16 * sp.pi * sp.I * ChewMandelstam(s, m1, m2)\n",
+ "\n",
+ "\n",
+ "@unevaluated(real=False)\n",
+ "class ChewMandelstam(sp.Expr):\n",
+ " s: Any\n",
+ " m1: Any\n",
+ " m2: Any\n",
+ " _latex_repr_ = R\"\\Sigma\\left({s}\\right)\"\n",
+ "\n",
+ " def evaluate(self) -> sp.Expr:\n",
+ " s, m1, m2 = self.args\n",
+ " q = BreakupMomentum(s, m1, m2)\n",
+ " return (\n",
+ " 1\n",
+ " / (16 * sp.pi**2)\n",
+ " * (\n",
+ " (2 * q / sp.sqrt(s))\n",
+ " * sp.log((m1**2 + m2**2 - s + 2 * sp.sqrt(s) * q) / (2 * m1 * m2))\n",
+ " - (m1**2 - m2**2) * (1 / s - 1 / (m1 + m2) ** 2) * sp.log(m1 / m2)\n",
+ " )\n",
+ " )\n",
+ "\n",
+ "\n",
+ "@unevaluated(real=False)\n",
+ "class BreakupMomentum(sp.Expr):\n",
+ " s: Any\n",
+ " m1: Any\n",
+ " m2: Any\n",
+ " _latex_repr_ = R\"q\\left({s}\\right)\"\n",
+ "\n",
+ " def evaluate(self) -> sp.Expr:\n",
+ " s, m1, m2 = self.args\n",
+ " return sp.sqrt(Kallen(s, m1**2, m2**2)) / (2 * sp.sqrt(s))\n",
+ "\n",
+ "\n",
+ "s, m1, m2 = sp.symbols(\"s m1 m2\")\n",
+ "rho_expr = PhaseSpaceFactor(s, m1, m2)\n",
+ "rho_cm_expr = PhaseSpaceCM(s, m1, m2)\n",
+ "cm_expr = ChewMandelstam(s, m1, m2)\n",
+ "q_expr = BreakupMomentum(s, m1, m2)\n",
+ "kallen = Kallen(*sp.symbols(\"x:z\"))\n",
+ "src = aslatex({\n",
+ " e: e.doit(deep=False) for e in [rho_expr, rho_cm_expr, cm_expr, q_expr, kallen]\n",
+ "})\n",
+ "Math(src)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "source": [
+ "## T matrix definition with K matrix"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "The dynamical part of the scattering amplitude is calculated via $K$ matrix formalism. In this report the single-channel case with one resonance pole is assumed."
+ ]
+ },
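+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "As a scalar preview of what the $1 \\times 1$ matrix machinery below produces (a sketch with generic symbols, not the classes defined above), inverting a $1 \\times 1$ matrix is just a division:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Sketch only: for one channel, T = K / (1 + i K rho),\n",
+ "# with rho standing for the phase space factor in the denominator\n",
+ "K_scalar, rho_scalar = sp.symbols(\"K rho\")\n",
+ "K_scalar / (1 + sp.I * K_scalar * rho_scalar)"
+ ]
+ },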
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form",
+ "editable": true,
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "n = 1\n",
+ "I = sp.Identity(n)\n",
+ "K = sp.MatrixSymbol(\"K\", n, n)\n",
+ "CM = sp.MatrixSymbol(R\"{\\rho_{cm}}\", n, n)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "T1 = (I + sp.I * K * CM).inv() * K\n",
+ "T1"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "T1_explicit = T1.as_explicit()\n",
+ "T1_explicit[0, 0]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form",
+ "editable": true,
+ "tags": [
+ "hide-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "g0, m0 = sp.symbols(R\"g^{0} m0\")\n",
+ "k_expr = (g0**2) / (s - m0**2)\n",
+ "definitions_I = {\n",
+ " K[0, 0]: k_expr,\n",
+ " CM[0, 0]: PhaseSpaceCM(s, m1, m2),\n",
+ "}\n",
+ "Math(aslatex(definitions_I))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "T1_expr = T1_explicit[0, 0].xreplace(definitions_I)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "T1_expr.simplify(doit=False)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Calculation of the second Riemann sheet\n",
+ "Since the $T$ function is real below the branch cut it can be shown that the discontinuity above and below the threshold reads as:"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "$$\n",
+ "CM(s+i\\epsilon)-CM(s-i\\epsilon)= i\\rho -(-i\\rho) =2i\\rho\n",
+ "$$"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "when $\\epsilon$ goes to zero.
\n",
+ "Which leads to:"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "$$\n",
+ "CM^{-1}_{\\mathrm{II}}(s-i\\epsilon)= Re(CM^{-1}_{\\mathrm{I}}(s-i\\epsilon))-i\\rho+2i\\rho\n",
+ "$$"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "For the the Amplitude for the second sheet is defined as:\n",
+ "\n",
+ ":::{card}\n",
+ "$$\n",
+ "A^{-1}_{\\mathrm{II}}(s)= A^{-1}_{\\mathrm{I}}(s)-2i\\rho\n",
+ "$$\n",
+ ":::"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "rho = sp.MatrixSymbol(\"rho\", n, n)\n",
+ "T2 = (T1.inv() + 2 * sp.I * rho).inv()\n",
+ "T2"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "definitions_II = {\n",
+ " **definitions_I,\n",
+ " rho[0, 0]: PhaseSpaceFactor(s, m1, m2),\n",
+ "}\n",
+ "T2_explicit = T2.as_explicit()\n",
+ "T2_expr = T2_explicit[0, 0].xreplace(definitions_II)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "T2_expr.simplify(doit=False)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "source": [
+ "## Visualization of the 2 dimensional lineshape"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "mystnb": {
+ "code_prompt_show": "Define numerical functions"
+ },
+ "tags": [
+ "hide-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "symbols = sp.Tuple(s, m1, m2, m0, g0)\n",
+ "T1_func = sp.lambdify(symbols, T1_expr.doit())\n",
+ "T2_func = sp.lambdify(symbols, T2_expr.doit())"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form",
+ "editable": true,
+ "mystnb": {
+ "code_prompt_show": "Define meshgrid and parameter values"
+ },
+ "tags": [
+ "hide-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "epsilon = 1e-5\n",
+ "x = np.linspace(0, 6, num=200)\n",
+ "y = np.linspace(epsilon, 1, num=100)\n",
+ "X, Y = np.meshgrid(x, y)\n",
+ "Zn = X - Y * 1j\n",
+ "Zp = X + Y * 1j\n",
+ "\n",
+ "values = {\n",
+ " m1: 0.9,\n",
+ " m2: 0.8,\n",
+ " m0: 3.1,\n",
+ " g0: 1.5,\n",
+ "}\n",
+ "args = eval(str(symbols[1:].xreplace(values)))\n",
+ "\n",
+ "T1n = T1_func(Zn**2, *args)\n",
+ "T1p = T1_func(Zp**2, *args)\n",
+ "\n",
+ "T2n = T2_func(Zn**2, *args)\n",
+ "T2p = T2_func(Zp**2, *args)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form",
+ "editable": true,
+ "jupyter": {
+ "source_hidden": true
+ },
+ "tags": [
+ "full-width",
+ "hide-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "%config InlineBackend.figure_formats = [\"svg\"]\n",
+ "\n",
+ "plt.rcParams.update({\"font.size\": 16})\n",
+ "fig, axes = plt.subplots(figsize=(15, 6), ncols=2, sharey=True)\n",
+ "ax1, ax2 = axes\n",
+ "for ax in axes:\n",
+ " ax.set_xlabel(R\"$\\mathrm{Re}\\,\\sqrt{s}$\")\n",
+ "ax1.set_ylabel(R\"$\\mathrm{Im}\\,T$\")\n",
+ "\n",
+ "ax1.plot(x, T1n[0].imag, label=R\"$T_\\mathrm{I}(s-0i)$\")\n",
+ "ax1.plot(x, T1p[0].imag, label=R\"$T_\\mathrm{I}(s+0i)$\")\n",
+ "ax1.set_title(f\"${sp.latex(rho_cm_expr)}$\")\n",
+ "ax1.set_title(R\"$T_\\mathrm{I}$\")\n",
+ "\n",
+ "ax2.plot(x, T2n[0].imag, label=R\"$T_\\mathrm{II}(s-0i)$\")\n",
+ "ax2.plot(x, T2p[0].imag, label=R\"$T_\\mathrm{II}(s+0i)$\")\n",
+ "ax2.set_title(R\"$T_\\mathrm{II}$\")\n",
+ "\n",
+ "for ax in axes:\n",
+ " ax.legend()\n",
+ "\n",
+ "fig.tight_layout()\n",
+ "plt.show()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "source": [
+ "The Amplitude for the second sheet is only defined for $s$ positive real part and negative complex part. It inherits the analytic structure of the phasespace factor $\\rho$ (the branch cut starting form zero and from $s=s_{thr}$ on the real axis). So it is only defined up to the closest branch cut which is in this case the cut at $s=s_{thr}$."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Visualization of the Riemann sheets"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form",
+ "editable": true,
+ "jupyter": {
+ "source_hidden": true
+ },
+ "tags": [
+ "hide-input",
+ "scroll-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "def sty(sheet_name: str) -> dict:\n",
+ " sheet_color = sheet_colors[sheet_name]\n",
+ " n_lines = 12\n",
+ " return dict(\n",
+ " cmin=-vmax,\n",
+ " cmax=+vmax,\n",
+ " colorscale=[[0, \"rgb(0, 0, 0)\"], [1, sheet_color]],\n",
+ " contours=dict(\n",
+ " x=dict(\n",
+ " show=True,\n",
+ " start=x.min(),\n",
+ " end=x.max(),\n",
+ " size=(x.max() - x.min()) / n_lines,\n",
+ " color=\"black\",\n",
+ " width=1,\n",
+ " ),\n",
+ " y=dict(\n",
+ " show=True,\n",
+ " start=-y.max(),\n",
+ " end=+y.max(),\n",
+ " size=(y.max() - y.min()) / (n_lines // 2),\n",
+ " color=\"black\",\n",
+ " width=1,\n",
+ " ),\n",
+ " ),\n",
+ " name=sheet_name,\n",
+ " opacity=0.4,\n",
+ " showscale=False,\n",
+ " )\n",
+ "\n",
+ "\n",
+ "vmax = 1.6\n",
+ "project = np.imag\n",
+ "sheet_colors = {\n",
+ " \"Physical (T1)\": \"blue\",\n",
+ " \"Unphysical (T2)\": \"red\",\n",
+ "}"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form",
+ "editable": true,
+ "jp-MarkdownHeadingCollapsed": true,
+ "jupyter": {
+ "source_hidden": true
+ },
+ "tags": [
+ "hide-input",
+ "scroll-input",
+ "full-width"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "Sp = go.Surface(x=X, y=Y, z=-T1p.imag, **sty(\"Physical (T1)\"))\n",
+ "Sn = go.Surface(x=X, y=-Y, z=-T2n.imag, **sty(\"Unphysical (T2)\"))\n",
+ "Sp.name = \"Physical sheet I\"\n",
+ "\n",
+ "s_thr = values[m1] + values[m2]\n",
+ "threshold_filter = x >= s_thr\n",
+ "lineshape = go.Scatter3d(\n",
+ " x=x[threshold_filter],\n",
+ " y=np.zeros(threshold_filter.shape),\n",
+ " z=project(-T1p[0])[threshold_filter],\n",
+ " line=dict(color=\"yellow\", width=10),\n",
+ " mode=\"lines\",\n",
+ " name=\"Lineshape\",\n",
+ ")\n",
+ "point = go.Scatter3d(\n",
+ " x=[s_thr],\n",
+ " y=[0],\n",
+ " z=[0],\n",
+ " mode=\"markers\",\n",
+ " marker=dict(color=\"black\", size=6),\n",
+ " name=\"Branch point\",\n",
+ ")\n",
+ "\n",
+ "fig = go.Figure(data=[Sn, Sp, lineshape, point])\n",
+ "fig.update_layout(\n",
+ " height=550,\n",
+ " margin=dict(l=0, r=0, t=30, b=0),\n",
+ " showlegend=True,\n",
+ " legend=dict(\n",
+ " orientation=\"v\",\n",
+ " xanchor=\"left\",\n",
+ " yanchor=\"top\",\n",
+ " x=0.05,\n",
+ " y=0.95,\n",
+ " font=dict(size=24),\n",
+ " ),\n",
+ " title_text=\"Im(T) with Chew-Mandelstam phase space factor\",\n",
+ " title_font=dict(size=28),\n",
+ " title=dict(y=0.989),\n",
+ ")\n",
+ "fig.update_scenes(\n",
+ " camera_center=dict(z=-0.2),\n",
+ " xaxis_title_text=\"Re √s\",\n",
+ " yaxis_title_text=\"Im √s\",\n",
+ " zaxis_title_text=\"Im T(s)\",\n",
+ " zaxis_range=[-vmax, +vmax],\n",
+ ")\n",
+ "\n",
+ "fig.show()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "source": [
+ "The lineshape, the part that is observed within the experiment, is given as the intersection of the Riemann sheets with real plane. Also note that the second Riemann sheets transitions smoothly into the first one. "
+ ]
+ },
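+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "For reference, the yellow curve from the figure above can also be drawn as a plain one-dimensional plot (a small sketch that reuses the arrays computed in the previous cells):"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# The same lineshape as the yellow trace above: -Im T along the real axis above threshold\n",
+ "fig, ax = plt.subplots(figsize=(6, 3))\n",
+ "ax.plot(x[threshold_filter], -T1p[0][threshold_filter].imag, color=\"C1\")\n",
+ "ax.set_xlabel(R\"$\\mathrm{Re}\\,\\sqrt{s}$\")\n",
+ "ax.set_ylabel(R\"$-\\mathrm{Im}\\,T_\\mathrm{I}(s)$\")\n",
+ "fig.tight_layout()\n",
+ "plt.show()"
+ ]
+ },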
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "source": [
+ ":::{attention}\n",
+ ":name: Discontinuity\n",
+ "Not that the second Riemann sheet also inherits the singularity at $s=0$, as it is derived from the common phasespace factor.\n",
+ ":::"
+ ]
+ }
+ ],
+ "metadata": {
+ "colab": {
+ "toc_visible": true
+ },
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.13"
+ },
+ "orphan": true
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}
diff --git a/docs/report/027.ipynb b/docs/report/027.ipynb
new file mode 100755
index 00000000..fd551e1d
--- /dev/null
+++ b/docs/report/027.ipynb
@@ -0,0 +1,1205 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "```{autolink-concat}\n",
+ "```"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "source": [
+ "::::{margin}\n",
+ ":::{card} Visualization of the Riemann sheets for the two-channel $T$-matrix with one pole\n",
+ "TR-027\n",
+ "^^^\n",
+ "Following **[TR-026](./026.ipynb)**, the Riemann sheets for the amplitude calculated within the $K$-matrix formalism for the two-channel case are visualized. The method of transitioning from the first physical sheet to the unphysical sheets is extended to the two dimensional case using [Eur. Phys. J. C (2023) 83:850](https://juser.fz-juelich.de/record/1017534/files/s10052-023-11953-6.pdf) in order to visualize the third and the fourth unphysical sheet.\n",
+ "+++\n",
+ "🚧 [ampform#67](https://github.com/ComPWA/ampform/issues/67)\n",
+ ":::\n",
+ "::::"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "source": [
+ "# Coupled channel Riemann sheets\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "tags": [
+ "remove-cell"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "%pip install -q ampform==0.15.0 plotly==5.18.0 sympy==1.12"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form",
+ "editable": true,
+ "jupyter": {
+ "source_hidden": true
+ },
+ "mystnb": {
+ "code_prompt_show": "Import Python libraries"
+ },
+ "tags": [
+ "hide-cell"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "from __future__ import annotations\n",
+ "\n",
+ "import os\n",
+ "import warnings\n",
+ "from typing import Any\n",
+ "\n",
+ "import matplotlib.pyplot as plt\n",
+ "import numpy as np\n",
+ "import plotly.graph_objects as go\n",
+ "import sympy as sp\n",
+ "from ampform.io import aslatex\n",
+ "from ampform.kinematics.phasespace import Kallen\n",
+ "from ampform.sympy import unevaluated\n",
+ "from IPython.display import Math, display\n",
+ "from ipywidgets import widgets as w\n",
+ "from plotly.colors import DEFAULT_PLOTLY_COLORS\n",
+ "from plotly.subplots import make_subplots\n",
+ "\n",
+ "warnings.filterwarnings(\"ignore\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form",
+ "editable": true,
+ "jupyter": {
+ "source_hidden": true
+ },
+ "tags": [
+ "remove-cell"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "if \"COLAB_RELEASE_TAG\" in os.environ:\n",
+ " import subprocess\n",
+ "\n",
+ " from google.colab import output\n",
+ "\n",
+ " output.enable_custom_widget_manager()\n",
+ " subprocess.run(\"pip install -q ipympl\".split(), check=False)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "source": [
+ "## Expression definitions"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form",
+ "editable": true,
+ "jupyter": {
+ "source_hidden": true
+ },
+ "tags": [
+ "hide-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "@unevaluated(real=False)\n",
+ "class PhaseSpaceFactor(sp.Expr):\n",
+ " s: Any\n",
+ " m1: Any\n",
+ " m2: Any\n",
+ " _latex_repr_ = R\"\\rho_{{{m1}, {m2}}}\\left({s}\\right)\"\n",
+ "\n",
+ " def evaluate(self) -> sp.Expr:\n",
+ " s, m1, m2 = self.args\n",
+ " return sp.sqrt((s - ((m1 + m2) ** 2)) * (s - (m1 - m2) ** 2) / s**2)\n",
+ "\n",
+ "\n",
+ "s, m1, m2 = sp.symbols(\"s m1 m2\")\n",
+ "rho_expr = PhaseSpaceFactor(s, m1, m2)\n",
+ "Math(aslatex({rho_expr: rho_expr.doit(deep=False)}))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form",
+ "editable": true,
+ "jupyter": {
+ "source_hidden": true
+ },
+ "tags": [
+ "hide-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "@unevaluated(real=False)\n",
+ "class PhaseSpaceFactorKallen(sp.Expr):\n",
+ " s: Any\n",
+ " m1: Any\n",
+ " m2: Any\n",
+ " _latex_repr_ = R\"\\rho_{{{m1}, {m2}}}\\left({s}\\right)\"\n",
+ "\n",
+ " def evaluate(self) -> sp.Expr:\n",
+ " s, m1, m2 = self.args\n",
+ " return 2 * BreakupMomentum(s, m1, m2) / sp.sqrt(s)\n",
+ "\n",
+ "\n",
+ "@unevaluated(real=False)\n",
+ "class PhaseSpaceCM(sp.Expr):\n",
+ " s: Any\n",
+ " m1: Any\n",
+ " m2: Any\n",
+ " _latex_repr_ = R\"\\rho^\\mathrm{{CM}}_{{{m1},{m2}}}\\left({s}\\right)\"\n",
+ "\n",
+ " def evaluate(self) -> sp.Expr:\n",
+ " s, m1, m2 = self.args\n",
+ " return -16 * sp.pi * sp.I * ChewMandelstam(s, m1, m2)\n",
+ "\n",
+ "\n",
+ "@unevaluated(real=False)\n",
+ "class ChewMandelstam(sp.Expr):\n",
+ " s: Any\n",
+ " m1: Any\n",
+ " m2: Any\n",
+ " _latex_repr_ = R\"\\Sigma\\left({s}\\right)\"\n",
+ "\n",
+ " def evaluate(self) -> sp.Expr:\n",
+ " s, m1, m2 = self.args\n",
+ " q = BreakupMomentum(s, m1, m2)\n",
+ " return (\n",
+ " 1\n",
+ " / (16 * sp.pi**2)\n",
+ " * (\n",
+ " (2 * q / sp.sqrt(s))\n",
+ " * sp.log((m1**2 + m2**2 - s + 2 * sp.sqrt(s) * q) / (2 * m1 * m2))\n",
+ " - (m1**2 - m2**2) * (1 / s - 1 / (m1 + m2) ** 2) * sp.log(m1 / m2)\n",
+ " )\n",
+ " )\n",
+ "\n",
+ "\n",
+ "@unevaluated(real=False)\n",
+ "class BreakupMomentum(sp.Expr):\n",
+ " s: Any\n",
+ " m1: Any\n",
+ " m2: Any\n",
+ " _latex_repr_ = R\"q\\left({s}\\right)\"\n",
+ "\n",
+ " def evaluate(self) -> sp.Expr:\n",
+ " s, m1, m2 = self.args\n",
+ " return sp.sqrt(Kallen(s, m1**2, m2**2)) / (2 * sp.sqrt(s))\n",
+ "\n",
+ "\n",
+ "s, m1, m2 = sp.symbols(\"s m1 m2\")\n",
+ "rho_expr_kallen = PhaseSpaceFactorKallen(s, m1, m2)\n",
+ "rho_cm_expr = PhaseSpaceCM(s, m1, m2)\n",
+ "cm_expr = ChewMandelstam(s, m1, m2)\n",
+ "q_expr = BreakupMomentum(s, m1, m2)\n",
+ "kallen = Kallen(*sp.symbols(\"x:z\"))\n",
+ "Math(\n",
+ " aslatex({\n",
+ " e: e.doit(deep=False)\n",
+ " for e in [rho_expr_kallen, rho_cm_expr, cm_expr, q_expr, kallen]\n",
+ " })\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "source": [
+ "## Riemann sheet I"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "source": [
+ "### Matrix definition"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form",
+ "editable": true,
+ "jupyter": {
+ "source_hidden": true
+ },
+ "tags": [
+ "hide-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "class DiagonalMatrix(sp.DiagonalMatrix):\n",
+ " def _latex(self, printer, *args):\n",
+ " return printer._print(self.args[0])\n",
+ "\n",
+ "\n",
+ "n = 2\n",
+ "I = sp.Identity(n)\n",
+ "K = sp.MatrixSymbol(\"K\", n, n)\n",
+ "CM = DiagonalMatrix(sp.MatrixSymbol(R\"\\rho^\\Sigma\", n, n))\n",
+ "Math(aslatex({CM: CM.as_explicit()}))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "T_I = (I - sp.I * K * CM).inv() * K\n",
+ "T_I"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "T_I_explicit = T_I.as_explicit()\n",
+ "T_I_explicit[0, 0].simplify(doit=False)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "source": [
+ "### Parametrization"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form",
+ "editable": true,
+ "jupyter": {
+ "source_hidden": true
+ },
+ "mystnb": {
+ "code_prompt_show": "Symbol definitions"
+ },
+ "tags": [
+ "hide-cell"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "s = sp.Symbol(\"s\")\n",
+ "ma1 = sp.Symbol(\"m_{a1}\")\n",
+ "mb1 = sp.Symbol(\"m_{b1}\")\n",
+ "ma2 = sp.Symbol(\"m_{a2}\")\n",
+ "mb2 = sp.Symbol(\"m_{b2}\")\n",
+ "m0 = sp.Symbol(\"m0\")\n",
+ "w0 = sp.Symbol(\"Gamma0\")\n",
+ "g1 = sp.Symbol(R\"g^{0}_1\")\n",
+ "g2 = sp.Symbol(R\"g^{0}_2\")\n",
+ "symbols = sp.Tuple(s, ma1, mb1, ma2, mb2, m0, g1, g2)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form",
+ "editable": true,
+ "jupyter": {
+ "source_hidden": true
+ },
+ "tags": [
+ "hide-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "k_expr_00 = (g1 * g1 * m0) / (s - m0**2)\n",
+ "k_expr_10 = (g1 * g2 * m0) / (s - m0**2)\n",
+ "k_expr_11 = (g2 * g2 * m0) / (s - m0**2)\n",
+ "cm_expressions = {\n",
+ " K[0, 0]: k_expr_00,\n",
+ " K[1, 1]: k_expr_11,\n",
+ " K[0, 1]: k_expr_10,\n",
+ " K[1, 0]: k_expr_10,\n",
+ " CM[0, 0]: -PhaseSpaceCM(s, ma1, mb1),\n",
+ " CM[1, 1]: -PhaseSpaceCM(s, ma2, mb2),\n",
+ "}\n",
+ "Math(aslatex(cm_expressions))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "T_I_cm_expr = T_I_explicit.xreplace(cm_expressions)\n",
+ "T_I_cm_expr[0, 0].simplify(doit=False)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "source": [
+ "## Sheets II, III, and IV"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "source": [
+ "In the case of two channels, there are four Riemann sheets. The first sheet ([Sheet I](#riemann-sheet-i)) is physical and three unphysical ones. The physical sheet is calculated using the analytic solution of the Chew-Mandelstam function.\n",
+ "\n",
+ "$$\n",
+ "\\begin{eqnarray}\n",
+ "\\operatorname{Disc}_{\\mathrm{I,II}} T_K^{-1}\n",
+ "&=& 2 i\\left[\\begin{array}{rr}\\rho_1 & 0 \\\\ 0 & 0 \\end{array}\\right], \\\\\n",
+ "\\operatorname{Disc}_{\\mathrm{I,III}} T_K^{-1}\n",
+ "&=& 2 i\\left[\\begin{array}{rr}\\rho_1 & 0 \\\\ 0 & \\rho_2 \\end{array}\\right], \\\\\n",
+ "\\operatorname{Disc}_{\\mathrm{I,IV}} T_K^{-1}\n",
+ "&=& 2 i\\left[\\begin{array}{rr}0 & 0 \\\\ 0& \\rho_2 \\end{array}\\right].\n",
+ "\\end{eqnarray}\n",
+ "$$\n",
+ "\n",
+ "Depending on the centre-of-mass energy, different Riemann sheets connect smoothly to the physical one. Therefore, two cases are studied: one where the resonance mass is above the threshold of the second and first channel, and another where the resonance mass is between the threshold of the first and second channel."
+ ]
+ },
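+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "To make the bookkeeping explicit, the three discontinuity matrices can be written out directly as diagonal SymPy matrices. This is a small sketch with placeholder symbols $\\rho_1, \\rho_2$; the notebook itself works with the `MatrixSymbol` defined in the next cell:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Sketch only: spell out the three discontinuity matrices with placeholder symbols\n",
+ "rho1, rho2 = sp.symbols(\"rho_1 rho_2\")\n",
+ "disc_I_II = 2 * sp.I * sp.diag(rho1, 0)\n",
+ "disc_I_III = 2 * sp.I * sp.diag(rho1, rho2)\n",
+ "disc_I_IV = 2 * sp.I * sp.diag(0, rho2)\n",
+ "Math(aslatex([disc_I_II, disc_I_III, disc_I_IV]))"
+ ]
+ },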
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form",
+ "editable": true,
+ "jupyter": {
+ "source_hidden": true
+ },
+ "tags": [
+ "hide-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "rho = DiagonalMatrix(sp.MatrixSymbol(\"rho\", n, n))\n",
+ "Math(aslatex({rho: rho.as_explicit()}))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "T_II = (T_I.inv() + 2 * sp.I * rho).inv()\n",
+ "T_III = (T_I.inv() + 2 * sp.I * rho).inv()\n",
+ "T_IV = (-T_I.inv() - 2 * sp.I * rho).inv()\n",
+ "Math(aslatex([T_II, T_III, T_IV]))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "tags": [
+ "full-width"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "T_II_explicit = T_II.as_explicit()\n",
+ "T_II_explicit[0, 0].simplify(doit=False)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "tags": [
+ "full-width"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "T_III_explicit = T_III.as_explicit()\n",
+ "T_III_explicit[0, 0].simplify(doit=False)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "tags": [
+ "full-width"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "T_IV_explicit = T_IV.as_explicit()\n",
+ "T_IV_explicit[0, 0].simplify(doit=False)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "rho_expressions_II = {\n",
+ " **cm_expressions,\n",
+ " rho[0, 0]: PhaseSpaceFactor(s, ma1, mb1),\n",
+ " rho[1, 1]: 0,\n",
+ "}\n",
+ "rho_expressions_III = {\n",
+ " **cm_expressions,\n",
+ " rho[0, 0]: PhaseSpaceFactor(s, ma1, mb1),\n",
+ " rho[1, 1]: PhaseSpaceFactor(s, ma2, mb2),\n",
+ "}\n",
+ "rho_expressions_IV = {\n",
+ " **cm_expressions,\n",
+ " rho[0, 0]: 0,\n",
+ " rho[1, 1]: PhaseSpaceFactor(s, ma2, mb2),\n",
+ "}"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form"
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "T_II_rho_expr = T_II_explicit.xreplace(rho_expressions_II)\n",
+ "T_III_rho_expr = T_III_explicit.xreplace(rho_expressions_III)\n",
+ "T_IV_rho_expr = T_IV_explicit.xreplace(rho_expressions_IV)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "T_II_rho_expr[0, 0].simplify(doit=False)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "T_III_rho_expr[0, 0].simplify(doit=False)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Visualizations"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "jp-MarkdownHeadingCollapsed": true
+ },
+ "source": [
+ "### Lineshapes (real axis)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form",
+ "editable": true,
+ "jupyter": {
+ "source_hidden": true
+ },
+ "tags": [
+ "full-width",
+ "hide-input",
+ "scroll-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "%config InlineBackend.figure_formats = [\"svg\"]\n",
+ "\n",
+ "T_I_func = sp.lambdify(symbols, T_I_cm_expr[0, 0].doit())\n",
+ "T_II_func = sp.lambdify(symbols, T_II_rho_expr[0, 0].doit())\n",
+ "T_III_func = sp.lambdify(symbols, T_III_rho_expr[0, 0].doit())\n",
+ "T_IV_func = sp.lambdify(symbols, T_IV_rho_expr[0, 0].doit())\n",
+ "parameter_defaults1 = {\n",
+ " ma1: 1.0,\n",
+ " mb1: 1.5,\n",
+ " ma2: 1.5,\n",
+ " mb2: 2.0,\n",
+ " m0: 4.0,\n",
+ " g1: 0.7,\n",
+ " g2: 0.7,\n",
+ "}\n",
+ "parameter_defaults2 = {\n",
+ " **parameter_defaults1,\n",
+ " m0: 3.0,\n",
+ "}\n",
+ "args1 = eval(str(symbols[1:].xreplace(parameter_defaults1)))\n",
+ "args2 = eval(str(symbols[1:].xreplace(parameter_defaults2)))\n",
+ "\n",
+ "epsilon = 1e-5\n",
+ "x = np.linspace(0, 8, num=300)\n",
+ "y = np.linspace(epsilon, 1, num=100)\n",
+ "X, Y = np.meshgrid(x, y)\n",
+ "Zn = X - Y * 1j\n",
+ "Zp = X + Y * 1j\n",
+ "\n",
+ "T1n_res1 = T_I_func(Zn**2, *args1)\n",
+ "T1p_res1 = T_I_func(Zp**2, *args1)\n",
+ "\n",
+ "T2n_res1 = T_II_func(Zn**2, *args1)\n",
+ "T2p_res1 = T_II_func(Zp**2, *args1)\n",
+ "\n",
+ "T3n_res1 = T_III_func(Zn**2, *args1)\n",
+ "T3p_res1 = T_III_func(Zp**2, *args1)\n",
+ "\n",
+ "T4n_res1 = T_IV_func(Zn**2, *args1)\n",
+ "T4p_res1 = T_IV_func(Zp**2, *args1)\n",
+ "\n",
+ "T1n_res2 = T_I_func(Zn**2, *args2)\n",
+ "T1p_res2 = T_I_func(Zp**2, *args2)\n",
+ "\n",
+ "T2n_res2 = T_II_func(Zn**2, *args2)\n",
+ "T2p_res2 = T_II_func(Zp**2, *args2)\n",
+ "\n",
+ "T3n_res2 = T_III_func(Zn**2, *args2)\n",
+ "T3p_res2 = T_III_func(Zp**2, *args2)\n",
+ "\n",
+ "T4n_res2 = T_IV_func(Zn**2, *args2)\n",
+ "T4p_res2 = T_IV_func(Zp**2, *args2)\n",
+ "\n",
+ "fig, axes = plt.subplots(figsize=(11, 6), ncols=4, sharey=True)\n",
+ "ax1, ax2, ax3, ax4 = axes.flatten()\n",
+ "\n",
+ "ax1.plot(x, T1n_res1[0].imag, label=R\"$T_\\mathrm{I}(s-0i)$\")\n",
+ "ax1.plot(x, T1p_res1[0].imag, label=R\"$T_\\mathrm{I}(s+0i)$\")\n",
+ "ax1.set_title(f\"${sp.latex(rho_cm_expr)}$\")\n",
+ "ax1.set_title(R\"$T_\\mathrm{I}$\")\n",
+ "\n",
+ "ax2.plot(x, T2n_res1[0].imag, label=R\"$T_\\mathrm{II}(s-0i)$\")\n",
+ "ax2.plot(x, T2p_res1[0].imag, label=R\"$T_\\mathrm{II}(s+0i)$\")\n",
+ "ax2.set_title(R\"$T_\\mathrm{II}$\")\n",
+ "\n",
+ "ax3.plot(x, T3n_res1[0].imag, label=R\"$T_\\mathrm{III}(s-0i)$\")\n",
+ "ax3.plot(x, T3p_res1[0].imag, label=R\"$T_\\mathrm{III}(s+0i)$\")\n",
+ "ax3.set_title(R\"$T_\\mathrm{III}$\")\n",
+ "\n",
+ "ax4.plot(x, T4n_res1[0].imag, label=R\"$T_\\mathrm{III}(s-0i)$\")\n",
+ "ax4.plot(x, T4p_res1[0].imag, label=R\"$T_\\mathrm{IV}(s+0i)$\")\n",
+ "ax4.set_title(R\"$T_\\mathrm{III}$\")\n",
+ "\n",
+ "for ax in axes:\n",
+ " ax.legend()\n",
+ " ax.set_xlabel(R\"$\\mathrm{Re}\\,\\sqrt{s}$\")\n",
+ " ax.set_ylim(-1, +1)\n",
+ "ax1.set_ylabel(R\"$\\mathrm{Im}\\,T(s)$ (a.u.)\")\n",
+ "\n",
+ "fig.tight_layout()\n",
+ "plt.show()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "source": [
+ "### Complex plane (2D)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "tags": []
+ },
+ "source": [
+ "It can be shown that if the resonance mass is above both thresholds the third sheet connects smoothly to the first sheet. If the resonance mass is above the first and below the second threshold the second sheet transitions smoothly into the first sheet."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form",
+ "editable": true,
+ "jupyter": {
+ "source_hidden": true
+ },
+ "tags": [
+ "full-width",
+ "scroll-input",
+ "hide-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "%config InlineBackend.figure_formats = [\"png\"]\n",
+ "\n",
+ "fig, axes = plt.subplots(figsize=(12, 8), ncols=2, nrows=2, sharey=True)\n",
+ "ax1, ax2, ax3, ax4 = axes.flatten()\n",
+ "\n",
+ "for ax in axes.flatten():\n",
+ " ax.set_xlabel(R\"$\\mathrm{Re}\\,\\sqrt{s}$\")\n",
+ "for ax in axes[:, 0]:\n",
+ " ax.set_ylabel(R\"$\\mathrm{Im}\\,\\sqrt{s}$\")\n",
+ "\n",
+ "ax1.set_title(\"I and II\")\n",
+ "ax2.set_title(\"I and III\")\n",
+ "ax3.set_title(\"I and II\")\n",
+ "ax4.set_title(\"I and III\")\n",
+ "\n",
+ "T_max = 2\n",
+ "\n",
+ "style = dict(vmin=-T_max, vmax=+T_max, cmap=plt.cm.coolwarm)\n",
+ "mesh = ax1.pcolormesh(X, Y, T1p_res1.imag, **style)\n",
+ "ax1.pcolormesh(X, -Y, T2n_res1.imag, **style)\n",
+ "ax2.pcolormesh(X, +Y, T1p_res1.imag, **style)\n",
+ "ax2.pcolormesh(X, -Y, T3n_res1.imag, **style)\n",
+ "ax3.pcolormesh(X, +Y, T1p_res2.imag, **style)\n",
+ "ax3.pcolormesh(X, -Y, T2n_res2.imag, **style)\n",
+ "ax4.pcolormesh(X, +Y, T1p_res2.imag, **style)\n",
+ "ax4.pcolormesh(X, -Y, T3n_res2.imag, **style)\n",
+ "\n",
+ "s_thr1 = parameter_defaults1[ma1] + parameter_defaults1[mb1]\n",
+ "s_thr2 = parameter_defaults1[ma2] + parameter_defaults1[mb2]\n",
+ "linestyle = dict(ls=\"dotted\", lw=1)\n",
+ "for ax in axes.flatten():\n",
+ " ax.axhline(0, c=\"black\", **linestyle)\n",
+ " ax.axvline(s_thr1, c=\"C0\", **linestyle, label=R\"$\\sqrt{s_\\mathrm{thr1}}$\")\n",
+ " ax.axvline(s_thr2, c=\"C1\", **linestyle, label=R\"$\\sqrt{s_\\mathrm{thr2}}$\")\n",
+ "linestyle = dict(c=\"r\", ls=\"dotted\", label=R\"$m_\\mathrm{res}$\")\n",
+ "for ax in axes[0]:\n",
+ " ax.axvline(parameter_defaults1[m0], **linestyle)\n",
+ "for ax in axes[1]:\n",
+ " ax.axvline(parameter_defaults2[m0], **linestyle)\n",
+ "ax2.legend()\n",
+ "\n",
+ "fig.text(0.5, 0.93, R\"$s_{thr1} dict:\n",
+ " sheet_color = sheet_colors[sheet_name]\n",
+ " n_lines = 16\n",
+ " return dict(\n",
+ " cmin=-vmax,\n",
+ " cmax=+vmax,\n",
+ " colorscale=[[0, \"rgb(0, 0, 0)\"], [1, sheet_color]],\n",
+ " contours=dict(\n",
+ " x=dict(\n",
+ " show=True,\n",
+ " start=x.min(),\n",
+ " end=x.max(),\n",
+ " size=(x.max() - x.min()) / n_lines,\n",
+ " color=\"black\",\n",
+ " ),\n",
+ " y=dict(\n",
+ " show=True,\n",
+ " start=-y.max(),\n",
+ " end=+y.max(),\n",
+ " size=(y.max() - y.min()) / (n_lines // 2),\n",
+ " color=\"black\",\n",
+ " ),\n",
+ " ),\n",
+ " name=sheet_name,\n",
+ " opacity=0.4,\n",
+ " showscale=False,\n",
+ " )\n",
+ "\n",
+ "\n",
+ "vmax = 2.0\n",
+ "project = np.imag\n",
+ "sheet_colors = {\n",
+ " \"T1 (physical)\": \"blue\",\n",
+ " \"T2 (unphysical)\": \"red\",\n",
+ " \"T3 (unphysical)\": \"green\",\n",
+ " \"T4 (unphysical)\": \"yellow\",\n",
+ "}"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form",
+ "editable": true,
+ "jupyter": {
+ "source_hidden": true
+ },
+ "tags": [
+ "hide-input",
+ "full-width",
+ "scroll-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "Sp_I_res1 = go.Surface(x=X, y=+Y, z=T1p_res1.imag, **sty(\"T1 (physical)\"))\n",
+ "Sn_II_res1 = go.Surface(x=X, y=-Y, z=T2n_res1.imag, **sty(\"T2 (unphysical)\"))\n",
+ "Sn_III_res1 = go.Surface(x=X, y=-Y, z=T3n_res1.imag, **sty(\"T3 (unphysical)\"))\n",
+ "\n",
+ "Sp_I_res2 = go.Surface(x=X, y=+Y, z=T1p_res2.imag, **sty(\"T1 (physical)\"))\n",
+ "Sn_II_res2 = go.Surface(x=X, y=-Y, z=T2n_res2.imag, **sty(\"T2 (unphysical)\"))\n",
+ "Sn_III_res2 = go.Surface(x=X, y=-Y, z=T3n_res2.imag, **sty(\"T3 (unphysical)\"))\n",
+ "\n",
+ "thr1_filter = x >= s_thr1\n",
+ "thr2_filter = x >= s_thr2\n",
+ "\n",
+ "line_kwargs = dict(\n",
+ " line=dict(color=\"yellow\", width=8),\n",
+ " mode=\"lines\",\n",
+ " name=\"Lineshape\",\n",
+ ")\n",
+ "lineshape_res1_z = project(T1p_res1[0])\n",
+ "lineshape_res2_z = project(T1p_res2[0])\n",
+ "lineshape_res1 = go.Scatter3d(\n",
+ " x=x[thr1_filter],\n",
+ " y=np.zeros(thr1_filter.shape),\n",
+ " z=lineshape_res1_z[thr1_filter],\n",
+ " **line_kwargs,\n",
+ ")\n",
+ "lineshape_res2 = go.Scatter3d(\n",
+ " x=x[thr1_filter],\n",
+ " y=np.zeros(thr1_filter.shape),\n",
+ " z=lineshape_res2_z[thr1_filter],\n",
+ " **line_kwargs,\n",
+ ")\n",
+ "\n",
+ "point_kwargs = dict(\n",
+ " hoverinfo=\"text\",\n",
+ " marker=dict(color=DEFAULT_PLOTLY_COLORS[:2], size=6),\n",
+ " mode=\"markers\",\n",
+ " text=[\"threshold 1\", \"threshold 2\"],\n",
+ ")\n",
+ "thr_points_res1 = go.Scatter3d(\n",
+ " x=[s_thr1, s_thr2],\n",
+ " y=[0, 0],\n",
+ " z=[lineshape_res1_z[thr1_filter][0], lineshape_res1_z[thr2_filter][0]],\n",
+ " **point_kwargs,\n",
+ ")\n",
+ "thr_points_res2 = go.Scatter3d(\n",
+ " x=[s_thr1, s_thr2],\n",
+ " y=[0, 0],\n",
+ " z=[lineshape_res2_z[thr1_filter][0], lineshape_res2_z[thr2_filter][0]],\n",
+ " **point_kwargs,\n",
+ ")\n",
+ "\n",
+ "plotly_fig = make_subplots(\n",
+ " rows=2,\n",
+ " cols=2,\n",
+ " horizontal_spacing=0.01,\n",
+ " vertical_spacing=0.05,\n",
+ " specs=[\n",
+ " [{\"type\": \"surface\"}, {\"type\": \"surface\"}],\n",
+ " [{\"type\": \"surface\"}, {\"type\": \"surface\"}],\n",
+ " ],\n",
+ " subplot_titles=[\n",
+ " \"thr₁ < thr₂ < mᵣ\",\n",
+ " \"thr₁ < mᵣ < thr₂\",\n",
+ " ],\n",
+ ")\n",
+ "\n",
+ "# thr₁ < thr₂ < mᵣ\n",
+ "selector = dict(col=1, row=1)\n",
+ "plotly_fig.add_trace(Sp_I_res1, **selector)\n",
+ "plotly_fig.add_trace(Sn_III_res1, **selector)\n",
+ "plotly_fig.add_trace(lineshape_res1, **selector)\n",
+ "plotly_fig.add_trace(thr_points_res1, **selector)\n",
+ "selector = dict(col=1, row=2)\n",
+ "plotly_fig.add_trace(Sp_I_res1, **selector)\n",
+ "plotly_fig.add_trace(Sn_II_res1, **selector)\n",
+ "plotly_fig.add_trace(lineshape_res1, **selector)\n",
+ "plotly_fig.add_trace(thr_points_res1, **selector)\n",
+ "\n",
+ "# thr₁ < mᵣ < thr₂\n",
+ "selector = dict(col=2, row=1)\n",
+ "plotly_fig.add_trace(Sp_I_res2, **selector)\n",
+ "plotly_fig.add_trace(Sn_II_res2, **selector)\n",
+ "plotly_fig.add_trace(lineshape_res2, **selector)\n",
+ "plotly_fig.add_trace(thr_points_res2, **selector)\n",
+ "selector = dict(col=2, row=2)\n",
+ "plotly_fig.add_trace(Sp_I_res2, **selector)\n",
+ "plotly_fig.add_trace(Sn_III_res2, **selector)\n",
+ "plotly_fig.add_trace(lineshape_res2, **selector)\n",
+ "plotly_fig.add_trace(thr_points_res2, **selector)\n",
+ "\n",
+ "plotly_fig.update_layout(\n",
+ " height=600,\n",
+ " margin=dict(l=0, r=0, t=20, b=0),\n",
+ " showlegend=False,\n",
+ ")\n",
+ "\n",
+ "plotly_fig.update_scenes(\n",
+ " camera_center=dict(z=-0.1),\n",
+ " camera_eye=dict(x=1.4, y=1.4, z=1.4),\n",
+ " xaxis_range=(2.0, 5.0),\n",
+ " xaxis_title_text=\"Re √s\",\n",
+ " yaxis_title_text=\"Im √s\",\n",
+ " zaxis_title_text=\"Im T(s)\",\n",
+ " zaxis_range=[-vmax, +vmax],\n",
+ ")\n",
+ "plotly_fig.show()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Complex plane widget"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "tags": [
+ "remove-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "%matplotlib widget"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "cellView": "form",
+ "editable": true,
+ "tags": [
+ "hide-input",
+ "scroll-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "# @title\n",
+ "sliders = dict(\n",
+ " g01=w.FloatSlider(\n",
+ " description=R\"$g^{0}_1$\",\n",
+ " value=0.5,\n",
+ " min=-2,\n",
+ " max=+2,\n",
+ " ),\n",
+ " g02=w.FloatSlider(\n",
+ " description=\"$g^{0}_2$\",\n",
+ " value=0.5,\n",
+ " min=-2,\n",
+ " max=+2,\n",
+ " ),\n",
+ " m0=w.FloatSlider(\n",
+ " description=\"$m_0$\",\n",
+ " value=4,\n",
+ " min=0,\n",
+ " max=+6,\n",
+ " ),\n",
+ " T_max=w.FloatSlider(\n",
+ " description=R\"$T_\\mathrm{max}$\",\n",
+ " value=1.0,\n",
+ " min=0.1,\n",
+ " max=6.0,\n",
+ " step=0.1,\n",
+ " ),\n",
+ ")\n",
+ "\n",
+ "\n",
+ "fig, axes = plt.subplots(\n",
+ " figsize=(11, 6),\n",
+ " ncols=2,\n",
+ " nrows=2,\n",
+ " sharex=True,\n",
+ " gridspec_kw={\"height_ratios\": [1, 2]},\n",
+ ")\n",
+ "fig.canvas.toolbar_visible = False\n",
+ "fig.canvas.header_visible = False\n",
+ "fig.canvas.footer_visible = False\n",
+ "ax1d1, ax1d2, ax2d1, ax2d2 = axes.flatten()\n",
+ "\n",
+ "for ax in axes[1]:\n",
+ " ax.set_xlabel(R\"$\\mathrm{Re}\\,\\sqrt{s}$\")\n",
+ "ax1d1.set_ylabel(\"Intensity (a.u.)\")\n",
+ "ax2d1.set_ylabel(R\"$\\mathrm{Im}\\,\\sqrt{s}$\")\n",
+ "\n",
+ "ax1d1.set_title(\"I and II\")\n",
+ "ax1d2.set_title(\"I and III\")\n",
+ "\n",
+ "R_color = \"C4\"\n",
+ "T1_color = \"C0\"\n",
+ "T2_color = \"C3\"\n",
+ "T3_color = \"C2\"\n",
+ "\n",
+ "\n",
+ "LINES = None\n",
+ "MESH = None\n",
+ "style = dict(cmap=plt.cm.coolwarm)\n",
+ "\n",
+ "\n",
+ "def plot(m0, g01, g02, T_max):\n",
+ " global LINES, MESH\n",
+ " local_args = args1[:-3] + (m0, g01, g02)\n",
+ " T1p_res1 = T_I_func(Zp**2, *local_args)\n",
+ " T2n_res1 = T_II_func(Zn**2, *local_args)\n",
+ " T3n_res1 = T_III_func(Zn**2, *local_args)\n",
+ " T1y = np.abs(T1p_res1[0]) ** 2\n",
+ " T2y = np.abs(T2n_res1[0]) ** 2\n",
+ " T3y = np.abs(T3n_res1[0]) ** 2\n",
+ " if MESH is None and LINES is None:\n",
+ " LINES = [\n",
+ " ax1d1.axvline(m0, c=R_color, ls=\"dashed\"),\n",
+ " ax1d2.axvline(m0, c=R_color, ls=\"dashed\"),\n",
+ " ax2d1.axvline(m0, c=R_color, ls=\"dashed\", label=R\"$m_\\mathrm{res}$\"),\n",
+ " ax2d2.axvline(m0, c=R_color, ls=\"dashed\", label=R\"$m_\\mathrm{res}$\"),\n",
+ " ax1d1.plot(x, T1y, c=T1_color, label=R\"$\\left|T_\\mathrm{I}\\right|^2$\")[0],\n",
+ " ax1d1.plot(\n",
+ " x, T2y, c=T2_color, label=R\"$\\left|T_\\mathrm{II}\\right|^2$\", ls=\"dotted\"\n",
+ " )[0],\n",
+ " ax1d2.plot(x, T1y, c=T1_color, label=R\"$\\left|T_\\mathrm{I}\\right|^2$\")[0],\n",
+ " ax1d2.plot(\n",
+ " x,\n",
+ " T3y,\n",
+ " c=T3_color,\n",
+ " label=R\"$\\left|T_\\mathrm{III}\\right|^2$\",\n",
+ " ls=\"dotted\",\n",
+ " )[0],\n",
+ " ]\n",
+ " MESH = [\n",
+ " ax2d1.pcolormesh(X, Y, T1p_res1.imag, **style),\n",
+ " ax2d1.pcolormesh(X, -Y, T2n_res1.imag, **style),\n",
+ " ax2d2.pcolormesh(X, +Y, T1p_res1.imag, **style),\n",
+ " ax2d2.pcolormesh(X, -Y, T3n_res1.imag, **style),\n",
+ " ]\n",
+ " else:\n",
+ " MESH[0].set_array(T1p_res1.imag)\n",
+ " MESH[1].set_array(T2n_res1.imag)\n",
+ " MESH[2].set_array(T1p_res1.imag)\n",
+ " MESH[3].set_array(T3n_res1.imag)\n",
+ " LINES[0].set_xdata(m0)\n",
+ " LINES[1].set_xdata(m0)\n",
+ " LINES[2].set_xdata(m0)\n",
+ " LINES[3].set_xdata(m0)\n",
+ " LINES[4].set_ydata(T1y)\n",
+ " LINES[5].set_ydata(T2y)\n",
+ " LINES[6].set_ydata(T1y)\n",
+ " LINES[7].set_ydata(T3y)\n",
+ " for mesh in MESH:\n",
+ " mesh.set_clim(-T_max, +T_max)\n",
+ " for ax in axes[0]:\n",
+ " ax.set_ylim(0, max(T1y) * 1.05)\n",
+ " fig.canvas.draw()\n",
+ "\n",
+ "\n",
+ "for ax in axes[:, 1]:\n",
+ " ax.set_yticks([])\n",
+ "for ax in axes[1]:\n",
+ " ax.axhline(0, c=\"black\", ls=\"dotted\", lw=1)\n",
+ "for ax in axes[0]:\n",
+ " ax.axvline(s_thr1, c=\"C0\", ls=\"dotted\", lw=1)\n",
+ " ax.axvline(s_thr2, c=\"C1\", ls=\"dotted\", lw=1)\n",
+ "for ax in axes[1]:\n",
+ " ax.axvline(s_thr1, c=\"C0\", label=R\"$\\sqrt{s_\\mathrm{thr1}}$\", ls=\"dotted\", lw=1)\n",
+ " ax.axvline(s_thr2, c=\"C1\", label=R\"$\\sqrt{s_\\mathrm{thr2}}$\", ls=\"dotted\", lw=1)\n",
+ "ax2d1.text(0.5, +0.7, R\"$T_\\mathrm{I}$\", color=T1_color, size=20)\n",
+ "ax2d1.text(0.5, -0.75, R\"$T_\\mathrm{II}$\", color=T2_color, size=20)\n",
+ "ax2d2.text(0.5, +0.7, R\"$T_\\mathrm{I}$\", color=T1_color, size=20)\n",
+ "ax2d2.text(0.5, -0.75, R\"$T_\\mathrm{III}$\", color=T3_color, size=20)\n",
+ "\n",
+ "output = w.interactive_output(plot, controls=sliders)\n",
+ "UI = w.VBox(list(sliders.values()))\n",
+ "fig.tight_layout()\n",
+ "ax1d1.legend()\n",
+ "ax1d2.legend()\n",
+ "ax2d2.legend()\n",
+ "display(output, UI)"
+ ]
+ }
+ ],
+ "metadata": {
+ "colab": {
+ "toc_visible": true
+ },
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.14"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}
diff --git a/docs/symbolics.ipynb b/docs/symbolics.ipynb
new file mode 100644
index 00000000..1b38b1bd
--- /dev/null
+++ b/docs/symbolics.ipynb
@@ -0,0 +1,875 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "```{autolink-concat}\n",
+ "```"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Symbolic amplitude models"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": [
+ "remove-cell"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "%pip install -q black==24.2.0 sympy==1.12"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Amplitude analysis is a method that is used intensively in particle and hadron physics experiments. It allows us to describes the intensity distributions obtained from the experiments with the use of amplitude models. These allow us to extract parameters about intermediate states appearing in the scattering processes, which are governed by the electroweak force and the strong force.\n",
+ "\n",
+ "The complicated nature of the strong force, described by Quantum Chromodynamics, makes it difficult to derive intensity models from first principles. Instead, we have to rely on approximations given specific assumptions for the scattering process that we study. Each amplitude model that we formulate, is almost always merely an approximation of the true scattering process. As a consequence, we always have to reassess our analysis results and try alternative models. In addition, amplitude models can be extremely complicated, with large, complex-valued parametrizations and dozens of input parameters. We therefore want to evaluate these models with as much information as possible. That means large input data samples and 'fits' using the full likelihood function, which provides us a multidimensional description of the data by using event-based, unbinned fit methods.\n",
+ "\n",
+ "Given these challenges, we can identify **three major requirements that amplitude analysis software should satisfy**:"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ ":::{card} {material-regular}`speed` Performance\n",
+ ":link: performance\n",
+ ":link-type: ref\n",
+ "We want to evaluate likelihood functions as fast as possible over large data samples, so that we can optimize our model parameters by testing several hypotheses in due time.\n",
+ ":::\n",
+ "\n",
+ ":::{card} {material-regular}`draw` Flexibility\n",
+ ":link: flexibility\n",
+ ":link-type: ref\n",
+ "We want to quickly formulate a wide range of amplitude models, given the latest theoretical and experimental insights.\n",
+ ":::\n",
+ "\n",
+ ":::{card} {material-regular}`school` Transparency\n",
+ ":link: transparency\n",
+ ":link-type: ref\n",
+ "It should be easy to inspect the implemented amplitude models, ideally by using mathematical formulas, so that the analysis can easily be reproduced or compared to results from other experiments, tools, or theoretical models.\n",
+ ":::\n",
+ "\n",
+ "---"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "(performance)=\n",
+ "## {material-regular}`speed` Performance"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "(array-oriented)=\n",
+ "### Array-oriented programming"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": []
+ },
+ "source": [
+ "Even though Python is a popular programming language for data science, it is too slow for performing computations over large data samples. Computations in Python programs are therefore almost always outsourced through third-party Python libraries that are written in C++ or other compiled languages. This leads to an **array-oriented programming** style. Variables represent multidimensional arrays and the computational backend performs the element-wise operations behind-the-scenes. This has the additional benefit that the higher level Python code becomes more readable.\n",
+ "\n",
+ "In the following example, we have two data samples $a$ and $b$, each containing a million data points, and we want to compute $c_i=a_i+b_i^2$ for each of these data point $i$. For simplicity, we set both $a$ and $b$ to be `[0, 1, 2, ..., 999_999]`."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "a_lst = list(range(1_000_000))\n",
+ "b_lst = list(range(1_000_000))"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": []
+ },
+ "source": [
+ "#### Pure Python loop"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": []
+ },
+ "source": [
+ "Naively, one could compute $c$ for each data point by creating a list and filling it with $c_i = a_i+b_i^2$."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "%%timeit\n",
+ "c_lst = []\n",
+ "for a_i, b_i in zip(a_lst, a_lst):\n",
+ " c_lst.append(a_i + b_i**2)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": []
+ },
+ "source": [
+ "`for` loops like these are a natural choice when coming from compiled languages like C++, but are considerably much slower when done with Python."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": []
+ },
+ "source": [
+ "#### Equivalent computation with arrays"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": []
+ },
+ "source": [
+ "[NumPy](https://numpy.org) is one of the most popular array-oriented libraries for Python. The data points for $a$ and $b$ are now represented by array objects..."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "import numpy as np\n",
+ "\n",
+ "a = np.array(a_lst)\n",
+ "b = np.array(b_lst)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": []
+ },
+ "source": [
+ "...and the _array-oriented_ computation of $c = a+b^2$ becomes much **faster** and **more readable**."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "%%timeit\n",
+ "c = a + b**2"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Accelerated computational libraries"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "The 2010s saw the release of a number of Python packages for highly optimized numerical computational backends that gained popularity within the data science and Machine Learning communities. Examples of these are [Numba](https://numba.pydata.org) (2012), [TensorFlow](https://tensorflow.org) (2015), [Pytorch](https://pytorch.org) (2016), and [JAX](https://jax.rtfd.io) (2018). Just like NumPy, the core of these packages is written in highly performant languages like C++, but apply several smart techniques to make the computations even faster. The main techniques that these backends apply are:\n",
+ "\n",
+ "- **Just-In-Time Compilation** (JIT): Python code is compiled if and only if it is run. JIT not only offers performance in a dynamic workflow, but also allows the compiler to optimize the code at runtime based on the actual data input.\n",
+ "- **Hardware acceleration**: JIT compilation is performed through an intermediate, device-agnostic layer of code (particularly [XLA](https://openxla.org/xla)), which allows the user to run their code not only on regular CPUs, but also on different types of hardware accelerators, like GPUs and TPUs.\n",
+ "- **Parallelization**: array-oriented computations can automatically parallelized over multiple CPU cores (multithreading) or multiple CPU, GPU or TPU devices (multiprocessing).\n",
+ "- **Automatic Differentiation**: Many of these libraries can automatically compute derivatives, which is useful for gradient-based optimization algorithms. While this functionality was designed with linear Machine Learning models in mind, it can be used to compute exact gradients over mathematical models.\n",
+ "\n",
+ "These techniques are usually directly available with minor changes to existing [array-oriented code](#array-oriented). In most cases, it is just a matter of decorating the array-oriented function with a JIT-compile decorator and, where needed, replacing the calls to vectorized functions (such as summing up a column in two-dimensional array) with their accelerated equivalents."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "::::{tab-set}\n",
+ "\n",
+ ":::{tab-item} Original\n",
+ "```python\n",
+ "import numpy as np\n",
+ "\n",
+ "def my_func(a, b):\n",
+ " return np.sum(a + b**2, axis=0)\n",
+ "```\n",
+ ":::\n",
+ "\n",
+ ":::{tab-item} Numba\n",
+ "```python\n",
+ "import numba as nb\n",
+ "import numpy as np\n",
+ "\n",
+ "@nb.jit(nopython=True)\n",
+ "def my_func(a, b):\n",
+ " return np.sum(a + b**2, axis=0)\n",
+ "```\n",
+ ":::\n",
+ "\n",
+ ":::{tab-item} JAX\n",
+ "```python\n",
+ "import jax\n",
+ "import jax.numpy as jnp\n",
+ "\n",
+ "@jax.jit\n",
+ "def my_func(a, b):\n",
+ " return jnp.sum(a + b**2, axis=0)\n",
+ "```\n",
+ ":::\n",
+ "\n",
+ ":::{tab-item} TensorFlow\n",
+ "```python\n",
+ "import tensorflow as tf\n",
+ "import tensorflow.experimental.numpy as tnp\n",
+ "\n",
+ "@tf.function(jit_compile=True)\n",
+ "def my_func(a, b):\n",
+ " return tnp.sum(a + b**2, axis=0)\n",
+ "```\n",
+ ":::\n",
+ "\n",
+ ":::{tab-item} Pytorch\n",
+ "```python\n",
+ "import torch\n",
+ "\n",
+ "@torch.jit.script\n",
+ "def my_func(a, b):\n",
+ " return torch.sum(a + b**2, dim=0)\n",
+ "```\n",
+ ":::\n",
+ "\n",
+ "::::"
+ ]
+ },
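+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "The tabs above only illustrate JIT compilation. Automatic differentiation comes almost for free as well. The following minimal sketch reuses the `my_func` definition from the JAX tab and computes its exact gradient (this assumes that JAX is installed; the input numbers are arbitrary):\n",
+ "\n",
+ "```python\n",
+ "import jax\n",
+ "import jax.numpy as jnp\n",
+ "\n",
+ "\n",
+ "@jax.jit\n",
+ "def my_func(a, b):\n",
+ "    return jnp.sum(a + b**2, axis=0)\n",
+ "\n",
+ "\n",
+ "# Exact gradient of my_func with respect to its first argument\n",
+ "grad_func = jax.grad(my_func, argnums=0)\n",
+ "grad_func(jnp.array([1.0, 2.0]), jnp.array([3.0, 4.0]))  # -> [1.0, 1.0]\n",
+ "```"
+ ]
+ },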
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "As can be seen, the implementation of the array-oriented NumPy function remains largely unaffected by the switch to these accelerated computational libraries. The resulting JIT-compiled function objects are automatically compiled and parallelized for the selected device for fast numerical computations over large data samples.\n",
+ "\n",
+ ":::{topic} {material-regular}`speed` Performance ✔️\n",
+ "Array-oriented programming allows for concise, recognizable implementations of mathematical models. Accelerated libraries like JAX and Numba can transform these implementations so that high-performance numerical computing can be achieved with trivial changes to the code.\n",
+ ":::\n",
+ "\n",
+ "---"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": []
+ },
+ "source": [
+ "(flexibility)=\n",
+ "## {material-regular}`draw` Flexibility"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Python offers us flexibility to write concise and understandable code that can be run code interactively through a terminal or with Jupyter Notebooks. As we saw, [array-oriented computational backends](#array-oriented) make this code suitable for high-performance, parallelized computations over large data samples. The fact that array-oriented code looks so similar for different accelerated computational libraries begs the question whether we can find a way to **directly convert the mathematical expressions that we as physicists are familiar with into these fast numerical functions**. It turns out that we can do this using a Computer Algebra System."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Computer Algebra System"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Programs like [Mathematica](https://www.wolfram.com/mathematica), [Maple](https://www.maplesoft.com/products/Maple) and [Matlab](https://www.mathworks.com/products/matlab.html) are popular examples for mathematics, physicists, and engineers, as they allow to simplify expression, solve equations or integrals, investigate their behavior with plots, et cetera. At core, these programs are [Computer Algebra Systems](https://en.wikipedia.org/wiki/List_of_computer_algebra_systems) (CAS) that represent mathematical expressions as graphs or trees and transform and modify them through algorithms that implement algebraic operations.\n",
+ "\n",
+ "The most commonly used CAS in Python is [SymPy](https://docs.sympy.org) and it has a major advantage over commercial CAS programs in that it is [open source and can be used as a library](https://docs.sympy.org/latest/tutorials/intro-tutorial/intro.html#why-sympy). This allows us to integrate it into our own applications for amplitude analysis or build up simple mathematical expressions in Jupyter notebooks, so that we can inspect them in $\\LaTeX$ form. For example, a simple Breit-Wigner function is written as:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import sympy as sp\n",
+ "\n",
+ "s, m0, Γ0, g = sp.symbols(\"s m0 Gamma0 g\")\n",
+ "expression = g * m0 * Γ0 / (m0**2 - s - sp.I * Γ0 * m0)\n",
+ "expression"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ ":::{note}\n",
+ "SymPy orders symbolic terms [its own way](https://stackoverflow.com/a/36344627) if they are commutative, independent on the Python code given by the user.\n",
+ ":::"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "#### Expression trees"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": []
+ },
+ "source": [
+ "Internally, SymPy expressions are built up by applying mathematical operations to algebraic objects, such as symbols and \n",
+ "numbers. In this example, we see how the Breit-Wigner function is built up from four symbols, a complex number, and a few integers. The resulting expression can be visualized as an **expression tree** of fundamental mathematical operations."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": [
+ "hide-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "import graphviz\n",
+ "\n",
+ "style = [\n",
+ " (sp.Atom, {\"color\": \"grey\", \"fontcolor\": \"grey\"}),\n",
+ " (sp.Symbol, {\"color\": \"royalblue\", \"fontcolor\": \"royalblue\"}),\n",
+ "]\n",
+ "src = sp.dotprint(expression, styles=style)\n",
+ "graphviz.Source(src)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "#### Algebraic substitutions"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "An example of an algebraic computation is algebraic substitution of some of the symbols. Here's an example where we substitute the symbols $N$, $m_0$, and $\\Gamma_0$ with fixed values (like model parameters)."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "substituted_expr = expression.subs({m0: 0.980, Γ0: 0.06, g: 1})\n",
+ "substituted_expr"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "With the substitutions, the expression tree shrinks. Subtrees that contained only real-valued numbers or one of the three substituted symbols are collapsed into a single number node."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "jupyter": {
+ "source_hidden": true
+ },
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": [
+ "hide-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "src = sp.dotprint(substituted_expr.n(3), styles=style)\n",
+ "graphviz.Source(src)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Code generation"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": []
+ },
+ "source": [
+ "Expression trees are not only useful for applying algebraic operations to their nodes. They can also be used as a **template for generating code**. In fact, the $\\LaTeX$ formula is generated using SymPy's $\\LaTeX$ printer:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": [
+ "hide-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "from IPython.display import Markdown\n",
+ "\n",
+ "src = sp.latex(expression)\n",
+ "Markdown(f\"```latex\\n{src}\\n```\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": []
+ },
+ "source": [
+ "SymPy provides a [large number of code printers](https://docs.sympy.org/latest/modules/codegen.html) for different languages and human-readable serialization standards. A few examples are shown below."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": [
+ "scroll-output",
+ "scroll-input",
+ "hide-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "from IPython.display import Markdown\n",
+ "from sympy.printing.mathml import MathMLPresentationPrinter\n",
+ "\n",
+ "\n",
+ "def to_mathml(expr: sp.Expr) -> str:\n",
+ " printer = MathMLPresentationPrinter()\n",
+ " xml = printer._print(expr)\n",
+ " return xml.toprettyxml().replace(\"\\t\", \" \")\n",
+ "\n",
+ "\n",
+ "Markdown(\n",
+ " f\"\"\"\n",
+ "```python\n",
+ "# Python\n",
+ "{sp.pycode(expression)}\n",
+ "```\n",
+ "```cpp\n",
+ "// C++\n",
+ "{sp.cxxcode(expression, standard=\"c++17\")}\n",
+ "```\n",
+ "```fortran\n",
+ "! Fortran\n",
+ "{sp.fcode(expression).strip()}\n",
+ "```\n",
+ "```julia\n",
+ "# Julia\n",
+ "{sp.julia_code(expression)}\n",
+ "```\n",
+ "```rust\n",
+ "// Rust\n",
+ "{sp.rust_code(expression)} \n",
+ "```\n",
+ "```xml\n",
+ "\n",
+ "{to_mathml(expression)}\n",
+ "```\n",
+ "\"\"\"\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Since SymPy is a Python library, the code generation process can be [completely customized](https://docs.sympy.org/latest/modules/printing.html). This allows us to generate code for languages that are not yet implemented or modify the behavior of existing code printers, which can be used to **generate [array-oriented Python code](#accelerated-computational-libraries)** for several computational libraries. For the Breit-Wigner example, the generated NumPy function looks like the following."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "full_func = sp.lambdify(args=(s, m0, Γ0, g), expr=expression, modules=\"numpy\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "jupyter": {
+ "source_hidden": true
+ },
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": [
+ "remove-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "import inspect\n",
+ "\n",
+ "import black\n",
+ "\n",
+ "src = inspect.getsource(full_func)\n",
+ "src = black.format_str(src, mode=black.FileMode())\n",
+ "Markdown(f\"```python\\n{src}\\n```\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "substituted_func = sp.lambdify(args=s, expr=substituted_expr, modules=\"numpy\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "editable": true,
+ "jupyter": {
+ "source_hidden": true
+ },
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": [
+ "remove-input"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "src = inspect.getsource(substituted_func)\n",
+ "src = black.format_str(src, mode=black.FileMode())\n",
+ "Markdown(f\"```python\\n{src}\\n```\")"
+ ]
+ },
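+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "The printers themselves can be customized as well. The following sketch is purely illustrative (the `AbsSquared` class is hypothetical and not part of any model in this notebook): it shows how a user-defined expression class can provide its own array-oriented code through the `_numpycode` method that SymPy's `NumPyPrinter` looks for.\n",
+ "\n",
+ "```python\n",
+ "class AbsSquared(sp.Expr):\n",
+ "    \"\"\"Hypothetical unevaluated expression for |z|².\"\"\"\n",
+ "\n",
+ "    def _numpycode(self, printer, *args) -> str:\n",
+ "        (z,) = self.args\n",
+ "        return f\"abs({printer._print(z)})**2\"\n",
+ "\n",
+ "\n",
+ "z = sp.Symbol(\"z\")\n",
+ "intensity_func = sp.lambdify(z, AbsSquared(z), modules=\"numpy\")\n",
+ "intensity_func(3 - 4j)  # 25.0\n",
+ "```"
+ ]
+ },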
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": []
+ },
+ "source": [
+ "The example as described here is small for illustrative purposes. It turns out that code generation works just as well for **expressions with a much larger number of mathematical operations**, even if in the order of hundreds of thousands. This is exactly what is needed for fitting amplitude models to data.\n",
+ "\n",
+ "We now have a flexible and transparent way of formulating amplitude models that can be easily modified with algebraic operations. The models can immediately be inspected as mathematical expressions and can be used as template for generating array-oriented numerical functions for efficient computations over large data samples. In addition, any algebraic operations that simplify the expression tree directly map onto the generated array-oriented code, which can result in better numerical performance of the generated code."
+ ]
+ },
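+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "As a sketch of how this connects back to the [accelerated libraries](#accelerated-computational-libraries) from the performance section, the same Breit-Wigner expression can be turned into a JIT-compiled JAX function in a single step. This assumes that JAX is installed (it is not part of this notebook's requirements) and uses the `\"jax\"` printer module that recent SymPy versions provide for `lambdify()`:\n",
+ "\n",
+ "```python\n",
+ "import jax\n",
+ "import jax.numpy as jnp\n",
+ "\n",
+ "# Generate a JAX version of the symbolic expression and JIT-compile it\n",
+ "jax_func = jax.jit(sp.lambdify(args=(s, m0, Γ0, g), expr=expression, modules=\"jax\"))\n",
+ "\n",
+ "s_values = jnp.linspace(0, 4, num=1_000_000)\n",
+ "intensities = jnp.abs(jax_func(s_values, 0.980, 0.06, 1)) ** 2\n",
+ "```"
+ ]
+ },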
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": []
+ },
+ "source": [
+ ":::{topic} {material-regular}`draw` Flexibility ✔️\n",
+ "A Computer Algebra System provides a simple way to **separate physics from number crunching**. Amplitude models only have to be formulated symbolically, while computations are outsourced to array-oriented, numerical libraries through automated code generation. This provides us a **[Single Source of Truth](https://en.wikipedia.org/wiki/Single_source_of_truth)** for implemented physics models.\n",
+ ":::\n",
+ "\n",
+ "---"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": []
+ },
+ "source": [
+ "(transparency)=\n",
+ "## {material-regular}`school` Transparency"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "We have seen how a Computer Algebra System that generates array-oriented code allows us to formulate [performant](#performance) and [flexible](#flexibility) amplitude models. Physicists can now focus on implementing theory in a central place, while the computations are outsourced to optimized libraries. In itself, these are ingredients that make it much easier to write analysis code. However, the set-up offers major indirect benefits to the wider amplitude analysis community as well."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Self-documenting workflow"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "The combination of a CAS, performant code generation, and a dynamically typed language like Python is ideal when working with Jupyter notebooks. Amplitude models can be built up interactively with the option to directly inspect their implementation as mathematical formulas or plot their behavior with visualization libraries. Fast, numerical performance is near at hand through automatic code generation, which bridges the gab between formulating amplitude models to performing fits.\n",
+ "\n",
+ "This ties in perfectly with recent trends in data science and modern publishing tools. In recent years, there have been several initiatives that render Jupyter notebooks as publication-ready documents (websites, PDF files, etc.). Community-wide examples are the [Executable Book Project](https://executablebooks.org), [Curvenote](https://curvenote.com), and [Quarto](https://quarto.org), while CERN has launched platforms like [SWAN](https://swan.docs.cern.ch) for running notebooks with direct access to CERN computing resources and [Reana](https://www.reanahub.io) for making analyses more reproducible and scalable.\n",
+ "\n",
+ "Given these trends, writing an amplitude analysis with symbolics is therefore not only intuitive to the physicist who write it, but results in a **self-documenting workflow** that naturally evolves towards publication-ready materials. An example is the [polarimetry analysis by LHCb](https://doi.org/10.1007/JHEP07(2023)228), where the entire implemented amplitude model is [directly rendered from the codebase](https://lc2pkpi-polarimetry.docs.cern.ch/amplitude-model.html#amplitude) as mathematical formulas and analysis results from high-performance computations are directly available through code generation."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": []
+ },
+ "source": [
+ "### Model preservation"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": []
+ },
+ "source": [
+ "Amplitude analyses are notoriously hard to reproduce. There are often several ways to formulate the same model, the model complexity makes the source code of the analysis model hard to understand, and starting conditions in a fit can lead to completely different fit results. Formulating the amplitude models symbolically addresses exactly these difficulties.\n",
+ "\n",
+ "First of all, the [self-documenting workflow](#self-documenting-workflow) with symbolic expressions removes the need for readers to dive through the underlying code, as the formulas are directly visible to the reader. Mathematics is the language we all speak. This easily allows others to reimplement models into their own framework of choice, now or in the future, in e.g. new programming languages.\n",
+ "\n",
+ "Second, a symbolic amplitude model can be serialized to human-readable format with [code generation](#code-generation). Just as with the generation of numerical functions, the model's [expression tree](#expression-trees) can be used to generate nodes in a serialization format like YAML or JSON. Other analysis frameworks can then import the model for cross-checks or for adapting the analysis to other experiments.\n",
+ "\n",
+ "On a technical note, the Python ecosystem in combination with Jupyter Notebooks and Sphinx makes it possible for any reader to directly rerun analysis in the browser or in some local environment. [Pinned dependencies](https://github.com/ComPWA/update-pip-constraints) ensure that the analysis produces the same results."
+ ]
+ },
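+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "As a minimal sketch of the serialization idea, SymPy's own `srepr()` representation already provides an exact, human-readable string form of the [expression tree](#expression-trees) that can be stored on disk and parsed back; a dedicated YAML or JSON schema would follow the same tree-walking principle:\n",
+ "\n",
+ "```python\n",
+ "serialized = sp.srepr(expression)   # exact, parsable string form of the tree\n",
+ "recovered = sp.sympify(serialized)  # rebuild the expression from the string\n",
+ "assert recovered == expression\n",
+ "```"
+ ]
+ },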
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "editable": true,
+ "slideshow": {
+ "slide_type": ""
+ },
+ "tags": []
+ },
+ "source": [
+ "### Knowledge exchange"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Exciting times are coming for amplitude analysis studies. On the one hand, collider experiments are producing increasingly large data samples that provide high statistics that are suitable for amplitude analysis and challenge the tools that are on the market. On the other hand, computational power has become so widely available, that it becomes possible to perform fits with more complicated models over large data input. While this provides many opportunities, it also poses challenges to the community.\n",
+ "\n",
+ "There are many physicists who have little background in amplitude analysis, but need to become familiar with the theory and the techniques. For now, however, the literature is sparse, highly technical, and often specific to the experiment for which it was written. This makes the learning curve for newcomers extremely steep, so that it takes a lot of time before one can perform an actual amplitude analysis.\n",
+ "\n",
+ "As amplitude models become more complex, it becomes crucial to get input from other experiments. This requires a proper matching of amplitude models and therefore it is essential that the models become more reproducible, extendable, and portable. All of this makes it paramount for the amplitude analysis community to provide easy access to the basic principles of the theoretical model.\n",
+ "\n",
+ "Symbolics can play a valuable role here as well, as it becomes much easier to share and maintain knowledge gained about amplitude models and amplitude analysis theory. Symbolic amplitude models directly show the implemented mathematics and their numerical functions can directly be used for interactive visualizations. In addition, the [self-documenting workflow](#self-documenting-workflow) makes it more inviting to contribute to community documentation as it narrows the gap between theory and code."
+ ]
+ }
+ ],
+ "metadata": {
+ "colab": {
+ "toc_visible": true
+ },
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.13"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}
diff --git a/environment.yml b/environment.yml
index aa7d5a26..9a6d456b 100644
--- a/environment.yml
+++ b/environment.yml
@@ -10,5 +10,5 @@ dependencies:
- pip:
- -c .constraints/py3.10.txt -e .[dev]
variables:
- PRETTIER_LEGACY_CLI: "1"
+ PRETTIER_LEGACY_CLI: 1
PYTHONHASHSEED: 0
diff --git a/pyproject.toml b/pyproject.toml
index fd4d3157..86d2077b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -243,8 +243,9 @@ split-on-trailing-comma = false
"**/016.ipynb" = ["PLC2701"]
"**/021.ipynb" = ["I001"]
"**/022.ipynb" = ["PLC2701"]
-"**/024.ipynb" = ["E731", "E741", "PGH001", "S307"]
+"**/024.ipynb" = ["E731", "E741", "S307"]
"**/025.ipynb" = ["E731"]
+"**/98*.ipynb" = ["E731", "PLR6301"]
"*.ipynb" = [
"A003",
"B008",
@@ -253,6 +254,7 @@ split-on-trailing-comma = false
"C90",
"D",
"E703",
+ "E741",
"F404",
"N802",
"N803",
@@ -269,6 +271,7 @@ split-on-trailing-comma = false
"PLW2901",
"RUF027", # for _latex_repr_
"S101",
+ "S307",
"T20",
"TCH00",
]
@@ -287,12 +290,20 @@ split-on-trailing-comma = false
"PLW2901",
]
"docs/report/002.ipynb" = ["F821"]
-"docs/report/003.ipynb" = ["F821"]
-"docs/report/005.ipynb" = ["F821"]
+"docs/report/003.ipynb" = ["F821", "S404", "S603"]
+"docs/report/004.ipynb" = ["S404", "S603"]
+"docs/report/005.ipynb" = ["F821", "S404", "S603"]
+"docs/report/006.ipynb" = ["S404", "S603"]
+"docs/report/009.ipynb" = ["S404", "S603"]
+"docs/report/010.ipynb" = ["S404", "S603"]
"docs/report/011.ipynb" = ["F821"]
+"docs/report/017.ipynb" = ["S404", "S603"]
"docs/report/020.ipynb" = ["F821"]
+"docs/report/021.ipynb" = ["S404", "S603"]
"docs/report/022.ipynb" = ["F821"]
"docs/report/024.ipynb" = ["F821", "S102"]
+"docs/report/025.ipynb" = ["S404", "S603"]
+"docs/report/027.ipynb" = ["PLW1510", "S404", "S603"]
"pin_nb_requirements.py" = ["INP001", "PLW2901"]
"setup.py" = ["D100"]
diff --git a/tox.ini b/tox.ini
index fcfb17ea..4a169cd3 100644
--- a/tox.ini
+++ b/tox.ini
@@ -20,7 +20,7 @@ description =
Build documentation and API through Sphinx
passenv = *
setenv =
- FORCE_COLOR = yes
+ FORCE_COLOR = 1
[testenv:doclive]
allowlist_externals =
@@ -48,7 +48,7 @@ description =
Set up a server to directly preview changes to the HTML pages
passenv = *
setenv =
- FORCE_COLOR = yes
+ FORCE_COLOR = 1
[testenv:docnb]
allowlist_externals =
@@ -65,7 +65,7 @@ description =
passenv = *
setenv =
EXECUTE_NB = yes
- FORCE_COLOR = yes
+ FORCE_COLOR = 1
[testenv:docnblive]
allowlist_externals =
@@ -94,7 +94,7 @@ description =
passenv = *
setenv =
EXECUTE_NB = yes
- FORCE_COLOR = yes
+ FORCE_COLOR = 1
[testenv:docnb-force]
allowlist_externals =
@@ -105,7 +105,7 @@ description =
Execute ALL Jupyter notebooks and build documentation with Sphinx
passenv = *
setenv =
- FORCE_COLOR = yes
+ FORCE_COLOR = 1
FORCE_EXECUTE_NB = yes
PYTHONHASHSEED = 0
@@ -131,7 +131,7 @@ description =
Check external links in the documentation (requires internet connection)
passenv = *
setenv =
- FORCE_COLOR = yes
+ FORCE_COLOR = 1
[testenv:nb]
allowlist_externals =